gcc/tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2016 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
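
/* For illustration only (not part of the original sources): a minimal sketch
   of the transformation intra-SRA aims for.  Assuming a hypothetical function

     struct pair { int a; int b; };

     int
     sum (void)
     {
       struct pair p;
       p.a = 1;
       p.b = 2;
       return p.a + p.b;
     }

   the accesses p.a and p.b are replaced by independent scalar variables, so
   the body effectively becomes

     int SR_p_a;
     int SR_p_b;
     SR_p_a = 1;
     SR_p_b = 2;
     return SR_p_a + SR_p_b;

   and the aggregate P itself can go away.  The SR_* names are made up here;
   the pass derives human readable names for replacements via make_fancy_name
   below.  */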
73
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "symbol-summary.h"
100 #include "ipa-prop.h"
101 #include "params.h"
102 #include "dbgcnt.h"
103 #include "tree-inline.h"
104 #include "ipa-inline.h"
105 #include "ipa-utils.h"
106 #include "builtins.h"
107
108 /* Enumeration of all aggregate reductions we can do. */
109 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
110 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
111 SRA_MODE_INTRA }; /* late intraprocedural SRA */
112
113 /* Global variable describing which aggregate reduction we are performing at
114 the moment. */
115 static enum sra_mode sra_mode;
116
117 struct assign_link;
118
119 /* ACCESS represents each access to an aggregate variable (as a whole or a
120 part). It can also represent a group of accesses that refer to exactly the
121 same fragment of an aggregate (i.e. those that have exactly the same offset
122 and size). Such representatives for a single aggregate, once determined,
123 are linked in a linked list and have the group fields set.
124
125 Moreover, when doing intraprocedural SRA, a tree is built from those
126 representatives (by the means of first_child and next_sibling pointers), in
127 which all items in a subtree are "within" the root, i.e. their offset is
128 greater or equal to offset of the root and offset+size is smaller or equal
129 to offset+size of the root. Children of an access are sorted by offset.
130
131 Note that accesses to parts of vector and complex number types are always
132 represented by an access to the whole complex number or vector. It is the
133 duty of the modifying functions to replace them appropriately. */
134
135 struct access
136 {
137 /* Values returned by `get_ref_base_and_extent' for each component reference.
138 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
139 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
140 HOST_WIDE_INT offset;
141 HOST_WIDE_INT size;
142 tree base;
143
144 /* Expression. It is context dependent so do not use it to create new
145 expressions to access the original aggregate. See PR 42154 for a
146 testcase. */
147 tree expr;
148 /* Type. */
149 tree type;
150
151 /* The statement this access belongs to. */
152 gimple *stmt;
153
154 /* Next group representative for this aggregate. */
155 struct access *next_grp;
156
157 /* Pointer to the group representative. Pointer to itself if the struct is
158 the representative. */
159 struct access *group_representative;
160
161 /* If this access has any children (in terms of the definition above), this
162 points to the first one. */
163 struct access *first_child;
164
165 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
166 described above. In IPA-SRA this is a pointer to the next access
167 belonging to the same group (having the same representative). */
168 struct access *next_sibling;
169
170 /* Pointers to the first and last element in the linked list of assign
171 links. */
172 struct assign_link *first_link, *last_link;
173
174 /* Pointer to the next access in the work queue. */
175 struct access *next_queued;
176
177 /* Replacement variable for this access "region." Never to be accessed
178 directly, always only by the means of get_access_replacement() and only
179 when grp_to_be_replaced flag is set. */
180 tree replacement_decl;
181
182 /* Is this access an access to a non-addressable field? */
183 unsigned non_addressable : 1;
184
185 /* Is this access made in reverse storage order? */
186 unsigned reverse : 1;
187
188 /* Is this particular access a write access? */
189 unsigned write : 1;
190
191 /* Is this access currently in the work queue? */
192 unsigned grp_queued : 1;
193
194 /* Does this group contain a write access? This flag is propagated down the
195 access tree. */
196 unsigned grp_write : 1;
197
198 /* Does this group contain a read access? This flag is propagated down the
199 access tree. */
200 unsigned grp_read : 1;
201
202 /* Does this group contain a read access that comes from an assignment
203 statement? This flag is propagated down the access tree. */
204 unsigned grp_assignment_read : 1;
205
206 /* Does this group contain a write access that comes from an assignment
207 statement? This flag is propagated down the access tree. */
208 unsigned grp_assignment_write : 1;
209
210 /* Does this group contain a read access through a scalar type? This flag is
211 not propagated in the access tree in any direction. */
212 unsigned grp_scalar_read : 1;
213
214 /* Does this group contain a write access through a scalar type? This flag
215 is not propagated in the access tree in any direction. */
216 unsigned grp_scalar_write : 1;
217
218 /* Is this access an artificial one created to scalarize some record
219 entirely? */
220 unsigned grp_total_scalarization : 1;
221
222 /* Other passes of the analysis use this bit to make function
223 analyze_access_subtree create scalar replacements for this group if
224 possible. */
225 unsigned grp_hint : 1;
226
227 /* Is the subtree rooted in this access fully covered by scalar
228 replacements? */
229 unsigned grp_covered : 1;
230
231 /* If set to true, this access and all below it in an access tree must not be
232 scalarized. */
233 unsigned grp_unscalarizable_region : 1;
234
235 /* Whether data have been written to parts of the aggregate covered by this
236 access which are not to be scalarized. This flag is propagated up in the
237 access tree. */
238 unsigned grp_unscalarized_data : 1;
239
240 /* Does this access and/or group contain a write access through a
241 BIT_FIELD_REF? */
242 unsigned grp_partial_lhs : 1;
243
244 /* Set when a scalar replacement should be created for this variable. */
245 unsigned grp_to_be_replaced : 1;
246
247 /* Set when we want a replacement for the sole purpose of having it in
248 generated debug statements. */
249 unsigned grp_to_be_debug_replaced : 1;
250
251 /* Should TREE_NO_WARNING of a replacement be set? */
252 unsigned grp_no_warning : 1;
253
254 /* Is it possible that the group refers to data which might be (directly or
255 otherwise) modified? */
256 unsigned grp_maybe_modified : 1;
257
258 /* Set when this is a representative of a pointer to scalar (i.e. by
259 reference) parameter which we consider for turning into a plain scalar
260 (i.e. a by value parameter). */
261 unsigned grp_scalar_ptr : 1;
262
263 /* Set when we discover that this pointer is not safe to dereference in the
264 caller. */
265 unsigned grp_not_necessarilly_dereferenced : 1;
266 };
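
/* An illustrative sketch (not from the original sources) of the access trees
   described above.  For a hypothetical variable

     struct { struct { int x; int y; } in; int z; } s;

   accessed via s.in, s.in.x, s.in.y and s.z, the group representatives would
   form a tree roughly like

     s.in     (offset 0,  size 64)
       s.in.x (offset 0,  size 32)
       s.in.y (offset 32, size 32)
     s.z      (offset 64, size 32)

   where children hang off first_child/next_sibling and the representatives
   are chained through next_grp.  Offsets and sizes are in bits and assume a
   32-bit int with no padding.  */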
267
268 typedef struct access *access_p;
269
270
271 /* Alloc pool for allocating access structures. */
272 static object_allocator<struct access> access_pool ("SRA accesses");
273
274 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
275 are used to propagate subaccesses from rhs to lhs as long as they don't
276 conflict with what is already there. */
277 struct assign_link
278 {
279 struct access *lacc, *racc;
280 struct assign_link *next;
281 };
282
283 /* Alloc pool for allocating assign link structures. */
284 static object_allocator<assign_link> assign_link_pool ("SRA links");
285
286 /* Base (tree) -> Vector (vec<access_p> *) map. */
287 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
288
289 /* Candidate hash table helpers. */
290
291 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
292 {
293 static inline hashval_t hash (const tree_node *);
294 static inline bool equal (const tree_node *, const tree_node *);
295 };
296
297 /* Hash a tree in a uid_decl_map. */
298
299 inline hashval_t
300 uid_decl_hasher::hash (const tree_node *item)
301 {
302 return item->decl_minimal.uid;
303 }
304
305 /* Return true if the DECL_UIDs of both trees are equal. */
306
307 inline bool
308 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
309 {
310 return (a->decl_minimal.uid == b->decl_minimal.uid);
311 }
312
313 /* Set of candidates. */
314 static bitmap candidate_bitmap;
315 static hash_table<uid_decl_hasher> *candidates;
316
317 /* For a candidate UID return the candidate's decl. */
318
319 static inline tree
320 candidate (unsigned uid)
321 {
322 tree_node t;
323 t.decl_minimal.uid = uid;
324 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
325 }
326
327 /* Bitmap of candidates which we should try to entirely scalarize away and
328 those which cannot be (because they are, and need to be, used as a whole). */
329 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
330
331 /* Bitmap of candidates in the constant pool, which cannot be scalarized
332 because this would produce non-constant expressions (e.g. Ada). */
333 static bitmap disqualified_constants;
334
335 /* Obstack for creation of fancy names. */
336 static struct obstack name_obstack;
337
338 /* Head of a linked list of accesses that need to have their subaccesses
339 propagated to their assignment counterparts. */
340 static struct access *work_queue_head;
341
342 /* Number of parameters of the analyzed function when doing early ipa SRA. */
343 static int func_param_count;
344
345 /* scan_function sets the following to true if it encounters a call to
346 __builtin_apply_args. */
347 static bool encountered_apply_args;
348
349 /* Set by scan_function when it finds a recursive call. */
350 static bool encountered_recursive_call;
351
352 /* Set by scan_function when it finds a recursive call with fewer actual
353 arguments than formal parameters. */
354 static bool encountered_unchangable_recursive_call;
355
356 /* This is a table in which for each basic block and parameter there is a
357 distance (offset + size) in that parameter which is dereferenced and
358 accessed in that BB. */
359 static HOST_WIDE_INT *bb_dereferences;
360 /* Bitmap of BBs that can cause the function to "stop" progressing by
361 returning, throwing externally, looping infinitely or calling a function
362 which might abort, etc. */
363 static bitmap final_bbs;
364
365 /* Representative of no accesses at all. */
366 static struct access no_accesses_representant;
367
368 /* Predicate to test the special value. */
369
370 static inline bool
371 no_accesses_p (struct access *access)
372 {
373 return access == &no_accesses_representant;
374 }
375
376 /* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
377 representative fields are dumped, otherwise those which only describe the
378 individual access are. */
379
380 static struct
381 {
382 /* Number of processed aggregates is readily available in
383 analyze_all_variable_accesses and so is not stored here. */
384
385 /* Number of created scalar replacements. */
386 int replacements;
387
388 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
389 expression. */
390 int exprs;
391
392 /* Number of statements created by generate_subtree_copies. */
393 int subtree_copies;
394
395 /* Number of statements created by load_assign_lhs_subreplacements. */
396 int subreplacements;
397
398 /* Number of times sra_modify_assign has deleted a statement. */
399 int deleted;
400
401 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
402 RHS separately due to type conversions or nonexistent matching
403 references. */
404 int separate_lhs_rhs_handling;
405
406 /* Number of parameters that were removed because they were unused. */
407 int deleted_unused_parameters;
408
409 /* Number of scalars passed as parameters by reference that have been
410 converted to be passed by value. */
411 int scalar_by_ref_to_by_val;
412
413 /* Number of aggregate parameters that were replaced by one or more of their
414 components. */
415 int aggregate_params_reduced;
416
417 /* Number of components created when splitting aggregate parameters. */
418 int param_reductions_created;
419 } sra_stats;
420
421 static void
422 dump_access (FILE *f, struct access *access, bool grp)
423 {
424 fprintf (f, "access { ");
425 fprintf (f, "base = (%d)'", DECL_UID (access->base));
426 print_generic_expr (f, access->base, 0);
427 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
428 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
429 fprintf (f, ", expr = ");
430 print_generic_expr (f, access->expr, 0);
431 fprintf (f, ", type = ");
432 print_generic_expr (f, access->type, 0);
433 fprintf (f, ", non_addressable = %d, reverse = %d",
434 access->non_addressable, access->reverse);
435 if (grp)
436 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
437 "grp_assignment_write = %d, grp_scalar_read = %d, "
438 "grp_scalar_write = %d, grp_total_scalarization = %d, "
439 "grp_hint = %d, grp_covered = %d, "
440 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
441 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
442 "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
443 "grp_not_necessarilly_dereferenced = %d\n",
444 access->grp_read, access->grp_write, access->grp_assignment_read,
445 access->grp_assignment_write, access->grp_scalar_read,
446 access->grp_scalar_write, access->grp_total_scalarization,
447 access->grp_hint, access->grp_covered,
448 access->grp_unscalarizable_region, access->grp_unscalarized_data,
449 access->grp_partial_lhs, access->grp_to_be_replaced,
450 access->grp_to_be_debug_replaced, access->grp_maybe_modified,
451 access->grp_not_necessarilly_dereferenced);
452 else
453 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
454 "grp_partial_lhs = %d\n",
455 access->write, access->grp_total_scalarization,
456 access->grp_partial_lhs);
457 }
458
459 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
460
461 static void
462 dump_access_tree_1 (FILE *f, struct access *access, int level)
463 {
464 do
465 {
466 int i;
467
468 for (i = 0; i < level; i++)
469 fputs ("* ", dump_file);
470
471 dump_access (f, access, true);
472
473 if (access->first_child)
474 dump_access_tree_1 (f, access->first_child, level + 1);
475
476 access = access->next_sibling;
477 }
478 while (access);
479 }
480
481 /* Dump all access trees for a variable, given the pointer to the first root in
482 ACCESS. */
483
484 static void
485 dump_access_tree (FILE *f, struct access *access)
486 {
487 for (; access; access = access->next_grp)
488 dump_access_tree_1 (f, access, 0);
489 }
490
491 /* Return true iff ACC is non-NULL and has subaccesses. */
492
493 static inline bool
494 access_has_children_p (struct access *acc)
495 {
496 return acc && acc->first_child;
497 }
498
499 /* Return true iff ACC is (partly) covered by at least one replacement. */
500
501 static bool
502 access_has_replacements_p (struct access *acc)
503 {
504 struct access *child;
505 if (acc->grp_to_be_replaced)
506 return true;
507 for (child = acc->first_child; child; child = child->next_sibling)
508 if (access_has_replacements_p (child))
509 return true;
510 return false;
511 }
512
513 /* Return a vector of pointers to accesses for the variable given in BASE or
514 NULL if there is none. */
515
516 static vec<access_p> *
517 get_base_access_vector (tree base)
518 {
519 return base_access_vec->get (base);
520 }
521
522 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
523 in ACCESS. Return NULL if it cannot be found. */
524
525 static struct access *
526 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
527 HOST_WIDE_INT size)
528 {
529 while (access && (access->offset != offset || access->size != size))
530 {
531 struct access *child = access->first_child;
532
533 while (child && (child->offset + child->size <= offset))
534 child = child->next_sibling;
535 access = child;
536 }
537
538 return access;
539 }
540
541 /* Return the first group representative for BASE or NULL if none exists. */
542
543 static struct access *
544 get_first_repr_for_decl (tree base)
545 {
546 vec<access_p> *access_vec;
547
548 access_vec = get_base_access_vector (base);
549 if (!access_vec)
550 return NULL;
551
552 return (*access_vec)[0];
553 }
554
555 /* Find an access representative for the variable BASE and given OFFSET and
556 SIZE. Requires that access trees have already been built. Return NULL if
557 it cannot be found. */
558
559 static struct access *
560 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
561 HOST_WIDE_INT size)
562 {
563 struct access *access;
564
565 access = get_first_repr_for_decl (base);
566 while (access && (access->offset + access->size <= offset))
567 access = access->next_grp;
568 if (!access)
569 return NULL;
570
571 return find_access_in_subtree (access, offset, size);
572 }
573
574 /* Add LINK to the linked list of assign links of RACC. */
575 static void
576 add_link_to_rhs (struct access *racc, struct assign_link *link)
577 {
578 gcc_assert (link->racc == racc);
579
580 if (!racc->first_link)
581 {
582 gcc_assert (!racc->last_link);
583 racc->first_link = link;
584 }
585 else
586 racc->last_link->next = link;
587
588 racc->last_link = link;
589 link->next = NULL;
590 }
591
592 /* Move all link structures from the linked list of OLD_RACC to the linked list
593 of NEW_RACC. */
594 static void
595 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
596 {
597 if (!old_racc->first_link)
598 {
599 gcc_assert (!old_racc->last_link);
600 return;
601 }
602
603 if (new_racc->first_link)
604 {
605 gcc_assert (!new_racc->last_link->next);
606 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
607
608 new_racc->last_link->next = old_racc->first_link;
609 new_racc->last_link = old_racc->last_link;
610 }
611 else
612 {
613 gcc_assert (!new_racc->last_link);
614
615 new_racc->first_link = old_racc->first_link;
616 new_racc->last_link = old_racc->last_link;
617 }
618 old_racc->first_link = old_racc->last_link = NULL;
619 }
620
621 /* Add ACCESS to the work queue (which is actually a stack). */
622
623 static void
624 add_access_to_work_queue (struct access *access)
625 {
626 if (!access->grp_queued)
627 {
628 gcc_assert (!access->next_queued);
629 access->next_queued = work_queue_head;
630 access->grp_queued = 1;
631 work_queue_head = access;
632 }
633 }
634
635 /* Pop an access from the work queue, and return it, assuming there is one. */
636
637 static struct access *
638 pop_access_from_work_queue (void)
639 {
640 struct access *access = work_queue_head;
641
642 work_queue_head = access->next_queued;
643 access->next_queued = NULL;
644 access->grp_queued = 0;
645 return access;
646 }
647
648
649 /* Allocate necessary structures. */
650
651 static void
652 sra_initialize (void)
653 {
654 candidate_bitmap = BITMAP_ALLOC (NULL);
655 candidates = new hash_table<uid_decl_hasher>
656 (vec_safe_length (cfun->local_decls) / 2);
657 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
658 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
659 disqualified_constants = BITMAP_ALLOC (NULL);
660 gcc_obstack_init (&name_obstack);
661 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
662 memset (&sra_stats, 0, sizeof (sra_stats));
663 encountered_apply_args = false;
664 encountered_recursive_call = false;
665 encountered_unchangable_recursive_call = false;
666 }
667
668 /* Deallocate all general structures. */
669
670 static void
671 sra_deinitialize (void)
672 {
673 BITMAP_FREE (candidate_bitmap);
674 delete candidates;
675 candidates = NULL;
676 BITMAP_FREE (should_scalarize_away_bitmap);
677 BITMAP_FREE (cannot_scalarize_away_bitmap);
678 BITMAP_FREE (disqualified_constants);
679 access_pool.release ();
680 assign_link_pool.release ();
681 obstack_free (&name_obstack, NULL);
682
683 delete base_access_vec;
684 }
685
686 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise. */
687
688 static bool constant_decl_p (tree decl)
689 {
690 return TREE_CODE (decl) == VAR_DECL && DECL_IN_CONSTANT_POOL (decl);
691 }
692
693 /* Remove DECL from candidates for SRA and write REASON to the dump file if
694 there is one. */
695 static void
696 disqualify_candidate (tree decl, const char *reason)
697 {
698 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
699 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
700 if (constant_decl_p (decl))
701 bitmap_set_bit (disqualified_constants, DECL_UID (decl));
702
703 if (dump_file && (dump_flags & TDF_DETAILS))
704 {
705 fprintf (dump_file, "! Disqualifying ");
706 print_generic_expr (dump_file, decl, 0);
707 fprintf (dump_file, " - %s\n", reason);
708 }
709 }
710
711 /* Return true iff the type contains a field or an element which does not allow
712 scalarization. */
713
714 static bool
715 type_internals_preclude_sra_p (tree type, const char **msg)
716 {
717 tree fld;
718 tree et;
719
720 switch (TREE_CODE (type))
721 {
722 case RECORD_TYPE:
723 case UNION_TYPE:
724 case QUAL_UNION_TYPE:
725 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
726 if (TREE_CODE (fld) == FIELD_DECL)
727 {
728 tree ft = TREE_TYPE (fld);
729
730 if (TREE_THIS_VOLATILE (fld))
731 {
732 *msg = "volatile structure field";
733 return true;
734 }
735 if (!DECL_FIELD_OFFSET (fld))
736 {
737 *msg = "no structure field offset";
738 return true;
739 }
740 if (!DECL_SIZE (fld))
741 {
742 *msg = "zero structure field size";
743 return true;
744 }
745 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
746 {
747 *msg = "structure field offset not fixed";
748 return true;
749 }
750 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
751 {
752 *msg = "structure field size not fixed";
753 return true;
754 }
755 if (!tree_fits_shwi_p (bit_position (fld)))
756 {
757 *msg = "structure field size too big";
758 return true;
759 }
760 if (AGGREGATE_TYPE_P (ft)
761 && int_bit_position (fld) % BITS_PER_UNIT != 0)
762 {
763 *msg = "structure field is bit field";
764 return true;
765 }
766
767 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
768 return true;
769 }
770
771 return false;
772
773 case ARRAY_TYPE:
774 et = TREE_TYPE (type);
775
776 if (TYPE_VOLATILE (et))
777 {
778 *msg = "element type is volatile";
779 return true;
780 }
781
782 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
783 return true;
784
785 return false;
786
787 default:
788 return false;
789 }
790 }
791
792 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
793 base variable if it is. Return T if it is not an SSA_NAME. */
794
795 static tree
796 get_ssa_base_param (tree t)
797 {
798 if (TREE_CODE (t) == SSA_NAME)
799 {
800 if (SSA_NAME_IS_DEFAULT_DEF (t))
801 return SSA_NAME_VAR (t);
802 else
803 return NULL_TREE;
804 }
805 return t;
806 }
807
808 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
809 belongs to, unless the BB has already been marked as potentially
810 final. */
811
812 static void
813 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
814 {
815 basic_block bb = gimple_bb (stmt);
816 int idx, parm_index = 0;
817 tree parm;
818
819 if (bitmap_bit_p (final_bbs, bb->index))
820 return;
821
822 for (parm = DECL_ARGUMENTS (current_function_decl);
823 parm && parm != base;
824 parm = DECL_CHAIN (parm))
825 parm_index++;
826
827 gcc_assert (parm_index < func_param_count);
828
829 idx = bb->index * func_param_count + parm_index;
830 if (bb_dereferences[idx] < dist)
831 bb_dereferences[idx] = dist;
832 }
833
834 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
835 the three fields. Also add it to the vector of accesses corresponding to
836 the base. Finally, return the new access. */
837
838 static struct access *
839 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
840 {
841 struct access *access = access_pool.allocate ();
842
843 memset (access, 0, sizeof (struct access));
844 access->base = base;
845 access->offset = offset;
846 access->size = size;
847
848 base_access_vec->get_or_insert (base).safe_push (access);
849
850 return access;
851 }
852
853 static bool maybe_add_sra_candidate (tree);
854
855 /* Create and insert access for EXPR. Return created access, or NULL if it is
856 not possible. Also scan for uses of the constant pool as we go along and add
857 to candidates. */
858
859 static struct access *
860 create_access (tree expr, gimple *stmt, bool write)
861 {
862 struct access *access;
863 HOST_WIDE_INT offset, size, max_size;
864 tree base = expr;
865 bool reverse, ptr, unscalarizable_region = false;
866
867 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
868
869 if (sra_mode == SRA_MODE_EARLY_IPA
870 && TREE_CODE (base) == MEM_REF)
871 {
872 base = get_ssa_base_param (TREE_OPERAND (base, 0));
873 if (!base)
874 return NULL;
875 ptr = true;
876 }
877 else
878 ptr = false;
879
880 /* For constant-pool entries, check we can substitute the constant value. */
881 if (constant_decl_p (base)
882 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
883 {
884 gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
885 if (expr != base
886 && !is_gimple_reg_type (TREE_TYPE (expr))
887 && dump_file && (dump_flags & TDF_DETAILS))
888 {
889 /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
890 and elements of multidimensional arrays (which are
891 multi-element arrays in their own right). */
892 fprintf (dump_file, "Allowing non-reg-type load of part"
893 " of constant-pool entry: ");
894 print_generic_expr (dump_file, expr, 0);
895 }
896 maybe_add_sra_candidate (base);
897 }
898
899 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
900 return NULL;
901
902 if (sra_mode == SRA_MODE_EARLY_IPA)
903 {
904 if (size < 0 || size != max_size)
905 {
906 disqualify_candidate (base, "Encountered a variable sized access.");
907 return NULL;
908 }
909 if (TREE_CODE (expr) == COMPONENT_REF
910 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
911 {
912 disqualify_candidate (base, "Encountered a bit-field access.");
913 return NULL;
914 }
915 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
916
917 if (ptr)
918 mark_parm_dereference (base, offset + size, stmt);
919 }
920 else
921 {
922 if (size != max_size)
923 {
924 size = max_size;
925 unscalarizable_region = true;
926 }
927 if (size < 0)
928 {
929 disqualify_candidate (base, "Encountered an unconstrained access.");
930 return NULL;
931 }
932 }
933
934 access = create_access_1 (base, offset, size);
935 access->expr = expr;
936 access->type = TREE_TYPE (expr);
937 access->write = write;
938 access->grp_unscalarizable_region = unscalarizable_region;
939 access->stmt = stmt;
940 access->reverse = reverse;
941
942 if (TREE_CODE (expr) == COMPONENT_REF
943 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
944 access->non_addressable = 1;
945
946 return access;
947 }
948
949
950 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
951 ARRAY_TYPE with fields that are either of gimple register types (excluding
952 bit-fields) or (recursively) scalarizable types. */
953
954 static bool
955 scalarizable_type_p (tree type)
956 {
957 gcc_assert (!is_gimple_reg_type (type));
958 if (type_contains_placeholder_p (type))
959 return false;
960
961 switch (TREE_CODE (type))
962 {
963 case RECORD_TYPE:
964 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
965 if (TREE_CODE (fld) == FIELD_DECL)
966 {
967 tree ft = TREE_TYPE (fld);
968
969 if (DECL_BIT_FIELD (fld))
970 return false;
971
972 if (!is_gimple_reg_type (ft)
973 && !scalarizable_type_p (ft))
974 return false;
975 }
976
977 return true;
978
979 case ARRAY_TYPE:
980 {
981 if (TYPE_DOMAIN (type) == NULL_TREE
982 || !tree_fits_shwi_p (TYPE_SIZE (type))
983 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
984 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= 0)
985 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
986 return false;
987 if (tree_to_shwi (TYPE_SIZE (type)) == 0
988 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
989 /* Zero-element array, should not prevent scalarization. */
990 ;
991 else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
992 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
993 /* Variable-length array, do not allow scalarization. */
994 return false;
995
996 tree elem = TREE_TYPE (type);
997 if (!is_gimple_reg_type (elem)
998 && !scalarizable_type_p (elem))
999 return false;
1000 return true;
1001 }
1002 default:
1003 return false;
1004 }
1005 }
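
/* Illustrative examples (not from the original sources) of what the predicate
   above accepts, using hypothetical types:

     struct ok   { int i; float f[4]; };           -- scalarizable
     struct bad1 { int bits : 3; int rest : 29; };  -- rejected: bit-fields
     struct bad2 { int n; char data[]; };           -- rejected: the flexible
                                                       array member has no
                                                       fixed size

   Variable-length arrays and types containing placeholders are likewise
   rejected.  */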
1006
1007 static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);
1008
1009 /* Create total_scalarization accesses for all scalar fields of a member
1010 of type DECL_TYPE conforming to scalarizable_type_p. BASE
1011 must be the top-most VAR_DECL representing the variable; within that,
1012 OFFSET locates the member and REF must be the memory reference expression for
1013 the member. */
1014
1015 static void
1016 completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
1017 {
1018 switch (TREE_CODE (decl_type))
1019 {
1020 case RECORD_TYPE:
1021 for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
1022 if (TREE_CODE (fld) == FIELD_DECL)
1023 {
1024 HOST_WIDE_INT pos = offset + int_bit_position (fld);
1025 tree ft = TREE_TYPE (fld);
1026 tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);
1027
1028 scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
1029 TYPE_REVERSE_STORAGE_ORDER (decl_type),
1030 nref, ft);
1031 }
1032 break;
1033 case ARRAY_TYPE:
1034 {
1035 tree elemtype = TREE_TYPE (decl_type);
1036 tree elem_size = TYPE_SIZE (elemtype);
1037 gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
1038 HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
1039 gcc_assert (el_size > 0);
1040
1041 tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
1042 gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
1043 tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
1044 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
1045 if (maxidx)
1046 {
1047 gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
1048 tree domain = TYPE_DOMAIN (decl_type);
1049 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
1050 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
1051 offset_int idx = wi::to_offset (minidx);
1052 offset_int max = wi::to_offset (maxidx);
1053 if (!TYPE_UNSIGNED (domain))
1054 {
1055 idx = wi::sext (idx, TYPE_PRECISION (domain));
1056 max = wi::sext (max, TYPE_PRECISION (domain));
1057 }
1058 for (int el_off = offset; idx <= max; ++idx)
1059 {
1060 tree nref = build4 (ARRAY_REF, elemtype,
1061 ref,
1062 wide_int_to_tree (domain, idx),
1063 NULL_TREE, NULL_TREE);
1064 scalarize_elem (base, el_off, el_size,
1065 TYPE_REVERSE_STORAGE_ORDER (decl_type),
1066 nref, elemtype);
1067 el_off += el_size;
1068 }
1069 }
1070 }
1071 break;
1072 default:
1073 gcc_unreachable ();
1074 }
1075 }
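
/* A hypothetical example (not from the original sources) of the accesses that
   total scalarization creates.  For

     struct { int a; short b[2]; } v;

   completely_scalarize would generate artificial accesses roughly equivalent
   to v.a (offset 0, size 32), v.b[0] (offset 32, size 16) and v.b[1]
   (offset 48, size 16), each with grp_total_scalarization set, assuming a
   32-bit int, a 16-bit short and no padding.  */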
1076
1077 /* Create total_scalarization accesses for a member of type TYPE, which must
1078 satisfy either is_gimple_reg_type or scalarizable_type_p. BASE must be the
1079 top-most VAR_DECL representing the variable; within that, POS and SIZE locate
1080 the member, REVERSE gives its storage order, and REF must be the reference
1081 expression for it. */
1082
1083 static void
1084 scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
1085 tree ref, tree type)
1086 {
1087 if (is_gimple_reg_type (type))
1088 {
1089 struct access *access = create_access_1 (base, pos, size);
1090 access->expr = ref;
1091 access->type = type;
1092 access->grp_total_scalarization = 1;
1093 access->reverse = reverse;
1094 /* Accesses for intraprocedural SRA can have their stmt NULL. */
1095 }
1096 else
1097 completely_scalarize (base, type, pos, ref);
1098 }
1099
1100 /* Create a total_scalarization access for VAR as a whole. VAR must be of a
1101 RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p. */
1102
1103 static void
1104 create_total_scalarization_access (tree var)
1105 {
1106 HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
1107 struct access *access;
1108
1109 access = create_access_1 (var, 0, size);
1110 access->expr = var;
1111 access->type = TREE_TYPE (var);
1112 access->grp_total_scalarization = 1;
1113 }
1114
1115 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it. */
1116
1117 static inline bool
1118 contains_view_convert_expr_p (const_tree ref)
1119 {
1120 while (handled_component_p (ref))
1121 {
1122 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1123 return true;
1124 ref = TREE_OPERAND (ref, 0);
1125 }
1126
1127 return false;
1128 }
1129
1130 /* Search the given tree for a declaration by skipping handled components and
1131 exclude it from the candidates. */
1132
1133 static void
1134 disqualify_base_of_expr (tree t, const char *reason)
1135 {
1136 t = get_base_address (t);
1137 if (sra_mode == SRA_MODE_EARLY_IPA
1138 && TREE_CODE (t) == MEM_REF)
1139 t = get_ssa_base_param (TREE_OPERAND (t, 0));
1140
1141 if (t && DECL_P (t))
1142 disqualify_candidate (t, reason);
1143 }
1144
1145 /* Scan expression EXPR and create access structures for all accesses to
1146 candidates for scalarization. Return the created access or NULL if none is
1147 created. */
1148
1149 static struct access *
1150 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1151 {
1152 struct access *ret = NULL;
1153 bool partial_ref;
1154
1155 if (TREE_CODE (expr) == BIT_FIELD_REF
1156 || TREE_CODE (expr) == IMAGPART_EXPR
1157 || TREE_CODE (expr) == REALPART_EXPR)
1158 {
1159 expr = TREE_OPERAND (expr, 0);
1160 partial_ref = true;
1161 }
1162 else
1163 partial_ref = false;
1164
1165 /* We need to dive through V_C_Es in order to get the size of the operand
1166 and not the result type. Ada produces such statements. We are also
1167 capable of handling the topmost V_C_E but not any of those buried in other
1168 handled components. */
1169 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR && !storage_order_barrier_p (expr))
1170 expr = TREE_OPERAND (expr, 0);
1171
1172 if (contains_view_convert_expr_p (expr))
1173 {
1174 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1175 "component.");
1176 return NULL;
1177 }
1178 if (TREE_THIS_VOLATILE (expr))
1179 {
1180 disqualify_base_of_expr (expr, "part of a volatile reference.");
1181 return NULL;
1182 }
1183
1184 switch (TREE_CODE (expr))
1185 {
1186 case MEM_REF:
1187 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1188 && sra_mode != SRA_MODE_EARLY_IPA)
1189 return NULL;
1190 /* fall through */
1191 case VAR_DECL:
1192 case PARM_DECL:
1193 case RESULT_DECL:
1194 case COMPONENT_REF:
1195 case ARRAY_REF:
1196 case ARRAY_RANGE_REF:
1197 ret = create_access (expr, stmt, write);
1198 break;
1199
1200 default:
1201 break;
1202 }
1203
1204 if (write && partial_ref && ret)
1205 ret->grp_partial_lhs = 1;
1206
1207 return ret;
1208 }
1209
1210 /* Scan expression EXPR and create access structures for all accesses to
1211 candidates for scalarization. Return true if any access has been inserted.
1212 STMT must be the statement from which the expression is taken, WRITE must be
1213 true if the expression is a store and false otherwise. */
1214
1215 static bool
1216 build_access_from_expr (tree expr, gimple *stmt, bool write)
1217 {
1218 struct access *access;
1219
1220 access = build_access_from_expr_1 (expr, stmt, write);
1221 if (access)
1222 {
1223 /* This means the aggregate is accessed as a whole in a way other than an
1224 assign statement and thus cannot be removed even if we had a scalar
1225 replacement for everything. */
1226 if (cannot_scalarize_away_bitmap)
1227 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1228 return true;
1229 }
1230 return false;
1231 }
1232
1233 /* Return the single non-EH successor edge of BB or NULL if there is none or
1234 more than one. */
1235
1236 static edge
1237 single_non_eh_succ (basic_block bb)
1238 {
1239 edge e, res = NULL;
1240 edge_iterator ei;
1241
1242 FOR_EACH_EDGE (e, ei, bb->succs)
1243 if (!(e->flags & EDGE_EH))
1244 {
1245 if (res)
1246 return NULL;
1247 res = e;
1248 }
1249
1250 return res;
1251 }
1252
1253 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1254 there is no alternative spot where to put statements SRA might need to
1255 generate after it. The spot we are looking for is an edge leading to a
1256 single non-EH successor, if it exists and is indeed single. RHS may be
1257 NULL, in that case ignore it. */
1258
1259 static bool
1260 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1261 {
1262 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1263 && stmt_ends_bb_p (stmt))
1264 {
1265 if (single_non_eh_succ (gimple_bb (stmt)))
1266 return false;
1267
1268 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1269 if (rhs)
1270 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1271 return true;
1272 }
1273 return false;
1274 }
1275
1276 /* Scan expressions occurring in STMT, create access structures for all accesses
1277 to candidates for scalarization and remove those candidates which occur in
1278 statements or expressions that prevent them from being split apart. Return
1279 true if any access has been inserted. */
1280
1281 static bool
1282 build_accesses_from_assign (gimple *stmt)
1283 {
1284 tree lhs, rhs;
1285 struct access *lacc, *racc;
1286
1287 if (!gimple_assign_single_p (stmt)
1288 /* Scope clobbers don't influence scalarization. */
1289 || gimple_clobber_p (stmt))
1290 return false;
1291
1292 lhs = gimple_assign_lhs (stmt);
1293 rhs = gimple_assign_rhs1 (stmt);
1294
1295 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1296 return false;
1297
1298 racc = build_access_from_expr_1 (rhs, stmt, false);
1299 lacc = build_access_from_expr_1 (lhs, stmt, true);
1300
1301 if (lacc)
1302 {
1303 lacc->grp_assignment_write = 1;
1304 if (storage_order_barrier_p (rhs))
1305 lacc->grp_unscalarizable_region = 1;
1306 }
1307
1308 if (racc)
1309 {
1310 racc->grp_assignment_read = 1;
1311 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1312 && !is_gimple_reg_type (racc->type))
1313 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1314 if (storage_order_barrier_p (lhs))
1315 racc->grp_unscalarizable_region = 1;
1316 }
1317
1318 if (lacc && racc
1319 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1320 && !lacc->grp_unscalarizable_region
1321 && !racc->grp_unscalarizable_region
1322 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1323 && lacc->size == racc->size
1324 && useless_type_conversion_p (lacc->type, racc->type))
1325 {
1326 struct assign_link *link;
1327
1328 link = assign_link_pool.allocate ();
1329 memset (link, 0, sizeof (struct assign_link));
1330
1331 link->lacc = lacc;
1332 link->racc = racc;
1333
1334 add_link_to_rhs (racc, link);
1335 }
1336
1337 return lacc || racc;
1338 }
1339
1340 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1341 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1342
1343 static bool
1344 asm_visit_addr (gimple *, tree op, tree, void *)
1345 {
1346 op = get_base_address (op);
1347 if (op
1348 && DECL_P (op))
1349 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1350
1351 return false;
1352 }
1353
1354 /* Return true iff callsite CALL has at least as many actual arguments as there
1355 are formal parameters of the function currently processed by IPA-SRA and
1356 that their types match. */
1357
1358 static inline bool
1359 callsite_arguments_match_p (gimple *call)
1360 {
1361 if (gimple_call_num_args (call) < (unsigned) func_param_count)
1362 return false;
1363
1364 tree parm;
1365 int i;
1366 for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
1367 parm;
1368 parm = DECL_CHAIN (parm), i++)
1369 {
1370 tree arg = gimple_call_arg (call, i);
1371 if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
1372 return false;
1373 }
1374 return true;
1375 }
1376
1377 /* Scan function and look for interesting expressions and create access
1378 structures for them. Return true iff any access is created. */
1379
1380 static bool
1381 scan_function (void)
1382 {
1383 basic_block bb;
1384 bool ret = false;
1385
1386 FOR_EACH_BB_FN (bb, cfun)
1387 {
1388 gimple_stmt_iterator gsi;
1389 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1390 {
1391 gimple *stmt = gsi_stmt (gsi);
1392 tree t;
1393 unsigned i;
1394
1395 if (final_bbs && stmt_can_throw_external (stmt))
1396 bitmap_set_bit (final_bbs, bb->index);
1397 switch (gimple_code (stmt))
1398 {
1399 case GIMPLE_RETURN:
1400 t = gimple_return_retval (as_a <greturn *> (stmt));
1401 if (t != NULL_TREE)
1402 ret |= build_access_from_expr (t, stmt, false);
1403 if (final_bbs)
1404 bitmap_set_bit (final_bbs, bb->index);
1405 break;
1406
1407 case GIMPLE_ASSIGN:
1408 ret |= build_accesses_from_assign (stmt);
1409 break;
1410
1411 case GIMPLE_CALL:
1412 for (i = 0; i < gimple_call_num_args (stmt); i++)
1413 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1414 stmt, false);
1415
1416 if (sra_mode == SRA_MODE_EARLY_IPA)
1417 {
1418 tree dest = gimple_call_fndecl (stmt);
1419 int flags = gimple_call_flags (stmt);
1420
1421 if (dest)
1422 {
1423 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1424 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1425 encountered_apply_args = true;
1426 if (recursive_call_p (current_function_decl, dest))
1427 {
1428 encountered_recursive_call = true;
1429 if (!callsite_arguments_match_p (stmt))
1430 encountered_unchangable_recursive_call = true;
1431 }
1432 }
1433
1434 if (final_bbs
1435 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1436 bitmap_set_bit (final_bbs, bb->index);
1437 }
1438
1439 t = gimple_call_lhs (stmt);
1440 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1441 ret |= build_access_from_expr (t, stmt, true);
1442 break;
1443
1444 case GIMPLE_ASM:
1445 {
1446 gasm *asm_stmt = as_a <gasm *> (stmt);
1447 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1448 asm_visit_addr);
1449 if (final_bbs)
1450 bitmap_set_bit (final_bbs, bb->index);
1451
1452 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1453 {
1454 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1455 ret |= build_access_from_expr (t, asm_stmt, false);
1456 }
1457 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1458 {
1459 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1460 ret |= build_access_from_expr (t, asm_stmt, true);
1461 }
1462 }
1463 break;
1464
1465 default:
1466 break;
1467 }
1468 }
1469 }
1470
1471 return ret;
1472 }
1473
1474 /* Helper of the qsort function. The array contains pointers to accesses. An
1475 access is considered smaller than another if it has a smaller offset or if the
1476 offsets are the same but its size is bigger. */
1477
1478 static int
1479 compare_access_positions (const void *a, const void *b)
1480 {
1481 const access_p *fp1 = (const access_p *) a;
1482 const access_p *fp2 = (const access_p *) b;
1483 const access_p f1 = *fp1;
1484 const access_p f2 = *fp2;
1485
1486 if (f1->offset != f2->offset)
1487 return f1->offset < f2->offset ? -1 : 1;
1488
1489 if (f1->size == f2->size)
1490 {
1491 if (f1->type == f2->type)
1492 return 0;
1493 /* Put any non-aggregate type before any aggregate type. */
1494 else if (!is_gimple_reg_type (f1->type)
1495 && is_gimple_reg_type (f2->type))
1496 return 1;
1497 else if (is_gimple_reg_type (f1->type)
1498 && !is_gimple_reg_type (f2->type))
1499 return -1;
1500 /* Put any complex or vector type before any other scalar type. */
1501 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1502 && TREE_CODE (f1->type) != VECTOR_TYPE
1503 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1504 || TREE_CODE (f2->type) == VECTOR_TYPE))
1505 return 1;
1506 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1507 || TREE_CODE (f1->type) == VECTOR_TYPE)
1508 && TREE_CODE (f2->type) != COMPLEX_TYPE
1509 && TREE_CODE (f2->type) != VECTOR_TYPE)
1510 return -1;
1511 /* Put the integral type with the bigger precision first. */
1512 else if (INTEGRAL_TYPE_P (f1->type)
1513 && INTEGRAL_TYPE_P (f2->type))
1514 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1515 /* Put any integral type with non-full precision last. */
1516 else if (INTEGRAL_TYPE_P (f1->type)
1517 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1518 != TYPE_PRECISION (f1->type)))
1519 return 1;
1520 else if (INTEGRAL_TYPE_P (f2->type)
1521 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1522 != TYPE_PRECISION (f2->type)))
1523 return -1;
1524 /* Stabilize the sort. */
1525 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1526 }
1527
1528 /* We want the bigger accesses first, thus the opposite operator in the next
1529 line: */
1530 return f1->size > f2->size ? -1 : 1;
1531 }
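
/* An illustrative ordering (not from the original sources) that the comparator
   above produces.  For accesses to a hypothetical

     union u { double d; struct { int a; int b; } s; float f; };

   all starting at offset 0, qsort would order them d (size 64, scalar),
   s (size 64, aggregate), f (size 32): smaller offsets first, then bigger
   sizes, and for equal sizes scalar (gimple register) types before
   aggregates.  Assumes the usual 32/64-bit sizes for int, float and
   double.  */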
1532
1533
1534 /* Append a name of the declaration to the name obstack. A helper function for
1535 make_fancy_name. */
1536
1537 static void
1538 make_fancy_decl_name (tree decl)
1539 {
1540 char buffer[32];
1541
1542 tree name = DECL_NAME (decl);
1543 if (name)
1544 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1545 IDENTIFIER_LENGTH (name));
1546 else
1547 {
1548 sprintf (buffer, "D%u", DECL_UID (decl));
1549 obstack_grow (&name_obstack, buffer, strlen (buffer));
1550 }
1551 }
1552
1553 /* Helper for make_fancy_name. */
1554
1555 static void
1556 make_fancy_name_1 (tree expr)
1557 {
1558 char buffer[32];
1559 tree index;
1560
1561 if (DECL_P (expr))
1562 {
1563 make_fancy_decl_name (expr);
1564 return;
1565 }
1566
1567 switch (TREE_CODE (expr))
1568 {
1569 case COMPONENT_REF:
1570 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1571 obstack_1grow (&name_obstack, '$');
1572 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1573 break;
1574
1575 case ARRAY_REF:
1576 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1577 obstack_1grow (&name_obstack, '$');
1578 /* Arrays with only one element may not have a constant as their
1579 index. */
1580 index = TREE_OPERAND (expr, 1);
1581 if (TREE_CODE (index) != INTEGER_CST)
1582 break;
1583 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1584 obstack_grow (&name_obstack, buffer, strlen (buffer));
1585 break;
1586
1587 case ADDR_EXPR:
1588 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1589 break;
1590
1591 case MEM_REF:
1592 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1593 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1594 {
1595 obstack_1grow (&name_obstack, '$');
1596 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1597 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1598 obstack_grow (&name_obstack, buffer, strlen (buffer));
1599 }
1600 break;
1601
1602 case BIT_FIELD_REF:
1603 case REALPART_EXPR:
1604 case IMAGPART_EXPR:
1605 gcc_unreachable (); /* we treat these as scalars. */
1606 break;
1607 default:
1608 break;
1609 }
1610 }
1611
1612 /* Create a human readable name for a replacement variable based on EXPR. */
1613
1614 static char *
1615 make_fancy_name (tree expr)
1616 {
1617 make_fancy_name_1 (expr);
1618 obstack_1grow (&name_obstack, '\0');
1619 return XOBFINISH (&name_obstack, char *);
1620 }
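
/* For illustration (not from the original sources): for a hypothetical access
   expression like q.inner.vals[3], make_fancy_name produces "q$inner$vals$3",
   and an unnamed declaration contributes "D<uid>" instead of its name.  The
   result is later used when naming the scalar replacement variable.  */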
1621
1622 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1623 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1624 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1625 be non-NULL and is used to insert new statements either before or below
1626 the current one as specified by INSERT_AFTER. This function is not capable
1627 of handling bitfields. */
1628
1629 tree
1630 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1631 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1632 bool insert_after)
1633 {
1634 tree prev_base = base;
1635 tree off;
1636 tree mem_ref;
1637 HOST_WIDE_INT base_offset;
1638 unsigned HOST_WIDE_INT misalign;
1639 unsigned int align;
1640
1641 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1642 get_object_alignment_1 (base, &align, &misalign);
1643 base = get_addr_base_and_unit_offset (base, &base_offset);
1644
1645 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1646 offset such as array[var_index]. */
1647 if (!base)
1648 {
1649 gassign *stmt;
1650 tree tmp, addr;
1651
1652 gcc_checking_assert (gsi);
1653 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1654 addr = build_fold_addr_expr (unshare_expr (prev_base));
1655 STRIP_USELESS_TYPE_CONVERSION (addr);
1656 stmt = gimple_build_assign (tmp, addr);
1657 gimple_set_location (stmt, loc);
1658 if (insert_after)
1659 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1660 else
1661 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1662
1663 off = build_int_cst (reference_alias_ptr_type (prev_base),
1664 offset / BITS_PER_UNIT);
1665 base = tmp;
1666 }
1667 else if (TREE_CODE (base) == MEM_REF)
1668 {
1669 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1670 base_offset + offset / BITS_PER_UNIT);
1671 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1672 base = unshare_expr (TREE_OPERAND (base, 0));
1673 }
1674 else
1675 {
1676 off = build_int_cst (reference_alias_ptr_type (prev_base),
1677 base_offset + offset / BITS_PER_UNIT);
1678 base = build_fold_addr_expr (unshare_expr (base));
1679 }
1680
1681 misalign = (misalign + offset) & (align - 1);
1682 if (misalign != 0)
1683 align = (misalign & -misalign);
1684 if (align != TYPE_ALIGN (exp_type))
1685 exp_type = build_aligned_type (exp_type, align);
1686
1687 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1688 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1689 if (TREE_THIS_VOLATILE (prev_base))
1690 TREE_THIS_VOLATILE (mem_ref) = 1;
1691 if (TREE_SIDE_EFFECTS (prev_base))
1692 TREE_SIDE_EFFECTS (mem_ref) = 1;
1693 return mem_ref;
1694 }
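
/* A rough sketch (not from the original sources) of the references this
   builds.  For a hypothetical aggregate variable s, OFFSET 32 (bits) and
   EXP_TYPE int, the result is essentially the equivalent of

     MEM[(int *) &s + 4B]

   i.e. a MEM_REF of the requested type at the byte offset OFFSET /
   BITS_PER_UNIT, with the alignment of the new type adjusted according to
   the known misalignment of the base.  */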
1695
1696 /* Construct a memory reference to a part of an aggregate BASE at the given
1697 OFFSET and of the same type as MODEL. In case this is a reference to a
1698 bit-field, the function will replicate the last component_ref of model's
1699 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1700 build_ref_for_offset. */
1701
1702 static tree
1703 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1704 struct access *model, gimple_stmt_iterator *gsi,
1705 bool insert_after)
1706 {
1707 if (TREE_CODE (model->expr) == COMPONENT_REF
1708 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1709 {
1710 /* This access represents a bit-field. */
1711 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1712
1713 offset -= int_bit_position (fld);
1714 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1715 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1716 gsi, insert_after);
1717 /* The flag will be set on the record type. */
1718 REF_REVERSE_STORAGE_ORDER (t) = 0;
1719 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1720 NULL_TREE);
1721 }
1722 else
1723 return
1724 build_ref_for_offset (loc, base, offset, model->reverse, model->type,
1725 gsi, insert_after);
1726 }
1727
1728 /* Attempt to build a memory reference that we could put into a gimple
1729 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1730 create statements and returns NULL instead. This function also ignores
1731 alignment issues and so its results should never end up in non-debug
1732 statements. */
1733
1734 static tree
1735 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1736 struct access *model)
1737 {
1738 HOST_WIDE_INT base_offset;
1739 tree off;
1740
1741 if (TREE_CODE (model->expr) == COMPONENT_REF
1742 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1743 return NULL_TREE;
1744
1745 base = get_addr_base_and_unit_offset (base, &base_offset);
1746 if (!base)
1747 return NULL_TREE;
1748 if (TREE_CODE (base) == MEM_REF)
1749 {
1750 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1751 base_offset + offset / BITS_PER_UNIT);
1752 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1753 base = unshare_expr (TREE_OPERAND (base, 0));
1754 }
1755 else
1756 {
1757 off = build_int_cst (reference_alias_ptr_type (base),
1758 base_offset + offset / BITS_PER_UNIT);
1759 base = build_fold_addr_expr (unshare_expr (base));
1760 }
1761
1762 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1763 }
1764
1765 /* Construct a memory reference consisting of component_refs and array_refs to
1766 a part of an aggregate *RES (which is of type TYPE). The requested part
1767 should have type EXP_TYPE and be at the given OFFSET. This function might not
1768 succeed, it returns true when it does and only then *RES points to something
1769 meaningful. This function should be used only to build expressions that we
1770 might need to present to user (e.g. in warnings). In all other situations,
1771 build_ref_for_model or build_ref_for_offset should be used instead. */
1772
1773 static bool
1774 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1775 tree exp_type)
1776 {
1777 while (1)
1778 {
1779 tree fld;
1780 tree tr_size, index, minidx;
1781 HOST_WIDE_INT el_size;
1782
1783 if (offset == 0 && exp_type
1784 && types_compatible_p (exp_type, type))
1785 return true;
1786
1787 switch (TREE_CODE (type))
1788 {
1789 case UNION_TYPE:
1790 case QUAL_UNION_TYPE:
1791 case RECORD_TYPE:
1792 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1793 {
1794 HOST_WIDE_INT pos, size;
1795 tree tr_pos, expr, *expr_ptr;
1796
1797 if (TREE_CODE (fld) != FIELD_DECL)
1798 continue;
1799
1800 tr_pos = bit_position (fld);
1801 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1802 continue;
1803 pos = tree_to_uhwi (tr_pos);
1804 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1805 tr_size = DECL_SIZE (fld);
1806 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1807 continue;
1808 size = tree_to_uhwi (tr_size);
1809 if (size == 0)
1810 {
1811 if (pos != offset)
1812 continue;
1813 }
1814 else if (pos > offset || (pos + size) <= offset)
1815 continue;
1816
1817 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1818 NULL_TREE);
1819 expr_ptr = &expr;
1820 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1821 offset - pos, exp_type))
1822 {
1823 *res = expr;
1824 return true;
1825 }
1826 }
1827 return false;
1828
1829 case ARRAY_TYPE:
1830 tr_size = TYPE_SIZE (TREE_TYPE (type));
1831 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1832 return false;
1833 el_size = tree_to_uhwi (tr_size);
1834
1835 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1836 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1837 return false;
1838 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1839 if (!integer_zerop (minidx))
1840 index = int_const_binop (PLUS_EXPR, index, minidx);
1841 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1842 NULL_TREE, NULL_TREE);
1843 offset = offset % el_size;
1844 type = TREE_TYPE (type);
1845 break;
1846
1847 default:
1848 if (offset != 0)
1849 return false;
1850
1851 if (exp_type)
1852 return false;
1853 else
1854 return true;
1855 }
1856 }
1857 }
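
/* A small illustration of the walk above, assuming 32-bit int and offsets
   expressed in bits (as elsewhere in this file):

     struct S { int a; int b; } s;
     int v[10];

   For *RES == s, TYPE == struct S, OFFSET == 32 and EXP_TYPE == int the loop
   descends through the RECORD_TYPE case and yields s.b.  For *RES == v,
   TYPE == int[10] and OFFSET == 96 the ARRAY_TYPE case computes the index
   96 / 32 == 3 and yields v[3].  */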
1858
1859 /* Return true iff TYPE is the stdarg va_list type. */
1860
1861 static inline bool
1862 is_va_list_type (tree type)
1863 {
1864 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1865 }
1866
1867 /* Print a message to the dump file explaining why a variable was rejected. */
1868
1869 static void
1870 reject (tree var, const char *msg)
1871 {
1872 if (dump_file && (dump_flags & TDF_DETAILS))
1873 {
1874 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1875 print_generic_expr (dump_file, var, 0);
1876 fprintf (dump_file, "\n");
1877 }
1878 }
1879
1880 /* Return true if VAR is a suitable candidate for SRA and register it if so. */
1881
1882 static bool
1883 maybe_add_sra_candidate (tree var)
1884 {
1885 tree type = TREE_TYPE (var);
1886 const char *msg;
1887 tree_node **slot;
1888
1889 if (!AGGREGATE_TYPE_P (type))
1890 {
1891 reject (var, "not aggregate");
1892 return false;
1893 }
1894 /* Allow constant-pool entries (that "need to live in memory")
1895 unless we are doing IPA SRA. */
1896 if (needs_to_live_in_memory (var)
1897 && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
1898 {
1899 reject (var, "needs to live in memory");
1900 return false;
1901 }
1902 if (TREE_THIS_VOLATILE (var))
1903 {
1904 reject (var, "is volatile");
1905 return false;
1906 }
1907 if (!COMPLETE_TYPE_P (type))
1908 {
1909 reject (var, "has incomplete type");
1910 return false;
1911 }
1912 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1913 {
1914 reject (var, "type size not fixed");
1915 return false;
1916 }
1917 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1918 {
1919 reject (var, "type size is zero");
1920 return false;
1921 }
1922 if (type_internals_preclude_sra_p (type, &msg))
1923 {
1924 reject (var, msg);
1925 return false;
1926 }
1927 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1928 we also want to schedule it rather late. Thus we ignore it in
1929 the early pass. */
1930 (sra_mode == SRA_MODE_EARLY_INTRA
1931 && is_va_list_type (type)))
1932 {
1933 reject (var, "is va_list");
1934 return false;
1935 }
1936
1937 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1938 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1939 *slot = var;
1940
1941 if (dump_file && (dump_flags & TDF_DETAILS))
1942 {
1943 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1944 print_generic_expr (dump_file, var, 0);
1945 fprintf (dump_file, "\n");
1946 }
1947
1948 return true;
1949 }
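
/* A few illustrative local declarations and how the checks above would
   classify them (variable names are made up; the quoted strings are the
   messages passed to reject):

     struct S { int i; float f; };

     struct S a;            candidate
     volatile struct S v;   rejected: "is volatile"
     struct S t; ... &t;    rejected: "needs to live in memory"
     int n;                 rejected: "not aggregate"  */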
1950
1951 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1952 those declarations whose type is suitable for scalarization. */
1953
1954 static bool
1955 find_var_candidates (void)
1956 {
1957 tree var, parm;
1958 unsigned int i;
1959 bool ret = false;
1960
1961 for (parm = DECL_ARGUMENTS (current_function_decl);
1962 parm;
1963 parm = DECL_CHAIN (parm))
1964 ret |= maybe_add_sra_candidate (parm);
1965
1966 FOR_EACH_LOCAL_DECL (cfun, i, var)
1967 {
1968 if (TREE_CODE (var) != VAR_DECL)
1969 continue;
1970
1971 ret |= maybe_add_sra_candidate (var);
1972 }
1973
1974 return ret;
1975 }
1976
1977 /* Sort all accesses for the given variable, check for partial overlaps and
1978 return NULL if there are any. If there are none, pick a representative for
1979 each combination of offset and size and create a linked list out of them.
1980 Return the pointer to the first representative and make sure it is the first
1981 one in the vector of accesses. */
1982
1983 static struct access *
1984 sort_and_splice_var_accesses (tree var)
1985 {
1986 int i, j, access_count;
1987 struct access *res, **prev_acc_ptr = &res;
1988 vec<access_p> *access_vec;
1989 bool first = true;
1990 HOST_WIDE_INT low = -1, high = 0;
1991
1992 access_vec = get_base_access_vector (var);
1993 if (!access_vec)
1994 return NULL;
1995 access_count = access_vec->length ();
1996
1997 /* Sort by <OFFSET, SIZE>. */
1998 access_vec->qsort (compare_access_positions);
1999
2000 i = 0;
2001 while (i < access_count)
2002 {
2003 struct access *access = (*access_vec)[i];
2004 bool grp_write = access->write;
2005 bool grp_read = !access->write;
2006 bool grp_scalar_write = access->write
2007 && is_gimple_reg_type (access->type);
2008 bool grp_scalar_read = !access->write
2009 && is_gimple_reg_type (access->type);
2010 bool grp_assignment_read = access->grp_assignment_read;
2011 bool grp_assignment_write = access->grp_assignment_write;
2012 bool multiple_scalar_reads = false;
2013 bool total_scalarization = access->grp_total_scalarization;
2014 bool grp_partial_lhs = access->grp_partial_lhs;
2015 bool first_scalar = is_gimple_reg_type (access->type);
2016 bool unscalarizable_region = access->grp_unscalarizable_region;
2017
2018 if (first || access->offset >= high)
2019 {
2020 first = false;
2021 low = access->offset;
2022 high = access->offset + access->size;
2023 }
2024 else if (access->offset > low && access->offset + access->size > high)
2025 return NULL;
2026 else
2027 gcc_assert (access->offset >= low
2028 && access->offset + access->size <= high);
2029
2030 j = i + 1;
2031 while (j < access_count)
2032 {
2033 struct access *ac2 = (*access_vec)[j];
2034 if (ac2->offset != access->offset || ac2->size != access->size)
2035 break;
2036 if (ac2->write)
2037 {
2038 grp_write = true;
2039 grp_scalar_write = (grp_scalar_write
2040 || is_gimple_reg_type (ac2->type));
2041 }
2042 else
2043 {
2044 grp_read = true;
2045 if (is_gimple_reg_type (ac2->type))
2046 {
2047 if (grp_scalar_read)
2048 multiple_scalar_reads = true;
2049 else
2050 grp_scalar_read = true;
2051 }
2052 }
2053 grp_assignment_read |= ac2->grp_assignment_read;
2054 grp_assignment_write |= ac2->grp_assignment_write;
2055 grp_partial_lhs |= ac2->grp_partial_lhs;
2056 unscalarizable_region |= ac2->grp_unscalarizable_region;
2057 total_scalarization |= ac2->grp_total_scalarization;
2058 relink_to_new_repr (access, ac2);
2059
2060 /* If there are both aggregate-type and scalar-type accesses with
2061 this combination of size and offset, the comparison function
2062 should have put the scalars first. */
2063 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2064 ac2->group_representative = access;
2065 j++;
2066 }
2067
2068 i = j;
2069
2070 access->group_representative = access;
2071 access->grp_write = grp_write;
2072 access->grp_read = grp_read;
2073 access->grp_scalar_read = grp_scalar_read;
2074 access->grp_scalar_write = grp_scalar_write;
2075 access->grp_assignment_read = grp_assignment_read;
2076 access->grp_assignment_write = grp_assignment_write;
2077 access->grp_hint = total_scalarization
2078 || (multiple_scalar_reads && !constant_decl_p (var));
2079 access->grp_total_scalarization = total_scalarization;
2080 access->grp_partial_lhs = grp_partial_lhs;
2081 access->grp_unscalarizable_region = unscalarizable_region;
2082 if (access->first_link)
2083 add_access_to_work_queue (access);
2084
2085 *prev_acc_ptr = access;
2086 prev_acc_ptr = &access->next_grp;
2087 }
2088
2089 gcc_assert (res == (*access_vec)[0]);
2090 return res;
2091 }
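
/* An illustration of the overlap rules above, in terms of <offset, size> bit
   ranges of recorded accesses (the numbers are made up):

     <0, 64>, <0, 32>, <32, 32>   fine: after sorting, <0, 32> and <32, 32>
                                  are fully contained in <0, 64>, so one
                                  representative per distinct pair is kept.
     <0, 32>, <16, 32>            the second access starts inside the first
                                  but ends outside it, i.e. they partially
                                  overlap, and NULL is returned.  */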
2092
2093 /* Create a variable for the given ACCESS which determines the type, name and a
2094 few other properties. Return the variable declaration; the caller is expected
2095 to store it in ACCESS->replacement_decl. */
2096
2097 static tree
2098 create_access_replacement (struct access *access)
2099 {
2100 tree repl;
2101
2102 if (access->grp_to_be_debug_replaced)
2103 {
2104 repl = create_tmp_var_raw (access->type);
2105 DECL_CONTEXT (repl) = current_function_decl;
2106 }
2107 else
2108 /* Drop any special alignment on the type if it's not on the main
2109 variant. This avoids issues with weirdo ABIs like AAPCS. */
2110 repl = create_tmp_var (build_qualified_type
2111 (TYPE_MAIN_VARIANT (access->type),
2112 TYPE_QUALS (access->type)), "SR");
2113 if (TREE_CODE (access->type) == COMPLEX_TYPE
2114 || TREE_CODE (access->type) == VECTOR_TYPE)
2115 {
2116 if (!access->grp_partial_lhs)
2117 DECL_GIMPLE_REG_P (repl) = 1;
2118 }
2119 else if (access->grp_partial_lhs
2120 && is_gimple_reg_type (access->type))
2121 TREE_ADDRESSABLE (repl) = 1;
2122
2123 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2124 DECL_ARTIFICIAL (repl) = 1;
2125 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2126
2127 if (DECL_NAME (access->base)
2128 && !DECL_IGNORED_P (access->base)
2129 && !DECL_ARTIFICIAL (access->base))
2130 {
2131 char *pretty_name = make_fancy_name (access->expr);
2132 tree debug_expr = unshare_expr_without_location (access->expr), d;
2133 bool fail = false;
2134
2135 DECL_NAME (repl) = get_identifier (pretty_name);
2136 DECL_NAMELESS (repl) = 1;
2137 obstack_free (&name_obstack, pretty_name);
2138
2139 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2140 as DECL_DEBUG_EXPR isn't considered when looking for still
2141 used SSA_NAMEs and thus they could be freed. All debug info
2142 generation cares about is whether something is constant or variable
2143 and that get_ref_base_and_extent works properly on the
2144 expression. It cannot handle accesses at a non-constant offset
2145 though, so just give up in those cases. */
2146 for (d = debug_expr;
2147 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2148 d = TREE_OPERAND (d, 0))
2149 switch (TREE_CODE (d))
2150 {
2151 case ARRAY_REF:
2152 case ARRAY_RANGE_REF:
2153 if (TREE_OPERAND (d, 1)
2154 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2155 fail = true;
2156 if (TREE_OPERAND (d, 3)
2157 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2158 fail = true;
2159 /* FALLTHRU */
2160 case COMPONENT_REF:
2161 if (TREE_OPERAND (d, 2)
2162 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2163 fail = true;
2164 break;
2165 case MEM_REF:
2166 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2167 fail = true;
2168 else
2169 d = TREE_OPERAND (d, 0);
2170 break;
2171 default:
2172 break;
2173 }
2174 if (!fail)
2175 {
2176 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2177 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2178 }
2179 if (access->grp_no_warning)
2180 TREE_NO_WARNING (repl) = 1;
2181 else
2182 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2183 }
2184 else
2185 TREE_NO_WARNING (repl) = 1;
2186
2187 if (dump_file)
2188 {
2189 if (access->grp_to_be_debug_replaced)
2190 {
2191 fprintf (dump_file, "Created a debug-only replacement for ");
2192 print_generic_expr (dump_file, access->base, 0);
2193 fprintf (dump_file, " offset: %u, size: %u\n",
2194 (unsigned) access->offset, (unsigned) access->size);
2195 }
2196 else
2197 {
2198 fprintf (dump_file, "Created a replacement for ");
2199 print_generic_expr (dump_file, access->base, 0);
2200 fprintf (dump_file, " offset: %u, size: %u: ",
2201 (unsigned) access->offset, (unsigned) access->size);
2202 print_generic_expr (dump_file, repl, 0);
2203 fprintf (dump_file, "\n");
2204 }
2205 }
2206 sra_stats.replacements++;
2207
2208 return repl;
2209 }
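
/* Illustrative sketch: for an access representing s.i in

     struct S { int i; float f; } s;

   this function typically creates an artificial integer variable that shows
   up in dumps under a name such as "s$i" or "SR.3" (the exact name and
   number are of course arbitrary) and whose DECL_DEBUG_EXPR is the
   expression s.i, so that debug information keeps referring to the user
   variable.  */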
2210
2211 /* Return the scalar replacement of ACCESS, which must exist. */
2212
2213 static inline tree
2214 get_access_replacement (struct access *access)
2215 {
2216 gcc_checking_assert (access->replacement_decl);
2217 return access->replacement_decl;
2218 }
2219
2220
2221 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2222 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2223 to by it is not "within" the root. Return false iff some accesses partially
2224 overlap. */
2225
2226 static bool
2227 build_access_subtree (struct access **access)
2228 {
2229 struct access *root = *access, *last_child = NULL;
2230 HOST_WIDE_INT limit = root->offset + root->size;
2231
2232 *access = (*access)->next_grp;
2233 while (*access && (*access)->offset + (*access)->size <= limit)
2234 {
2235 if (!last_child)
2236 root->first_child = *access;
2237 else
2238 last_child->next_sibling = *access;
2239 last_child = *access;
2240
2241 if (!build_access_subtree (access))
2242 return false;
2243 }
2244
2245 if (*access && (*access)->offset < limit)
2246 return false;
2247
2248 return true;
2249 }
2250
2251 /* Build a tree of access representatives. ACCESS is the pointer to the first
2252 one; others are linked in a list by the next_grp field. Return false iff
2253 some accesses partially overlap. */
2254
2255 static bool
2256 build_access_trees (struct access *access)
2257 {
2258 while (access)
2259 {
2260 struct access *root = access;
2261
2262 if (!build_access_subtree (&access))
2263 return false;
2264 root->next_grp = access;
2265 }
2266 return true;
2267 }
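
/* For example, representatives sorted as <0, 64>, <0, 32>, <32, 32> (offsets
   and sizes in bits, values made up) result in a single tree whose root is
   <0, 64> with first_child <0, 32>, which in turn has <32, 32> as its
   next_sibling, since both smaller accesses lie within the root but not
   within each other.  */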
2268
2269 /* Return true if EXPR contains some ARRAY_REFs into a variably bounded
2270 array. */
2271
2272 static bool
2273 expr_with_var_bounded_array_refs_p (tree expr)
2274 {
2275 while (handled_component_p (expr))
2276 {
2277 if (TREE_CODE (expr) == ARRAY_REF
2278 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2279 return true;
2280 expr = TREE_OPERAND (expr, 0);
2281 }
2282 return false;
2283 }
2284
2285 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2286 that seems beneficial and ALLOW_REPLACEMENTS allows it. Also set all sorts
2287 of access flags appropriately along the way, notably propagating grp_read,
2288 grp_assignment_read, grp_write, grp_assignment_write and
2289 grp_total_scalarization from PARENT to ROOT.
2290
2291 Creating a replacement for a scalar access is considered beneficial if its
2292 grp_hint is set (this means we are either attempting total scalarization or
2293 there is more than one direct read access) or according to the following
2294 table:
2295
2296 Access written to through a scalar type (once or more times)
2297 |
2298 | Written to in an assignment statement
2299 | |
2300 | | Access read as scalar _once_
2301 | | |
2302 | | | Read in an assignment statement
2303 | | | |
2304 | | | | Scalarize Comment
2305 -----------------------------------------------------------------------------
2306 0 0 0 0 No access for the scalar
2307 0 0 0 1 No access for the scalar
2308 0 0 1 0 No Single read - won't help
2309 0 0 1 1 No The same case
2310 0 1 0 0 No access for the scalar
2311 0 1 0 1 No access for the scalar
2312 0 1 1 0 Yes s = *g; return s.i;
2313 0 1 1 1 Yes The same case as above
2314 1 0 0 0 No Won't help
2315 1 0 0 1 Yes s.i = 1; *g = s;
2316 1 0 1 0 Yes s.i = 5; g = s.i;
2317 1 0 1 1 Yes The same case as above
2318 1 1 0 0 No Won't help.
2319 1 1 0 1 Yes s.i = 1; *g = s;
2320 1 1 1 0 Yes s = *g; return s.i;
2321 1 1 1 1 Yes Any of the above yeses */
2322
2323 static bool
2324 analyze_access_subtree (struct access *root, struct access *parent,
2325 bool allow_replacements)
2326 {
2327 struct access *child;
2328 HOST_WIDE_INT limit = root->offset + root->size;
2329 HOST_WIDE_INT covered_to = root->offset;
2330 bool scalar = is_gimple_reg_type (root->type);
2331 bool hole = false, sth_created = false;
2332
2333 if (parent)
2334 {
2335 if (parent->grp_read)
2336 root->grp_read = 1;
2337 if (parent->grp_assignment_read)
2338 root->grp_assignment_read = 1;
2339 if (parent->grp_write)
2340 root->grp_write = 1;
2341 if (parent->grp_assignment_write)
2342 root->grp_assignment_write = 1;
2343 if (parent->grp_total_scalarization)
2344 root->grp_total_scalarization = 1;
2345 }
2346
2347 if (root->grp_unscalarizable_region)
2348 allow_replacements = false;
2349
2350 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2351 allow_replacements = false;
2352
2353 for (child = root->first_child; child; child = child->next_sibling)
2354 {
2355 hole |= covered_to < child->offset;
2356 sth_created |= analyze_access_subtree (child, root,
2357 allow_replacements && !scalar);
2358
2359 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2360 root->grp_total_scalarization &= child->grp_total_scalarization;
2361 if (child->grp_covered)
2362 covered_to += child->size;
2363 else
2364 hole = true;
2365 }
2366
2367 if (allow_replacements && scalar && !root->first_child
2368 && (root->grp_hint
2369 || ((root->grp_scalar_read || root->grp_assignment_read)
2370 && (root->grp_scalar_write || root->grp_assignment_write))))
2371 {
2372 /* Always create access replacements that cover the whole access.
2373 For integral types this means the precision has to match.
2374 Avoid assumptions based on the integral type kind, too. */
2375 if (INTEGRAL_TYPE_P (root->type)
2376 && (TREE_CODE (root->type) != INTEGER_TYPE
2377 || TYPE_PRECISION (root->type) != root->size)
2378 /* But leave bitfield accesses alone. */
2379 && (TREE_CODE (root->expr) != COMPONENT_REF
2380 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2381 {
2382 tree rt = root->type;
2383 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2384 && (root->size % BITS_PER_UNIT) == 0);
2385 root->type = build_nonstandard_integer_type (root->size,
2386 TYPE_UNSIGNED (rt));
2387 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2388 root->offset, root->reverse,
2389 root->type, NULL, false);
2390
2391 if (dump_file && (dump_flags & TDF_DETAILS))
2392 {
2393 fprintf (dump_file, "Changing the type of a replacement for ");
2394 print_generic_expr (dump_file, root->base, 0);
2395 fprintf (dump_file, " offset: %u, size: %u ",
2396 (unsigned) root->offset, (unsigned) root->size);
2397 fprintf (dump_file, " to an integer.\n");
2398 }
2399 }
2400
2401 root->grp_to_be_replaced = 1;
2402 root->replacement_decl = create_access_replacement (root);
2403 sth_created = true;
2404 hole = false;
2405 }
2406 else
2407 {
2408 if (allow_replacements
2409 && scalar && !root->first_child
2410 && (root->grp_scalar_write || root->grp_assignment_write)
2411 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2412 DECL_UID (root->base)))
2413 {
2414 gcc_checking_assert (!root->grp_scalar_read
2415 && !root->grp_assignment_read);
2416 sth_created = true;
2417 if (MAY_HAVE_DEBUG_STMTS)
2418 {
2419 root->grp_to_be_debug_replaced = 1;
2420 root->replacement_decl = create_access_replacement (root);
2421 }
2422 }
2423
2424 if (covered_to < limit)
2425 hole = true;
2426 if (scalar || !allow_replacements)
2427 root->grp_total_scalarization = 0;
2428 }
2429
2430 if (!hole || root->grp_total_scalarization)
2431 root->grp_covered = 1;
2432 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL
2433 || constant_decl_p (root->base))
2434 root->grp_unscalarized_data = 1; /* not covered and written to */
2435 return sth_created;
2436 }
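
/* To make the table above concrete, consider this sketch (names made up):

     struct S { int i; int j; };
     extern struct S *g;
     int f (void) { struct S s; s = *g; return s.i; }

   The access for s.i is never written to through a scalar type, is written
   by the aggregate assignment s = *g, and is read exactly once as a scalar,
   i.e. row "0 1 1 0" of the table, so a scalar replacement is scheduled for
   it even though it is read only once.  */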
2437
2438 /* Analyze all access trees linked by next_grp by means of
2439 analyze_access_subtree. */
2440 static bool
2441 analyze_access_trees (struct access *access)
2442 {
2443 bool ret = false;
2444
2445 while (access)
2446 {
2447 if (analyze_access_subtree (access, NULL, true))
2448 ret = true;
2449 access = access->next_grp;
2450 }
2451
2452 return ret;
2453 }
2454
2455 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
2456 size SIZE would conflict with an already existing one. If exactly such a child
2457 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2458
2459 static bool
2460 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2461 HOST_WIDE_INT size, struct access **exact_match)
2462 {
2463 struct access *child;
2464
2465 for (child = lacc->first_child; child; child = child->next_sibling)
2466 {
2467 if (child->offset == norm_offset && child->size == size)
2468 {
2469 *exact_match = child;
2470 return true;
2471 }
2472
2473 if (child->offset < norm_offset + size
2474 && child->offset + child->size > norm_offset)
2475 return true;
2476 }
2477
2478 return false;
2479 }
2480
2481 /* Create a new child access of PARENT, with all properties just like MODEL
2482 except for its offset and with its grp_write true and grp_read false.
2483 Return the new access or NULL if it cannot be created. Note that this access
2484 is created long after all splicing and sorting; it's not located in any
2485 access vector and is automatically a representative of its group. */
2486
2487 static struct access *
2488 create_artificial_child_access (struct access *parent, struct access *model,
2489 HOST_WIDE_INT new_offset)
2490 {
2491 struct access **child;
2492 tree expr = parent->base;
2493
2494 gcc_assert (!model->grp_unscalarizable_region);
2495
2496 struct access *access = access_pool.allocate ();
2497 memset (access, 0, sizeof (struct access));
2498 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2499 model->type))
2500 {
2501 access->grp_no_warning = true;
2502 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2503 new_offset, model, NULL, false);
2504 }
2505
2506 access->base = parent->base;
2507 access->expr = expr;
2508 access->offset = new_offset;
2509 access->size = model->size;
2510 access->type = model->type;
2511 access->grp_write = true;
2512 access->grp_read = false;
2513 access->reverse = model->reverse;
2514
2515 child = &parent->first_child;
2516 while (*child && (*child)->offset < new_offset)
2517 child = &(*child)->next_sibling;
2518
2519 access->next_sibling = *child;
2520 *child = access;
2521
2522 return access;
2523 }
2524
2525
2526 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2527 true if any new subaccess was created. Additionally, if RACC is a scalar
2528 access but LACC is not, change the type of the latter, if possible. */
2529
2530 static bool
2531 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2532 {
2533 struct access *rchild;
2534 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2535 bool ret = false;
2536
2537 if (is_gimple_reg_type (lacc->type)
2538 || lacc->grp_unscalarizable_region
2539 || racc->grp_unscalarizable_region)
2540 return false;
2541
2542 if (is_gimple_reg_type (racc->type))
2543 {
2544 if (!lacc->first_child && !racc->first_child)
2545 {
2546 tree t = lacc->base;
2547
2548 lacc->type = racc->type;
2549 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2550 lacc->offset, racc->type))
2551 lacc->expr = t;
2552 else
2553 {
2554 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2555 lacc->base, lacc->offset,
2556 racc, NULL, false);
2557 lacc->grp_no_warning = true;
2558 }
2559 }
2560 return false;
2561 }
2562
2563 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2564 {
2565 struct access *new_acc = NULL;
2566 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2567
2568 if (rchild->grp_unscalarizable_region)
2569 continue;
2570
2571 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2572 &new_acc))
2573 {
2574 if (new_acc)
2575 {
2576 rchild->grp_hint = 1;
2577 new_acc->grp_hint |= new_acc->grp_read;
2578 if (rchild->first_child)
2579 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2580 }
2581 continue;
2582 }
2583
2584 rchild->grp_hint = 1;
2585 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2586 if (new_acc)
2587 {
2588 ret = true;
2589 if (racc->first_child)
2590 propagate_subaccesses_across_link (new_acc, rchild);
2591 }
2592 }
2593
2594 return ret;
2595 }
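
/* A sketch of what the propagation achieves (declarations made up, assuming
   32-bit int):

     struct S { int i; float f; } a, b;
     ...
     a = b;
     ... = b.i;

   Scanning created an access <0, 32> for b.i and an assign link for a = b.
   Propagating across that link creates an artificial child access at <0, 32>
   under a as well, so that both sides of the aggregate copy can end up with
   matching scalar replacements and the copy can later be performed
   member-wise instead of as a block copy.  */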
2596
2597 /* Propagate all subaccesses across assignment links. */
2598
2599 static void
2600 propagate_all_subaccesses (void)
2601 {
2602 while (work_queue_head)
2603 {
2604 struct access *racc = pop_access_from_work_queue ();
2605 struct assign_link *link;
2606
2607 gcc_assert (racc->first_link);
2608
2609 for (link = racc->first_link; link; link = link->next)
2610 {
2611 struct access *lacc = link->lacc;
2612
2613 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2614 continue;
2615 lacc = lacc->group_representative;
2616 if (propagate_subaccesses_across_link (lacc, racc)
2617 && lacc->first_link)
2618 add_access_to_work_queue (lacc);
2619 }
2620 }
2621 }
2622
2623 /* Go through all accesses collected throughout the (intraprocedural) analysis
2624 stage, exclude overlapping ones, identify representatives and build trees
2625 out of them, making decisions about scalarization on the way. Return true
2626 iff there are any to-be-scalarized variables after this stage. */
2627
2628 static bool
2629 analyze_all_variable_accesses (void)
2630 {
2631 int res = 0;
2632 bitmap tmp = BITMAP_ALLOC (NULL);
2633 bitmap_iterator bi;
2634 unsigned i;
2635 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2636
2637 enum compiler_param param = optimize_speed_p
2638 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2639 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2640
2641 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2642 fall back to a target default. */
2643 unsigned HOST_WIDE_INT max_scalarization_size
2644 = global_options_set.x_param_values[param]
2645 ? PARAM_VALUE (param)
2646 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2647
2648 max_scalarization_size *= BITS_PER_UNIT;
2649
2650 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2651 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2652 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2653 {
2654 tree var = candidate (i);
2655
2656 if (TREE_CODE (var) == VAR_DECL
2657 && scalarizable_type_p (TREE_TYPE (var)))
2658 {
2659 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2660 <= max_scalarization_size)
2661 {
2662 create_total_scalarization_access (var);
2663 completely_scalarize (var, TREE_TYPE (var), 0, var);
2664 if (dump_file && (dump_flags & TDF_DETAILS))
2665 {
2666 fprintf (dump_file, "Will attempt to totally scalarize ");
2667 print_generic_expr (dump_file, var, 0);
2668 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2669 }
2670 }
2671 else if (dump_file && (dump_flags & TDF_DETAILS))
2672 {
2673 fprintf (dump_file, "Too big to totally scalarize: ");
2674 print_generic_expr (dump_file, var, 0);
2675 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2676 }
2677 }
2678 }
2679
2680 bitmap_copy (tmp, candidate_bitmap);
2681 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2682 {
2683 tree var = candidate (i);
2684 struct access *access;
2685
2686 access = sort_and_splice_var_accesses (var);
2687 if (!access || !build_access_trees (access))
2688 disqualify_candidate (var,
2689 "No or inhibitingly overlapping accesses.");
2690 }
2691
2692 propagate_all_subaccesses ();
2693
2694 bitmap_copy (tmp, candidate_bitmap);
2695 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2696 {
2697 tree var = candidate (i);
2698 struct access *access = get_first_repr_for_decl (var);
2699
2700 if (analyze_access_trees (access))
2701 {
2702 res++;
2703 if (dump_file && (dump_flags & TDF_DETAILS))
2704 {
2705 fprintf (dump_file, "\nAccess trees for ");
2706 print_generic_expr (dump_file, var, 0);
2707 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2708 dump_access_tree (dump_file, access);
2709 fprintf (dump_file, "\n");
2710 }
2711 }
2712 else
2713 disqualify_candidate (var, "No scalar replacements to be created.");
2714 }
2715
2716 BITMAP_FREE (tmp);
2717
2718 if (res)
2719 {
2720 statistics_counter_event (cfun, "Scalarized aggregates", res);
2721 return true;
2722 }
2723 else
2724 return false;
2725 }
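
/* A worked example of the size limit computed above: if the parameter is not
   set explicitly and get_move_ratio returns 4 on a target with 8-byte words,
   the limit is 4 * 8 = 32 bytes, i.e. 32 * BITS_PER_UNIT = 256 bits, so only
   aggregates of at most 256 bits are considered for total scalarization
   (the numbers are purely illustrative).  */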
2726
2727 /* Generate statements copying scalar replacements of accesses within a subtree
2728 into or out of AGG. ACCESS, all its children, siblings and their children
2729 are to be processed. AGG is an aggregate type expression (can be a
2730 declaration but does not have to be, it can for example also be a mem_ref or
2731 a series of handled components). TOP_OFFSET is the offset of the processed
2732 subtree which has to be subtracted from offsets of individual accesses to
2733 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2734 replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
2735 otherwise copy all. GSI is a statement iterator used to place the new
2736 statements. WRITE should be true when the statements should write from AGG
2737 to the replacement and false when data should flow the other way. If
2738 INSERT_AFTER is true, new statements will be added after the current statement
2739 in GSI; otherwise they will be added before it. */
2740
2741 static void
2742 generate_subtree_copies (struct access *access, tree agg,
2743 HOST_WIDE_INT top_offset,
2744 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2745 gimple_stmt_iterator *gsi, bool write,
2746 bool insert_after, location_t loc)
2747 {
2748 /* Never write anything into constant pool decls. See PR70602. */
2749 if (!write && constant_decl_p (agg))
2750 return;
2751 do
2752 {
2753 if (chunk_size && access->offset >= start_offset + chunk_size)
2754 return;
2755
2756 if (access->grp_to_be_replaced
2757 && (chunk_size == 0
2758 || access->offset + access->size > start_offset))
2759 {
2760 tree expr, repl = get_access_replacement (access);
2761 gassign *stmt;
2762
2763 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2764 access, gsi, insert_after);
2765
2766 if (write)
2767 {
2768 if (access->grp_partial_lhs)
2769 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2770 !insert_after,
2771 insert_after ? GSI_NEW_STMT
2772 : GSI_SAME_STMT);
2773 stmt = gimple_build_assign (repl, expr);
2774 }
2775 else
2776 {
2777 TREE_NO_WARNING (repl) = 1;
2778 if (access->grp_partial_lhs)
2779 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2780 !insert_after,
2781 insert_after ? GSI_NEW_STMT
2782 : GSI_SAME_STMT);
2783 stmt = gimple_build_assign (expr, repl);
2784 }
2785 gimple_set_location (stmt, loc);
2786
2787 if (insert_after)
2788 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2789 else
2790 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2791 update_stmt (stmt);
2792 sra_stats.subtree_copies++;
2793 }
2794 else if (write
2795 && access->grp_to_be_debug_replaced
2796 && (chunk_size == 0
2797 || access->offset + access->size > start_offset))
2798 {
2799 gdebug *ds;
2800 tree drhs = build_debug_ref_for_model (loc, agg,
2801 access->offset - top_offset,
2802 access);
2803 ds = gimple_build_debug_bind (get_access_replacement (access),
2804 drhs, gsi_stmt (*gsi));
2805 if (insert_after)
2806 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2807 else
2808 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2809 }
2810
2811 if (access->first_child)
2812 generate_subtree_copies (access->first_child, agg, top_offset,
2813 start_offset, chunk_size, gsi,
2814 write, insert_after, loc);
2815
2816 access = access->next_sibling;
2817 }
2818 while (access);
2819 }
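
/* For instance, when an aggregate with scalar replacements is passed by value
   to a call, sra_modify_expr uses this function with WRITE false to flush the
   replacements back into the aggregate before the call, so that, roughly
   (the replacement names are illustrative),

     struct S { int i; float f; } s;
     ...
     foo (s);

   becomes

     s.i = s$i;
     s.f = s$f;
     foo (s);  */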
2820
2821 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2822 root of the subtree to be processed. GSI is the statement iterator used
2823 for inserting statements which are added after the current statement if
2824 INSERT_AFTER is true or before it otherwise. */
2825
2826 static void
2827 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2828 bool insert_after, location_t loc)
2829
2830 {
2831 struct access *child;
2832
2833 if (access->grp_to_be_replaced)
2834 {
2835 gassign *stmt;
2836
2837 stmt = gimple_build_assign (get_access_replacement (access),
2838 build_zero_cst (access->type));
2839 if (insert_after)
2840 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2841 else
2842 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2843 update_stmt (stmt);
2844 gimple_set_location (stmt, loc);
2845 }
2846 else if (access->grp_to_be_debug_replaced)
2847 {
2848 gdebug *ds
2849 = gimple_build_debug_bind (get_access_replacement (access),
2850 build_zero_cst (access->type),
2851 gsi_stmt (*gsi));
2852 if (insert_after)
2853 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2854 else
2855 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2856 }
2857
2858 for (child = access->first_child; child; child = child->next_sibling)
2859 init_subtree_with_zero (child, gsi, insert_after, loc);
2860 }
2861
2862 /* Clobber all scalar replacements in an access subtree. ACCESS is the
2863 root of the subtree to be processed. GSI is the statement iterator used
2864 for inserting statements which are added after the current statement if
2865 INSERT_AFTER is true or before it otherwise. */
2866
2867 static void
2868 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2869 bool insert_after, location_t loc)
2870
2871 {
2872 struct access *child;
2873
2874 if (access->grp_to_be_replaced)
2875 {
2876 tree rep = get_access_replacement (access);
2877 tree clobber = build_constructor (access->type, NULL);
2878 TREE_THIS_VOLATILE (clobber) = 1;
2879 gimple *stmt = gimple_build_assign (rep, clobber);
2880
2881 if (insert_after)
2882 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2883 else
2884 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2885 update_stmt (stmt);
2886 gimple_set_location (stmt, loc);
2887 }
2888
2889 for (child = access->first_child; child; child = child->next_sibling)
2890 clobber_subtree (child, gsi, insert_after, loc);
2891 }
2892
2893 /* Search for an access representative for the given expression EXPR and
2894 return it or NULL if it cannot be found. */
2895
2896 static struct access *
2897 get_access_for_expr (tree expr)
2898 {
2899 HOST_WIDE_INT offset, size, max_size;
2900 tree base;
2901 bool reverse;
2902
2903 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2904 a different size than the size of its argument and we need the latter
2905 one. */
2906 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2907 expr = TREE_OPERAND (expr, 0);
2908
2909 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
2910 if (max_size == -1 || !DECL_P (base))
2911 return NULL;
2912
2913 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2914 return NULL;
2915
2916 return get_var_base_offset_size_access (base, offset, max_size);
2917 }
2918
2919 /* Replace the expression EXPR with a scalar replacement if there is one and
2920 generate other statements to do type conversion or subtree copying if
2921 necessary. GSI is used to place newly created statements; WRITE is true if
2922 the expression is being written to (it is on a LHS of a statement or output
2923 in an assembly statement). */
2924
2925 static bool
2926 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2927 {
2928 location_t loc;
2929 struct access *access;
2930 tree type, bfr, orig_expr;
2931
2932 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2933 {
2934 bfr = *expr;
2935 expr = &TREE_OPERAND (*expr, 0);
2936 }
2937 else
2938 bfr = NULL_TREE;
2939
2940 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2941 expr = &TREE_OPERAND (*expr, 0);
2942 access = get_access_for_expr (*expr);
2943 if (!access)
2944 return false;
2945 type = TREE_TYPE (*expr);
2946 orig_expr = *expr;
2947
2948 loc = gimple_location (gsi_stmt (*gsi));
2949 gimple_stmt_iterator alt_gsi = gsi_none ();
2950 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2951 {
2952 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2953 gsi = &alt_gsi;
2954 }
2955
2956 if (access->grp_to_be_replaced)
2957 {
2958 tree repl = get_access_replacement (access);
2959 /* If we replace a non-register typed access simply use the original
2960 access expression to extract the scalar component afterwards.
2961 This happens if scalarizing a function return value or parameter
2962 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2963 gcc.c-torture/compile/20011217-1.c.
2964
2965 We also want to use this when accessing a complex or vector which can
2966 be accessed as a different type too, potentially creating a need for
2967 type conversion (see PR42196) and when scalarized unions are involved
2968 in assembler statements (see PR42398). */
2969 if (!useless_type_conversion_p (type, access->type))
2970 {
2971 tree ref;
2972
2973 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
2974
2975 if (write)
2976 {
2977 gassign *stmt;
2978
2979 if (access->grp_partial_lhs)
2980 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2981 false, GSI_NEW_STMT);
2982 stmt = gimple_build_assign (repl, ref);
2983 gimple_set_location (stmt, loc);
2984 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2985 }
2986 else
2987 {
2988 gassign *stmt;
2989
2990 if (access->grp_partial_lhs)
2991 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2992 true, GSI_SAME_STMT);
2993 stmt = gimple_build_assign (ref, repl);
2994 gimple_set_location (stmt, loc);
2995 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2996 }
2997 }
2998 else
2999 *expr = repl;
3000 sra_stats.exprs++;
3001 }
3002 else if (write && access->grp_to_be_debug_replaced)
3003 {
3004 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3005 NULL_TREE,
3006 gsi_stmt (*gsi));
3007 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3008 }
3009
3010 if (access->first_child)
3011 {
3012 HOST_WIDE_INT start_offset, chunk_size;
3013 if (bfr
3014 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3015 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3016 {
3017 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3018 start_offset = access->offset
3019 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3020 }
3021 else
3022 start_offset = chunk_size = 0;
3023
3024 generate_subtree_copies (access->first_child, orig_expr, access->offset,
3025 start_offset, chunk_size, gsi, write, write,
3026 loc);
3027 }
3028 return true;
3029 }
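
/* Two sketches of what the function above does (replacement names are
   illustrative):

     x_1 = s.i;      becomes      x_1 = s$i;

   when s.i has a scalar replacement of a compatible type.  When the
   conversion between the expression type and the replacement type is not
   useless, e.g. the same bits are accessed through a different union member,
   an extra load or store through the original reference is emitted instead,
   as in the branch guarded by !useless_type_conversion_p above.  */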
3030
3031 /* Where scalar replacements of the RHS have been written to when a replacement
3032 of the LHS of an assignment cannot be directly loaded from a replacement of
3033 the RHS. */
3034 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3035 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3036 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3037
3038 struct subreplacement_assignment_data
3039 {
3040 /* Offset of the access representing the lhs of the assignment. */
3041 HOST_WIDE_INT left_offset;
3042
3043 /* LHS and RHS of the original assignment. */
3044 tree assignment_lhs, assignment_rhs;
3045
3046 /* Access representing the rhs of the whole assignment. */
3047 struct access *top_racc;
3048
3049 /* Stmt iterator used for statement insertions after the original assignment.
3050 It points to the main GSI used to traverse a BB during function body
3051 modification. */
3052 gimple_stmt_iterator *new_gsi;
3053
3054 /* Stmt iterator used for statement insertions before the original
3055 assignment. Keeps on pointing to the original statement. */
3056 gimple_stmt_iterator old_gsi;
3057
3058 /* Location of the assignment. */
3059 location_t loc;
3060
3061 /* Keeps the information whether we have needed to refresh replacements of
3062 the LHS and from which side of the assignments this takes place. */
3063 enum unscalarized_data_handling refreshed;
3064 };
3065
3066 /* Store all replacements in the access tree rooted in SAD->top_racc either to
3067 their base aggregate if there are unscalarized data or directly to the LHS of
3068 the original assignment statement otherwise. */
3069
3070 static void
3071 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3072 {
3073 tree src;
3074 if (sad->top_racc->grp_unscalarized_data)
3075 {
3076 src = sad->assignment_rhs;
3077 sad->refreshed = SRA_UDH_RIGHT;
3078 }
3079 else
3080 {
3081 src = sad->assignment_lhs;
3082 sad->refreshed = SRA_UDH_LEFT;
3083 }
3084 generate_subtree_copies (sad->top_racc->first_child, src,
3085 sad->top_racc->offset, 0, 0,
3086 &sad->old_gsi, false, false, sad->loc);
3087 }
3088
3089 /* Try to generate statements to load all sub-replacements in an access subtree
3090 formed by children of LACC from scalar replacements in the SAD->top_racc
3091 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3092 and load the accesses from it. */
3093
3094 static void
3095 load_assign_lhs_subreplacements (struct access *lacc,
3096 struct subreplacement_assignment_data *sad)
3097 {
3098 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3099 {
3100 HOST_WIDE_INT offset;
3101 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3102
3103 if (lacc->grp_to_be_replaced)
3104 {
3105 struct access *racc;
3106 gassign *stmt;
3107 tree rhs;
3108
3109 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3110 if (racc && racc->grp_to_be_replaced)
3111 {
3112 rhs = get_access_replacement (racc);
3113 if (!useless_type_conversion_p (lacc->type, racc->type))
3114 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3115 lacc->type, rhs);
3116
3117 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3118 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3119 NULL_TREE, true, GSI_SAME_STMT);
3120 }
3121 else
3122 {
3123 /* No suitable access on the right hand side, need to load from
3124 the aggregate. See if we have to update it first... */
3125 if (sad->refreshed == SRA_UDH_NONE)
3126 handle_unscalarized_data_in_subtree (sad);
3127
3128 if (sad->refreshed == SRA_UDH_LEFT)
3129 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3130 lacc->offset - sad->left_offset,
3131 lacc, sad->new_gsi, true);
3132 else
3133 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3134 lacc->offset - sad->left_offset,
3135 lacc, sad->new_gsi, true);
3136 if (lacc->grp_partial_lhs)
3137 rhs = force_gimple_operand_gsi (sad->new_gsi,
3138 rhs, true, NULL_TREE,
3139 false, GSI_NEW_STMT);
3140 }
3141
3142 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3143 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3144 gimple_set_location (stmt, sad->loc);
3145 update_stmt (stmt);
3146 sra_stats.subreplacements++;
3147 }
3148 else
3149 {
3150 if (sad->refreshed == SRA_UDH_NONE
3151 && lacc->grp_read && !lacc->grp_covered)
3152 handle_unscalarized_data_in_subtree (sad);
3153
3154 if (lacc && lacc->grp_to_be_debug_replaced)
3155 {
3156 gdebug *ds;
3157 tree drhs;
3158 struct access *racc = find_access_in_subtree (sad->top_racc,
3159 offset,
3160 lacc->size);
3161
3162 if (racc && racc->grp_to_be_replaced)
3163 {
3164 if (racc->grp_write || constant_decl_p (racc->base))
3165 drhs = get_access_replacement (racc);
3166 else
3167 drhs = NULL;
3168 }
3169 else if (sad->refreshed == SRA_UDH_LEFT)
3170 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3171 lacc->offset, lacc);
3172 else if (sad->refreshed == SRA_UDH_RIGHT)
3173 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3174 offset, lacc);
3175 else
3176 drhs = NULL_TREE;
3177 if (drhs
3178 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3179 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3180 lacc->type, drhs);
3181 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3182 drhs, gsi_stmt (sad->old_gsi));
3183 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3184 }
3185 }
3186
3187 if (lacc->first_child)
3188 load_assign_lhs_subreplacements (lacc, sad);
3189 }
3190 }
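
/* A sketch of the effect on an aggregate copy where both sides have scalar
   replacements (names are illustrative):

     struct S { int i; float f; } a, b;
     ...
     a = b;

   is rewritten by sra_modify_assign, using this function, roughly into

     a$i = b$i;
     a$f = b$f;

   and, when nothing unscalarized had to be flushed to the RHS, the original
   aggregate assignment is removed afterwards.  */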
3191
3192 /* Result code for SRA assignment modification. */
3193 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3194 SRA_AM_MODIFIED, /* stmt changed but not
3195 removed */
3196 SRA_AM_REMOVED }; /* stmt eliminated */
3197
3198 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT is the assignment
3199 statement and GSI is the statement iterator pointing at it. Returns
3200 the same values as sra_modify_assign. */
3201
3202 static enum assignment_mod_result
3203 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3204 {
3205 tree lhs = gimple_assign_lhs (stmt);
3206 struct access *acc = get_access_for_expr (lhs);
3207 if (!acc)
3208 return SRA_AM_NONE;
3209 location_t loc = gimple_location (stmt);
3210
3211 if (gimple_clobber_p (stmt))
3212 {
3213 /* Clobber the replacement variable. */
3214 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3215 /* Remove clobbers of fully scalarized variables, they are dead. */
3216 if (acc->grp_covered)
3217 {
3218 unlink_stmt_vdef (stmt);
3219 gsi_remove (gsi, true);
3220 release_defs (stmt);
3221 return SRA_AM_REMOVED;
3222 }
3223 else
3224 return SRA_AM_MODIFIED;
3225 }
3226
3227 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
3228 {
3229 /* I have never seen this code path trigger but if it can happen the
3230 following should handle it gracefully. */
3231 if (access_has_children_p (acc))
3232 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3233 true, true, loc);
3234 return SRA_AM_MODIFIED;
3235 }
3236
3237 if (acc->grp_covered)
3238 {
3239 init_subtree_with_zero (acc, gsi, false, loc);
3240 unlink_stmt_vdef (stmt);
3241 gsi_remove (gsi, true);
3242 release_defs (stmt);
3243 return SRA_AM_REMOVED;
3244 }
3245 else
3246 {
3247 init_subtree_with_zero (acc, gsi, true, loc);
3248 return SRA_AM_MODIFIED;
3249 }
3250 }
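
/* For example, a zeroing assignment to a fully scalarized aggregate such as
   (illustrative names)

     struct S { int i; float f; } s;
     ...
     s = {};

   is replaced by assignments of zero to the individual replacements,

     s$i = 0;
     s$f = 0.0;

   and the original statement is removed; if some parts of s are not covered
   by replacements, the zero-initializations are emitted after it and the
   statement itself is kept.  */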
3251
3252 /* Create and return a new suitable default definition SSA_NAME for RACC which
3253 is an access describing an uninitialized part of an aggregate that is being
3254 loaded. */
3255
3256 static tree
3257 get_repl_default_def_ssa_name (struct access *racc)
3258 {
3259 gcc_checking_assert (!racc->grp_to_be_replaced
3260 && !racc->grp_to_be_debug_replaced);
3261 if (!racc->replacement_decl)
3262 racc->replacement_decl = create_access_replacement (racc);
3263 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3264 }
3265
3266 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3267 bit-field field declaration somewhere in it. */
3268
3269 static inline bool
3270 contains_vce_or_bfcref_p (const_tree ref)
3271 {
3272 while (handled_component_p (ref))
3273 {
3274 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3275 || (TREE_CODE (ref) == COMPONENT_REF
3276 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3277 return true;
3278 ref = TREE_OPERAND (ref, 0);
3279 }
3280
3281 return false;
3282 }
3283
3284 /* Examine both sides of the assignment statement pointed to by STMT, replace
3285 them with a scalar replacement if there is one and generate copying of
3286 replacements if scalarized aggregates have been used in the assignment. GSI
3287 is used to hold generated statements for type conversions and subtree
3288 copying. */
3289
3290 static enum assignment_mod_result
3291 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3292 {
3293 struct access *lacc, *racc;
3294 tree lhs, rhs;
3295 bool modify_this_stmt = false;
3296 bool force_gimple_rhs = false;
3297 location_t loc;
3298 gimple_stmt_iterator orig_gsi = *gsi;
3299
3300 if (!gimple_assign_single_p (stmt))
3301 return SRA_AM_NONE;
3302 lhs = gimple_assign_lhs (stmt);
3303 rhs = gimple_assign_rhs1 (stmt);
3304
3305 if (TREE_CODE (rhs) == CONSTRUCTOR)
3306 return sra_modify_constructor_assign (stmt, gsi);
3307
3308 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3309 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3310 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3311 {
3312 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3313 gsi, false);
3314 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3315 gsi, true);
3316 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3317 }
3318
3319 lacc = get_access_for_expr (lhs);
3320 racc = get_access_for_expr (rhs);
3321 if (!lacc && !racc)
3322 return SRA_AM_NONE;
3323 /* Avoid modifying initializations of constant-pool replacements. */
3324 if (racc && (racc->replacement_decl == lhs))
3325 return SRA_AM_NONE;
3326
3327 loc = gimple_location (stmt);
3328 if (lacc && lacc->grp_to_be_replaced)
3329 {
3330 lhs = get_access_replacement (lacc);
3331 gimple_assign_set_lhs (stmt, lhs);
3332 modify_this_stmt = true;
3333 if (lacc->grp_partial_lhs)
3334 force_gimple_rhs = true;
3335 sra_stats.exprs++;
3336 }
3337
3338 if (racc && racc->grp_to_be_replaced)
3339 {
3340 rhs = get_access_replacement (racc);
3341 modify_this_stmt = true;
3342 if (racc->grp_partial_lhs)
3343 force_gimple_rhs = true;
3344 sra_stats.exprs++;
3345 }
3346 else if (racc
3347 && !racc->grp_unscalarized_data
3348 && !racc->grp_unscalarizable_region
3349 && TREE_CODE (lhs) == SSA_NAME
3350 && !access_has_replacements_p (racc))
3351 {
3352 rhs = get_repl_default_def_ssa_name (racc);
3353 modify_this_stmt = true;
3354 sra_stats.exprs++;
3355 }
3356
3357 if (modify_this_stmt)
3358 {
3359 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3360 {
3361 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3362 ??? This should move to fold_stmt which we simply should
3363 call after building a VIEW_CONVERT_EXPR here. */
3364 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3365 && !contains_bitfld_component_ref_p (lhs))
3366 {
3367 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3368 gimple_assign_set_lhs (stmt, lhs);
3369 }
3370 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3371 && !contains_vce_or_bfcref_p (rhs))
3372 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3373
3374 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3375 {
3376 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3377 rhs);
3378 if (is_gimple_reg_type (TREE_TYPE (lhs))
3379 && TREE_CODE (lhs) != SSA_NAME)
3380 force_gimple_rhs = true;
3381 }
3382 }
3383 }
3384
3385 if (lacc && lacc->grp_to_be_debug_replaced)
3386 {
3387 tree dlhs = get_access_replacement (lacc);
3388 tree drhs = unshare_expr (rhs);
3389 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3390 {
3391 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3392 && !contains_vce_or_bfcref_p (drhs))
3393 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3394 if (drhs
3395 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3396 TREE_TYPE (drhs)))
3397 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3398 TREE_TYPE (dlhs), drhs);
3399 }
3400 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3401 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3402 }
3403
3404 /* From this point on, the function deals with assignments in between
3405 aggregates when at least one has scalar reductions of some of its
3406 components. There are three possible scenarios: 1) both the LHS and the RHS
3407 have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3408
3409 In the first case, we would like to load the LHS components from RHS
3410 components whenever possible. If that is not possible, we would like to
3411 read it directly from the RHS (after updating it by storing in it its own
3412 components). If there are some necessary unscalarized data in the LHS,
3413 those will be loaded by the original assignment too. If neither of these
3414 cases happen, the original statement can be removed. Most of this is done
3415 by load_assign_lhs_subreplacements.
3416
3417 In the second case, we would like to store all RHS scalarized components
3418 directly into LHS and if they cover the aggregate completely, remove the
3419 statement too. In the third case, we want the LHS components to be loaded
3420 directly from the RHS (DSE will remove the original statement if it
3421 becomes redundant).
3422
3423 This is a bit complex but manageable when types match and when unions do
3424 not cause confusion in a way that we cannot really load a component of LHS
3425 from the RHS or vice versa (the access representing this level can have
3426 subaccesses that are accessible only through a different union field at a
3427 higher level - different from the one used in the examined expression).
3428 Unions are fun.
3429
3430 Therefore, I specially handle a fourth case, happening when there is a
3431 specific type cast or it is impossible to locate a scalarized subaccess on
3432 the other side of the expression. If that happens, I simply "refresh" the
3433 RHS by storing its scalarized components into it, leave the original statement
3434 there to do the copying and then load the scalar replacements of the LHS.
3435 This is what the first branch does. */
3436
3437 if (modify_this_stmt
3438 || gimple_has_volatile_ops (stmt)
3439 || contains_vce_or_bfcref_p (rhs)
3440 || contains_vce_or_bfcref_p (lhs)
3441 || stmt_ends_bb_p (stmt))
3442 {
3443 /* No need to copy into a constant-pool, it comes pre-initialized. */
3444 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
3445 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3446 gsi, false, false, loc);
3447 if (access_has_children_p (lacc))
3448 {
3449 gimple_stmt_iterator alt_gsi = gsi_none ();
3450 if (stmt_ends_bb_p (stmt))
3451 {
3452 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3453 gsi = &alt_gsi;
3454 }
3455 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3456 gsi, true, true, loc);
3457 }
3458 sra_stats.separate_lhs_rhs_handling++;
3459
3460 /* This gimplification must be done after generate_subtree_copies,
3461 lest we insert the subtree copies in the middle of the gimplified
3462 sequence. */
3463 if (force_gimple_rhs)
3464 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3465 true, GSI_SAME_STMT);
3466 if (gimple_assign_rhs1 (stmt) != rhs)
3467 {
3468 modify_this_stmt = true;
3469 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3470 gcc_assert (stmt == gsi_stmt (orig_gsi));
3471 }
3472
3473 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3474 }
3475 else
3476 {
3477 if (access_has_children_p (lacc)
3478 && access_has_children_p (racc)
3479 /* When an access represents an unscalarizable region, it usually
3480 represents accesses with variable offset and thus must not be used
3481 to generate new memory accesses. */
3482 && !lacc->grp_unscalarizable_region
3483 && !racc->grp_unscalarizable_region)
3484 {
3485 struct subreplacement_assignment_data sad;
3486
3487 sad.left_offset = lacc->offset;
3488 sad.assignment_lhs = lhs;
3489 sad.assignment_rhs = rhs;
3490 sad.top_racc = racc;
3491 sad.old_gsi = *gsi;
3492 sad.new_gsi = gsi;
3493 sad.loc = gimple_location (stmt);
3494 sad.refreshed = SRA_UDH_NONE;
3495
3496 if (lacc->grp_read && !lacc->grp_covered)
3497 handle_unscalarized_data_in_subtree (&sad);
3498
3499 load_assign_lhs_subreplacements (lacc, &sad);
3500 if (sad.refreshed != SRA_UDH_RIGHT)
3501 {
3502 gsi_next (gsi);
3503 unlink_stmt_vdef (stmt);
3504 gsi_remove (&sad.old_gsi, true);
3505 release_defs (stmt);
3506 sra_stats.deleted++;
3507 return SRA_AM_REMOVED;
3508 }
3509 }
3510 else
3511 {
3512 if (access_has_children_p (racc)
3513 && !racc->grp_unscalarized_data
3514 && TREE_CODE (lhs) != SSA_NAME)
3515 {
3516 if (dump_file)
3517 {
3518 fprintf (dump_file, "Removing load: ");
3519 print_gimple_stmt (dump_file, stmt, 0, 0);
3520 }
3521 generate_subtree_copies (racc->first_child, lhs,
3522 racc->offset, 0, 0, gsi,
3523 false, false, loc);
3524 gcc_assert (stmt == gsi_stmt (*gsi));
3525 unlink_stmt_vdef (stmt);
3526 gsi_remove (gsi, true);
3527 release_defs (stmt);
3528 sra_stats.deleted++;
3529 return SRA_AM_REMOVED;
3530 }
3531 /* Restore the aggregate RHS from its components so the
3532 prevailing aggregate copy does the right thing. */
3533 if (access_has_children_p (racc))
3534 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3535 gsi, false, false, loc);
3536 /* Re-load the components of the aggregate copy destination.
3537 But use the RHS aggregate to load from to expose more
3538 optimization opportunities. */
3539 if (access_has_children_p (lacc))
3540 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3541 0, 0, gsi, true, true, loc);
3542 }
3543
3544 return SRA_AM_NONE;
3545 }
3546 }
3547
3548 /* Set any scalar replacements of values in the constant pool to the initial
3549 value of the constant. (Constant-pool decls like *.LC0 have effectively
3550 been initialized before the program starts, we must do the same for their
3551 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
3552 the function's entry block. */
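/* For illustration only: such constant-pool decls typically come from read-only
   aggregate initializers, e.g. "struct S s = { 1, 2 };" compiled into a copy
   from *.LC0, and the statement generated below for a replacement of its first
   field would look roughly like "SR.1 = *.LC0.f1;" (the names are, of course,
   hypothetical).  */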
3553
3554 static void
3555 initialize_constant_pool_replacements (void)
3556 {
3557 gimple_seq seq = NULL;
3558 gimple_stmt_iterator gsi = gsi_start (seq);
3559 bitmap_iterator bi;
3560 unsigned i;
3561
3562 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3563 {
3564 tree var = candidate (i);
3565 if (!constant_decl_p (var))
3566 continue;
3567 vec<access_p> *access_vec = get_base_access_vector (var);
3568 if (!access_vec)
3569 continue;
3570 for (unsigned i = 0; i < access_vec->length (); i++)
3571 {
3572 struct access *access = (*access_vec)[i];
3573 if (!access->replacement_decl)
3574 continue;
3575 gassign *stmt
3576 = gimple_build_assign (get_access_replacement (access),
3577 unshare_expr (access->expr));
3578 if (dump_file && (dump_flags & TDF_DETAILS))
3579 {
3580 fprintf (dump_file, "Generating constant initializer: ");
3581 print_gimple_stmt (dump_file, stmt, 0, 1);
3582 fprintf (dump_file, "\n");
3583 }
3584 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3585 update_stmt (stmt);
3586 }
3587 }
3588
3589 seq = gsi_seq (gsi);
3590 if (seq)
3591 gsi_insert_seq_on_edge_immediate (
3592 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3593 }
3594
3595 /* Traverse the function body and apply all modifications as decided in
3596 analyze_all_variable_accesses. Return true iff the CFG has been
3597 changed. */
3598
3599 static bool
3600 sra_modify_function_body (void)
3601 {
3602 bool cfg_changed = false;
3603 basic_block bb;
3604
3605 initialize_constant_pool_replacements ();
3606
3607 FOR_EACH_BB_FN (bb, cfun)
3608 {
3609 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3610 while (!gsi_end_p (gsi))
3611 {
3612 gimple *stmt = gsi_stmt (gsi);
3613 enum assignment_mod_result assign_result;
3614 bool modified = false, deleted = false;
3615 tree *t;
3616 unsigned i;
3617
3618 switch (gimple_code (stmt))
3619 {
3620 case GIMPLE_RETURN:
3621 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3622 if (*t != NULL_TREE)
3623 modified |= sra_modify_expr (t, &gsi, false);
3624 break;
3625
3626 case GIMPLE_ASSIGN:
3627 assign_result = sra_modify_assign (stmt, &gsi);
3628 modified |= assign_result == SRA_AM_MODIFIED;
3629 deleted = assign_result == SRA_AM_REMOVED;
3630 break;
3631
3632 case GIMPLE_CALL:
3633 /* Operands must be processed before the lhs. */
3634 for (i = 0; i < gimple_call_num_args (stmt); i++)
3635 {
3636 t = gimple_call_arg_ptr (stmt, i);
3637 modified |= sra_modify_expr (t, &gsi, false);
3638 }
3639
3640 if (gimple_call_lhs (stmt))
3641 {
3642 t = gimple_call_lhs_ptr (stmt);
3643 modified |= sra_modify_expr (t, &gsi, true);
3644 }
3645 break;
3646
3647 case GIMPLE_ASM:
3648 {
3649 gasm *asm_stmt = as_a <gasm *> (stmt);
3650 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3651 {
3652 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3653 modified |= sra_modify_expr (t, &gsi, false);
3654 }
3655 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3656 {
3657 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3658 modified |= sra_modify_expr (t, &gsi, true);
3659 }
3660 }
3661 break;
3662
3663 default:
3664 break;
3665 }
3666
3667 if (modified)
3668 {
3669 update_stmt (stmt);
3670 if (maybe_clean_eh_stmt (stmt)
3671 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3672 cfg_changed = true;
3673 }
3674 if (!deleted)
3675 gsi_next (&gsi);
3676 }
3677 }
3678
3679 gsi_commit_edge_inserts ();
3680 return cfg_changed;
3681 }
3682
3683 /* Generate statements initializing scalar replacements of parts of function
3684 parameters. */
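/* A hypothetical example: for "void foo (struct S p)" whose fields x and y were
   scalarized, this emits statements along the lines of "SR_p_x = p.x;" and
   "SR_p_y = p.y;" on the single edge leaving the entry block (the SR_* names
   are only illustrative).  */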
3685
3686 static void
3687 initialize_parameter_reductions (void)
3688 {
3689 gimple_stmt_iterator gsi;
3690 gimple_seq seq = NULL;
3691 tree parm;
3692
3693 gsi = gsi_start (seq);
3694 for (parm = DECL_ARGUMENTS (current_function_decl);
3695 parm;
3696 parm = DECL_CHAIN (parm))
3697 {
3698 vec<access_p> *access_vec;
3699 struct access *access;
3700
3701 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3702 continue;
3703 access_vec = get_base_access_vector (parm);
3704 if (!access_vec)
3705 continue;
3706
3707 for (access = (*access_vec)[0];
3708 access;
3709 access = access->next_grp)
3710 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3711 EXPR_LOCATION (parm));
3712 }
3713
3714 seq = gsi_seq (gsi);
3715 if (seq)
3716 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3717 }
3718
3719 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3720 it reveals there are components of some aggregates to be scalarized, it runs
3721 the required transformations. */
3722 static unsigned int
3723 perform_intra_sra (void)
3724 {
3725 int ret = 0;
3726 sra_initialize ();
3727
3728 if (!find_var_candidates ())
3729 goto out;
3730
3731 if (!scan_function ())
3732 goto out;
3733
3734 if (!analyze_all_variable_accesses ())
3735 goto out;
3736
3737 if (sra_modify_function_body ())
3738 ret = TODO_update_ssa | TODO_cleanup_cfg;
3739 else
3740 ret = TODO_update_ssa;
3741 initialize_parameter_reductions ();
3742
3743 statistics_counter_event (cfun, "Scalar replacements created",
3744 sra_stats.replacements);
3745 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3746 statistics_counter_event (cfun, "Subtree copy stmts",
3747 sra_stats.subtree_copies);
3748 statistics_counter_event (cfun, "Subreplacement stmts",
3749 sra_stats.subreplacements);
3750 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3751 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3752 sra_stats.separate_lhs_rhs_handling);
3753
3754 out:
3755 sra_deinitialize ();
3756 return ret;
3757 }
3758
3759 /* Perform early intraprocedural SRA. */
3760 static unsigned int
3761 early_intra_sra (void)
3762 {
3763 sra_mode = SRA_MODE_EARLY_INTRA;
3764 return perform_intra_sra ();
3765 }
3766
3767 /* Perform "late" intraprocedural SRA. */
3768 static unsigned int
3769 late_intra_sra (void)
3770 {
3771 sra_mode = SRA_MODE_INTRA;
3772 return perform_intra_sra ();
3773 }
3774
3775
3776 static bool
3777 gate_intra_sra (void)
3778 {
3779 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3780 }
3781
3782
3783 namespace {
3784
3785 const pass_data pass_data_sra_early =
3786 {
3787 GIMPLE_PASS, /* type */
3788 "esra", /* name */
3789 OPTGROUP_NONE, /* optinfo_flags */
3790 TV_TREE_SRA, /* tv_id */
3791 ( PROP_cfg | PROP_ssa ), /* properties_required */
3792 0, /* properties_provided */
3793 0, /* properties_destroyed */
3794 0, /* todo_flags_start */
3795 TODO_update_ssa, /* todo_flags_finish */
3796 };
3797
3798 class pass_sra_early : public gimple_opt_pass
3799 {
3800 public:
3801 pass_sra_early (gcc::context *ctxt)
3802 : gimple_opt_pass (pass_data_sra_early, ctxt)
3803 {}
3804
3805 /* opt_pass methods: */
3806 virtual bool gate (function *) { return gate_intra_sra (); }
3807 virtual unsigned int execute (function *) { return early_intra_sra (); }
3808
3809 }; // class pass_sra_early
3810
3811 } // anon namespace
3812
3813 gimple_opt_pass *
3814 make_pass_sra_early (gcc::context *ctxt)
3815 {
3816 return new pass_sra_early (ctxt);
3817 }
3818
3819 namespace {
3820
3821 const pass_data pass_data_sra =
3822 {
3823 GIMPLE_PASS, /* type */
3824 "sra", /* name */
3825 OPTGROUP_NONE, /* optinfo_flags */
3826 TV_TREE_SRA, /* tv_id */
3827 ( PROP_cfg | PROP_ssa ), /* properties_required */
3828 0, /* properties_provided */
3829 0, /* properties_destroyed */
3830 TODO_update_address_taken, /* todo_flags_start */
3831 TODO_update_ssa, /* todo_flags_finish */
3832 };
3833
3834 class pass_sra : public gimple_opt_pass
3835 {
3836 public:
3837 pass_sra (gcc::context *ctxt)
3838 : gimple_opt_pass (pass_data_sra, ctxt)
3839 {}
3840
3841 /* opt_pass methods: */
3842 virtual bool gate (function *) { return gate_intra_sra (); }
3843 virtual unsigned int execute (function *) { return late_intra_sra (); }
3844
3845 }; // class pass_sra
3846
3847 } // anon namespace
3848
3849 gimple_opt_pass *
3850 make_pass_sra (gcc::context *ctxt)
3851 {
3852 return new pass_sra (ctxt);
3853 }
3854
3855
3856 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3857 parameter. */
3858
3859 static bool
3860 is_unused_scalar_param (tree parm)
3861 {
3862 tree name;
3863 return (is_gimple_reg (parm)
3864 && (!(name = ssa_default_def (cfun, parm))
3865 || has_zero_uses (name)));
3866 }
3867
3868 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3869 examine whether there are any direct or otherwise infeasible ones. If so,
3870 return true, otherwise return false. PARM must be a gimple register with a
3871 non-NULL default definition. */
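/* Illustrative examples (not taken from any testcase): for "void f (int *p)",
   uses such as "*p" or "p->fld" on either side of an assignment or as a call
   argument are acceptable, whereas "q = p;", "p + 1" or passing "p" itself to
   another function are direct uses that make the parameter unsuitable for
   IPA-SRA.  */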
3872
3873 static bool
3874 ptr_parm_has_direct_uses (tree parm)
3875 {
3876 imm_use_iterator ui;
3877 gimple *stmt;
3878 tree name = ssa_default_def (cfun, parm);
3879 bool ret = false;
3880
3881 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3882 {
3883 int uses_ok = 0;
3884 use_operand_p use_p;
3885
3886 if (is_gimple_debug (stmt))
3887 continue;
3888
3889 /* Valid uses include dereferences on the lhs and the rhs. */
3890 if (gimple_has_lhs (stmt))
3891 {
3892 tree lhs = gimple_get_lhs (stmt);
3893 while (handled_component_p (lhs))
3894 lhs = TREE_OPERAND (lhs, 0);
3895 if (TREE_CODE (lhs) == MEM_REF
3896 && TREE_OPERAND (lhs, 0) == name
3897 && integer_zerop (TREE_OPERAND (lhs, 1))
3898 && types_compatible_p (TREE_TYPE (lhs),
3899 TREE_TYPE (TREE_TYPE (name)))
3900 && !TREE_THIS_VOLATILE (lhs))
3901 uses_ok++;
3902 }
3903 if (gimple_assign_single_p (stmt))
3904 {
3905 tree rhs = gimple_assign_rhs1 (stmt);
3906 while (handled_component_p (rhs))
3907 rhs = TREE_OPERAND (rhs, 0);
3908 if (TREE_CODE (rhs) == MEM_REF
3909 && TREE_OPERAND (rhs, 0) == name
3910 && integer_zerop (TREE_OPERAND (rhs, 1))
3911 && types_compatible_p (TREE_TYPE (rhs),
3912 TREE_TYPE (TREE_TYPE (name)))
3913 && !TREE_THIS_VOLATILE (rhs))
3914 uses_ok++;
3915 }
3916 else if (is_gimple_call (stmt))
3917 {
3918 unsigned i;
3919 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3920 {
3921 tree arg = gimple_call_arg (stmt, i);
3922 while (handled_component_p (arg))
3923 arg = TREE_OPERAND (arg, 0);
3924 if (TREE_CODE (arg) == MEM_REF
3925 && TREE_OPERAND (arg, 0) == name
3926 && integer_zerop (TREE_OPERAND (arg, 1))
3927 && types_compatible_p (TREE_TYPE (arg),
3928 TREE_TYPE (TREE_TYPE (name)))
3929 && !TREE_THIS_VOLATILE (arg))
3930 uses_ok++;
3931 }
3932 }
3933
3934 /* If the number of valid uses does not match the number of
3935 uses in this stmt there is an unhandled use. */
3936 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3937 --uses_ok;
3938
3939 if (uses_ok != 0)
3940 ret = true;
3941
3942 if (ret)
3943 BREAK_FROM_IMM_USE_STMT (ui);
3944 }
3945
3946 return ret;
3947 }
3948
3949 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3950 them in candidate_bitmap. Note that these do not necessarily include
3951    parameters which are unused and thus can be removed.  Return true iff any
3952 such candidate has been found. */
3953
3954 static bool
3955 find_param_candidates (void)
3956 {
3957 tree parm;
3958 int count = 0;
3959 bool ret = false;
3960 const char *msg;
3961
3962 for (parm = DECL_ARGUMENTS (current_function_decl);
3963 parm;
3964 parm = DECL_CHAIN (parm))
3965 {
3966 tree type = TREE_TYPE (parm);
3967 tree_node **slot;
3968
3969 count++;
3970
3971 if (TREE_THIS_VOLATILE (parm)
3972 || TREE_ADDRESSABLE (parm)
3973 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3974 continue;
3975
3976 if (is_unused_scalar_param (parm))
3977 {
3978 ret = true;
3979 continue;
3980 }
3981
3982 if (POINTER_TYPE_P (type))
3983 {
3984 type = TREE_TYPE (type);
3985
3986 if (TREE_CODE (type) == FUNCTION_TYPE
3987 || TYPE_VOLATILE (type)
3988 || (TREE_CODE (type) == ARRAY_TYPE
3989 && TYPE_NONALIASED_COMPONENT (type))
3990 || !is_gimple_reg (parm)
3991 || is_va_list_type (type)
3992 || ptr_parm_has_direct_uses (parm))
3993 continue;
3994 }
3995 else if (!AGGREGATE_TYPE_P (type))
3996 continue;
3997
3998 if (!COMPLETE_TYPE_P (type)
3999 || !tree_fits_uhwi_p (TYPE_SIZE (type))
4000 || tree_to_uhwi (TYPE_SIZE (type)) == 0
4001 || (AGGREGATE_TYPE_P (type)
4002 && type_internals_preclude_sra_p (type, &msg)))
4003 continue;
4004
4005 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
4006 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
4007 *slot = parm;
4008
4009 ret = true;
4010 if (dump_file && (dump_flags & TDF_DETAILS))
4011 {
4012 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
4013 print_generic_expr (dump_file, parm, 0);
4014 fprintf (dump_file, "\n");
4015 }
4016 }
4017
4018 func_param_count = count;
4019 return ret;
4020 }
4021
4022 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
4023 maybe_modified. */
4024
4025 static bool
4026 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
4027 void *data)
4028 {
4029 struct access *repr = (struct access *) data;
4030
4031 repr->grp_maybe_modified = 1;
4032 return true;
4033 }
4034
4035 /* Analyze what representatives (in linked lists accessible from
4036 REPRESENTATIVES) can be modified by side effects of statements in the
4037 current function. */
4038
4039 static void
4040 analyze_modified_params (vec<access_p> representatives)
4041 {
4042 int i;
4043
4044 for (i = 0; i < func_param_count; i++)
4045 {
4046 struct access *repr;
4047
4048 for (repr = representatives[i];
4049 repr;
4050 repr = repr->next_grp)
4051 {
4052 struct access *access;
4053 bitmap visited;
4054 ao_ref ar;
4055
4056 if (no_accesses_p (repr))
4057 continue;
4058 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
4059 || repr->grp_maybe_modified)
4060 continue;
4061
4062 ao_ref_init (&ar, repr->expr);
4063 visited = BITMAP_ALLOC (NULL);
4064 for (access = repr; access; access = access->next_sibling)
4065 {
4066 /* All accesses are read ones, otherwise grp_maybe_modified would
4067 be trivially set. */
4068 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
4069 mark_maybe_modified, repr, &visited);
4070 if (repr->grp_maybe_modified)
4071 break;
4072 }
4073 BITMAP_FREE (visited);
4074 }
4075 }
4076 }
4077
4078 /* Propagate distances in bb_dereferences in the opposite direction to the
4079    control flow edges, in each step storing the maximum of the current value
4080    and the minimum of all successors.  These steps are repeated until the table
4081    stabilizes.  Note that BBs which might terminate the function (according to
4082    the final_bbs bitmap) are never updated in this way.  */
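/* A small worked example (hypothetical numbers): if a parameter is dereferenced
   up to offset + size 32 in one arm of a diamond and up to 64 in the other, the
   block above the diamond inherits min (32, 64) = 32 and keeps the maximum of
   that and its own value.  Blocks recorded in final_bbs (those that may
   terminate the function) are skipped so that dereferences occurring only after
   such a point are not treated as certain.  */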
4083
4084 static void
4085 propagate_dereference_distances (void)
4086 {
4087 basic_block bb;
4088
4089 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
4090 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4091 FOR_EACH_BB_FN (bb, cfun)
4092 {
4093 queue.quick_push (bb);
4094 bb->aux = bb;
4095 }
4096
4097 while (!queue.is_empty ())
4098 {
4099 edge_iterator ei;
4100 edge e;
4101 bool change = false;
4102 int i;
4103
4104 bb = queue.pop ();
4105 bb->aux = NULL;
4106
4107 if (bitmap_bit_p (final_bbs, bb->index))
4108 continue;
4109
4110 for (i = 0; i < func_param_count; i++)
4111 {
4112 int idx = bb->index * func_param_count + i;
4113 bool first = true;
4114 HOST_WIDE_INT inh = 0;
4115
4116 FOR_EACH_EDGE (e, ei, bb->succs)
4117 {
4118 int succ_idx = e->dest->index * func_param_count + i;
4119
4120 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
4121 continue;
4122
4123 if (first)
4124 {
4125 first = false;
4126 inh = bb_dereferences [succ_idx];
4127 }
4128 else if (bb_dereferences [succ_idx] < inh)
4129 inh = bb_dereferences [succ_idx];
4130 }
4131
4132 if (!first && bb_dereferences[idx] < inh)
4133 {
4134 bb_dereferences[idx] = inh;
4135 change = true;
4136 }
4137 }
4138
4139 if (change && !bitmap_bit_p (final_bbs, bb->index))
4140 FOR_EACH_EDGE (e, ei, bb->preds)
4141 {
4142 if (e->src->aux)
4143 continue;
4144
4145 e->src->aux = e->src;
4146 queue.quick_push (e->src);
4147 }
4148 }
4149 }
4150
4151 /* Dump a dereferences TABLE with heading STR to file F. */
4152
4153 static void
4154 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4155 {
4156 basic_block bb;
4157
4158 fprintf (dump_file, "%s", str);
4159 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4160 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4161 {
4162 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4163 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4164 {
4165 int i;
4166 for (i = 0; i < func_param_count; i++)
4167 {
4168 int idx = bb->index * func_param_count + i;
4169 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4170 }
4171 }
4172 fprintf (f, "\n");
4173 }
4174 fprintf (dump_file, "\n");
4175 }
4176
4177 /* Determine which (parts of) parameters passed by reference and not
4178    assigned to are not certainly dereferenced in this function, and thus the
4179    dereference cannot be safely moved to the caller without potentially
4180    introducing a segfault.  Mark such REPRESENTATIVES as
4181 grp_not_necessarilly_dereferenced.
4182
4183    The maximum dereferenced "distance," i.e. the offset + size of the accessed
4184    part, is calculated rather than a simple boolean for each
4185    pointer parameter, to handle cases when only a fraction of the whole
4186 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4187 an example).
4188
4189 The maximum dereference distances for each pointer parameter and BB are
4190    already stored in bb_dereferences.  This routine simply propagates these
4191 values upwards by propagate_dereference_distances and then compares the
4192 distances of individual parameters in the ENTRY BB to the equivalent
4193 distances of each representative of a (fraction of a) parameter. */
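/* In the spirit of the ipa-sra-2.c testcase mentioned above (details here are
   hypothetical): if "f (int *p)" only dereferences P inside a conditional
   branch, the distance recorded for the ENTRY block stays below offset + size
   of that access, the corresponding representative is marked
   grp_not_necessarilly_dereferenced, and the load is not moved into the
   callers.  */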
4194
4195 static void
4196 analyze_caller_dereference_legality (vec<access_p> representatives)
4197 {
4198 int i;
4199
4200 if (dump_file && (dump_flags & TDF_DETAILS))
4201 dump_dereferences_table (dump_file,
4202 "Dereference table before propagation:\n",
4203 bb_dereferences);
4204
4205 propagate_dereference_distances ();
4206
4207 if (dump_file && (dump_flags & TDF_DETAILS))
4208 dump_dereferences_table (dump_file,
4209 "Dereference table after propagation:\n",
4210 bb_dereferences);
4211
4212 for (i = 0; i < func_param_count; i++)
4213 {
4214 struct access *repr = representatives[i];
4215 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4216
4217 if (!repr || no_accesses_p (repr))
4218 continue;
4219
4220 do
4221 {
4222 if ((repr->offset + repr->size) > bb_dereferences[idx])
4223 repr->grp_not_necessarilly_dereferenced = 1;
4224 repr = repr->next_grp;
4225 }
4226 while (repr);
4227 }
4228 }
4229
4230 /* Return the representative access for the parameter declaration PARM if it is
4231 a scalar passed by reference which is not written to and the pointer value
4232 is not used directly. Thus, if it is legal to dereference it in the caller
4233    and we can rule out modifications through aliases, such a parameter should be
4234 turned into one passed by value. Return NULL otherwise. */
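/* Illustration (hypothetical): for "int f (const int *p) { return *p + *p; }",
   where the pointee is never written and the pointer value itself is never used
   directly, this returns a representative that later allows the parameter to be
   passed by value instead.  */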
4235
4236 static struct access *
4237 unmodified_by_ref_scalar_representative (tree parm)
4238 {
4239 int i, access_count;
4240 struct access *repr;
4241 vec<access_p> *access_vec;
4242
4243 access_vec = get_base_access_vector (parm);
4244 gcc_assert (access_vec);
4245 repr = (*access_vec)[0];
4246 if (repr->write)
4247 return NULL;
4248 repr->group_representative = repr;
4249
4250 access_count = access_vec->length ();
4251 for (i = 1; i < access_count; i++)
4252 {
4253 struct access *access = (*access_vec)[i];
4254 if (access->write)
4255 return NULL;
4256 access->group_representative = repr;
4257 access->next_sibling = repr->next_sibling;
4258 repr->next_sibling = access;
4259 }
4260
4261 repr->grp_read = 1;
4262 repr->grp_scalar_ptr = 1;
4263 return repr;
4264 }
4265
4266 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4267 associated with. REQ_ALIGN is the minimum required alignment. */
4268
4269 static bool
4270 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4271 {
4272 unsigned int exp_align;
4273 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4274 is incompatible assign in a call statement (and possibly even in asm
4275 statements). This can be relaxed by using a new temporary but only for
4276 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4277 intraprocedural SRA we deal with this by keeping the old aggregate around,
4278 something we cannot do in IPA-SRA.) */
4279 if (access->write
4280 && (is_gimple_call (access->stmt)
4281 || gimple_code (access->stmt) == GIMPLE_ASM))
4282 return true;
4283
4284 exp_align = get_object_alignment (access->expr);
4285 if (exp_align < req_align)
4286 return true;
4287
4288 return false;
4289 }
4290
4291
4292 /* Sort collected accesses for parameter PARM, identify representatives for
4293 each accessed region and link them together. Return NULL if there are
4294 different but overlapping accesses, return the special ptr value meaning
4295    there are no accesses for this parameter if that is the case, and return the
4296 first representative otherwise. Set *RO_GRP if there is a group of accesses
4297 with only read (i.e. no write) accesses. */
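/* An example of the "all or nothing" rule applied below (made-up numbers):
   accesses at offset 0 with size 64 and at offset 32 with size 32 partially
   overlap, so NULL is returned and the parameter is kept whole; accesses at
   offset 0/size 32 and offset 32/size 32 are disjoint and each becomes its own
   group representative.  */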
4298
4299 static struct access *
4300 splice_param_accesses (tree parm, bool *ro_grp)
4301 {
4302 int i, j, access_count, group_count;
4303 int agg_size, total_size = 0;
4304 struct access *access, *res, **prev_acc_ptr = &res;
4305 vec<access_p> *access_vec;
4306
4307 access_vec = get_base_access_vector (parm);
4308 if (!access_vec)
4309 return &no_accesses_representant;
4310 access_count = access_vec->length ();
4311
4312 access_vec->qsort (compare_access_positions);
4313
4314 i = 0;
4315 total_size = 0;
4316 group_count = 0;
4317 while (i < access_count)
4318 {
4319 bool modification;
4320 tree a1_alias_type;
4321 access = (*access_vec)[i];
4322 modification = access->write;
4323 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4324 return NULL;
4325 a1_alias_type = reference_alias_ptr_type (access->expr);
4326
4327 /* Access is about to become group representative unless we find some
4328 nasty overlap which would preclude us from breaking this parameter
4329 apart. */
4330
4331 j = i + 1;
4332 while (j < access_count)
4333 {
4334 struct access *ac2 = (*access_vec)[j];
4335 if (ac2->offset != access->offset)
4336 {
4337 /* All or nothing law for parameters. */
4338 if (access->offset + access->size > ac2->offset)
4339 return NULL;
4340 else
4341 break;
4342 }
4343 else if (ac2->size != access->size)
4344 return NULL;
4345
4346 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4347 || (ac2->type != access->type
4348 && (TREE_ADDRESSABLE (ac2->type)
4349 || TREE_ADDRESSABLE (access->type)))
4350 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4351 return NULL;
4352
4353 modification |= ac2->write;
4354 ac2->group_representative = access;
4355 ac2->next_sibling = access->next_sibling;
4356 access->next_sibling = ac2;
4357 j++;
4358 }
4359
4360 group_count++;
4361 access->grp_maybe_modified = modification;
4362 if (!modification)
4363 *ro_grp = true;
4364 *prev_acc_ptr = access;
4365 prev_acc_ptr = &access->next_grp;
4366 total_size += access->size;
4367 i = j;
4368 }
4369
4370 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4371 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4372 else
4373 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4374 if (total_size >= agg_size)
4375 return NULL;
4376
4377 gcc_assert (group_count > 0);
4378 return res;
4379 }
4380
4381 /* Decide whether parameters with representative accesses given by REPR should
4382 be reduced into components. */
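/* A sketch of the size test below (parameter values hypothetical): with
   --param ipa-sra-ptr-growth-factor=2 and an 8-byte pointer parameter, the
   split is only approved (a nonzero component count returned) when the total
   size of the replacements is smaller than the pointed-to aggregate and does
   not exceed 2 * 8 = 16 bytes; otherwise 0 is returned and the parameter is
   left untouched.  */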
4383
4384 static int
4385 decide_one_param_reduction (struct access *repr)
4386 {
4387 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4388 bool by_ref;
4389 tree parm;
4390
4391 parm = repr->base;
4392 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4393 gcc_assert (cur_parm_size > 0);
4394
4395 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4396 {
4397 by_ref = true;
4398 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4399 }
4400 else
4401 {
4402 by_ref = false;
4403 agg_size = cur_parm_size;
4404 }
4405
4406 if (dump_file)
4407 {
4408 struct access *acc;
4409 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4410 print_generic_expr (dump_file, parm, 0);
4411 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4412 for (acc = repr; acc; acc = acc->next_grp)
4413 dump_access (dump_file, acc, true);
4414 }
4415
4416 total_size = 0;
4417 new_param_count = 0;
4418
4419 for (; repr; repr = repr->next_grp)
4420 {
4421 gcc_assert (parm == repr->base);
4422
4423 /* Taking the address of a non-addressable field is verboten. */
4424 if (by_ref && repr->non_addressable)
4425 return 0;
4426
4427 /* Do not decompose a non-BLKmode param in a way that would
4428 create BLKmode params. Especially for by-reference passing
4429 (thus, pointer-type param) this is hardly worthwhile. */
4430 if (DECL_MODE (parm) != BLKmode
4431 && TYPE_MODE (repr->type) == BLKmode)
4432 return 0;
4433
4434 if (!by_ref || (!repr->grp_maybe_modified
4435 && !repr->grp_not_necessarilly_dereferenced))
4436 total_size += repr->size;
4437 else
4438 total_size += cur_parm_size;
4439
4440 new_param_count++;
4441 }
4442
4443 gcc_assert (new_param_count > 0);
4444
4445 if (optimize_function_for_size_p (cfun))
4446 parm_size_limit = cur_parm_size;
4447 else
4448 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4449 * cur_parm_size);
4450
4451 if (total_size < agg_size
4452 && total_size <= parm_size_limit)
4453 {
4454 if (dump_file)
4455 fprintf (dump_file, " ....will be split into %i components\n",
4456 new_param_count);
4457 return new_param_count;
4458 }
4459 else
4460 return 0;
4461 }
4462
4463 /* The order of the following enums is important; we need to do extra work for
4464 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4465 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4466 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4467
4468 /* Identify representatives of all accesses to all candidate parameters for
4469 IPA-SRA. Return result based on what representatives have been found. */
4470
4471 static enum ipa_splicing_result
4472 splice_all_param_accesses (vec<access_p> &representatives)
4473 {
4474 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4475 tree parm;
4476 struct access *repr;
4477
4478 representatives.create (func_param_count);
4479
4480 for (parm = DECL_ARGUMENTS (current_function_decl);
4481 parm;
4482 parm = DECL_CHAIN (parm))
4483 {
4484 if (is_unused_scalar_param (parm))
4485 {
4486 representatives.quick_push (&no_accesses_representant);
4487 if (result == NO_GOOD_ACCESS)
4488 result = UNUSED_PARAMS;
4489 }
4490 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4491 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4492 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4493 {
4494 repr = unmodified_by_ref_scalar_representative (parm);
4495 representatives.quick_push (repr);
4496 if (repr)
4497 result = UNMODIF_BY_REF_ACCESSES;
4498 }
4499 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4500 {
4501 bool ro_grp = false;
4502 repr = splice_param_accesses (parm, &ro_grp);
4503 representatives.quick_push (repr);
4504
4505 if (repr && !no_accesses_p (repr))
4506 {
4507 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4508 {
4509 if (ro_grp)
4510 result = UNMODIF_BY_REF_ACCESSES;
4511 else if (result < MODIF_BY_REF_ACCESSES)
4512 result = MODIF_BY_REF_ACCESSES;
4513 }
4514 else if (result < BY_VAL_ACCESSES)
4515 result = BY_VAL_ACCESSES;
4516 }
4517 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4518 result = UNUSED_PARAMS;
4519 }
4520 else
4521 representatives.quick_push (NULL);
4522 }
4523
4524 if (result == NO_GOOD_ACCESS)
4525 {
4526 representatives.release ();
4527 return NO_GOOD_ACCESS;
4528 }
4529
4530 return result;
4531 }
4532
4533 /* Return the index of BASE in PARMS. Abort if it is not found. */
4534
4535 static inline int
4536 get_param_index (tree base, vec<tree> parms)
4537 {
4538 int i, len;
4539
4540 len = parms.length ();
4541 for (i = 0; i < len; i++)
4542 if (parms[i] == base)
4543 return i;
4544 gcc_unreachable ();
4545 }
4546
4547 /* Convert the decisions made at the representative level into compact
4548 parameter adjustments. REPRESENTATIVES are pointers to first
4549    representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the expected
4550 final number of adjustments. */
4551
4552 static ipa_parm_adjustment_vec
4553 turn_representatives_into_adjustments (vec<access_p> representatives,
4554 int adjustments_count)
4555 {
4556 vec<tree> parms;
4557 ipa_parm_adjustment_vec adjustments;
4558 tree parm;
4559 int i;
4560
4561 gcc_assert (adjustments_count > 0);
4562 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4563 adjustments.create (adjustments_count);
4564 parm = DECL_ARGUMENTS (current_function_decl);
4565 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4566 {
4567 struct access *repr = representatives[i];
4568
4569 if (!repr || no_accesses_p (repr))
4570 {
4571 struct ipa_parm_adjustment adj;
4572
4573 memset (&adj, 0, sizeof (adj));
4574 adj.base_index = get_param_index (parm, parms);
4575 adj.base = parm;
4576 if (!repr)
4577 adj.op = IPA_PARM_OP_COPY;
4578 else
4579 adj.op = IPA_PARM_OP_REMOVE;
4580 adj.arg_prefix = "ISRA";
4581 adjustments.quick_push (adj);
4582 }
4583 else
4584 {
4585 struct ipa_parm_adjustment adj;
4586 int index = get_param_index (parm, parms);
4587
4588 for (; repr; repr = repr->next_grp)
4589 {
4590 memset (&adj, 0, sizeof (adj));
4591 gcc_assert (repr->base == parm);
4592 adj.base_index = index;
4593 adj.base = repr->base;
4594 adj.type = repr->type;
4595 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4596 adj.offset = repr->offset;
4597 adj.reverse = repr->reverse;
4598 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4599 && (repr->grp_maybe_modified
4600 || repr->grp_not_necessarilly_dereferenced));
4601 adj.arg_prefix = "ISRA";
4602 adjustments.quick_push (adj);
4603 }
4604 }
4605 }
4606 parms.release ();
4607 return adjustments;
4608 }
4609
4610 /* Analyze the collected accesses and produce a plan of what to do with the
4611    parameters in the form of adjustments, NULL meaning that nothing should be done.  */
4612
4613 static ipa_parm_adjustment_vec
4614 analyze_all_param_acesses (void)
4615 {
4616 enum ipa_splicing_result repr_state;
4617 bool proceed = false;
4618 int i, adjustments_count = 0;
4619 vec<access_p> representatives;
4620 ipa_parm_adjustment_vec adjustments;
4621
4622 repr_state = splice_all_param_accesses (representatives);
4623 if (repr_state == NO_GOOD_ACCESS)
4624 return ipa_parm_adjustment_vec ();
4625
4626 /* If there are any parameters passed by reference which are not modified
4627 directly, we need to check whether they can be modified indirectly. */
4628 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4629 {
4630 analyze_caller_dereference_legality (representatives);
4631 analyze_modified_params (representatives);
4632 }
4633
4634 for (i = 0; i < func_param_count; i++)
4635 {
4636 struct access *repr = representatives[i];
4637
4638 if (repr && !no_accesses_p (repr))
4639 {
4640 if (repr->grp_scalar_ptr)
4641 {
4642 adjustments_count++;
4643 if (repr->grp_not_necessarilly_dereferenced
4644 || repr->grp_maybe_modified)
4645 representatives[i] = NULL;
4646 else
4647 {
4648 proceed = true;
4649 sra_stats.scalar_by_ref_to_by_val++;
4650 }
4651 }
4652 else
4653 {
4654 int new_components = decide_one_param_reduction (repr);
4655
4656 if (new_components == 0)
4657 {
4658 representatives[i] = NULL;
4659 adjustments_count++;
4660 }
4661 else
4662 {
4663 adjustments_count += new_components;
4664 sra_stats.aggregate_params_reduced++;
4665 sra_stats.param_reductions_created += new_components;
4666 proceed = true;
4667 }
4668 }
4669 }
4670 else
4671 {
4672 if (no_accesses_p (repr))
4673 {
4674 proceed = true;
4675 sra_stats.deleted_unused_parameters++;
4676 }
4677 adjustments_count++;
4678 }
4679 }
4680
4681 if (!proceed && dump_file)
4682 fprintf (dump_file, "NOT proceeding to change params.\n");
4683
4684 if (proceed)
4685 adjustments = turn_representatives_into_adjustments (representatives,
4686 adjustments_count);
4687 else
4688 adjustments = ipa_parm_adjustment_vec ();
4689
4690 representatives.release ();
4691 return adjustments;
4692 }
4693
4694 /* If a parameter replacement identified by ADJ does not yet exist in the form
4695 of declaration, create it and record it, otherwise return the previously
4696 created one. */
4697
4698 static tree
4699 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4700 {
4701 tree repl;
4702 if (!adj->new_ssa_base)
4703 {
4704 char *pretty_name = make_fancy_name (adj->base);
4705
4706 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4707 DECL_NAME (repl) = get_identifier (pretty_name);
4708 DECL_NAMELESS (repl) = 1;
4709 obstack_free (&name_obstack, pretty_name);
4710
4711 adj->new_ssa_base = repl;
4712 }
4713 else
4714 repl = adj->new_ssa_base;
4715 return repl;
4716 }
4717
4718 /* Find the first adjustment for a particular parameter BASE in a vector of
4719 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4720 adjustment. */
4721
4722 static struct ipa_parm_adjustment *
4723 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4724 {
4725 int i, len;
4726
4727 len = adjustments.length ();
4728 for (i = 0; i < len; i++)
4729 {
4730 struct ipa_parm_adjustment *adj;
4731
4732 adj = &adjustments[i];
4733 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4734 return adj;
4735 }
4736
4737 return NULL;
4738 }
4739
4740 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
4741 parameter which is to be removed because its value is not used, create a new
4742 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
4743 original with it and return it. If there is no need to re-map, return NULL.
4744 ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments. */
4745
4746 static tree
4747 replace_removed_params_ssa_names (tree old_name, gimple *stmt,
4748 ipa_parm_adjustment_vec adjustments)
4749 {
4750 struct ipa_parm_adjustment *adj;
4751 tree decl, repl, new_name;
4752
4753 if (TREE_CODE (old_name) != SSA_NAME)
4754 return NULL;
4755
4756 decl = SSA_NAME_VAR (old_name);
4757 if (decl == NULL_TREE
4758 || TREE_CODE (decl) != PARM_DECL)
4759 return NULL;
4760
4761 adj = get_adjustment_for_base (adjustments, decl);
4762 if (!adj)
4763 return NULL;
4764
4765 repl = get_replaced_param_substitute (adj);
4766 new_name = make_ssa_name (repl, stmt);
4767 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
4768 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
4769
4770 if (dump_file)
4771 {
4772 fprintf (dump_file, "replacing an SSA name of a removed param ");
4773 print_generic_expr (dump_file, old_name, 0);
4774 fprintf (dump_file, " with ");
4775 print_generic_expr (dump_file, new_name, 0);
4776 fprintf (dump_file, "\n");
4777 }
4778
4779 replace_uses_by (old_name, new_name);
4780 return new_name;
4781 }
4782
4783 /* If the statement STMT contains any expressions that need to be replaced with
4784    different ones as noted by ADJUSTMENTS, do so.  Handle any potential type
4785 incompatibilities (GSI is used to accommodate conversion statements and must
4786 point to the statement). Return true iff the statement was modified. */
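/* A rough illustration of the type fix-ups below (a sketch, not an exact
   trace): once a dereference of a removed pointer parameter has been rewritten
   to the new scalar parameter, an assignment whose two sides no longer have
   compatible types gets its RHS wrapped in a VIEW_CONVERT_EXPR, while a
   CONSTRUCTOR on the RHS is instead rebuilt as a zero constant or an empty
   constructor of the LHS type.  */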
4787
4788 static bool
4789 sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
4790 ipa_parm_adjustment_vec adjustments)
4791 {
4792 tree *lhs_p, *rhs_p;
4793 bool any;
4794
4795 if (!gimple_assign_single_p (stmt))
4796 return false;
4797
4798 rhs_p = gimple_assign_rhs1_ptr (stmt);
4799 lhs_p = gimple_assign_lhs_ptr (stmt);
4800
4801 any = ipa_modify_expr (rhs_p, false, adjustments);
4802 any |= ipa_modify_expr (lhs_p, false, adjustments);
4803 if (any)
4804 {
4805 tree new_rhs = NULL_TREE;
4806
4807 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4808 {
4809 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4810 {
4811 /* V_C_Es of constructors can cause trouble (PR 42714). */
4812 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4813 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4814 else
4815 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4816 NULL);
4817 }
4818 else
4819 new_rhs = fold_build1_loc (gimple_location (stmt),
4820 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4821 *rhs_p);
4822 }
4823 else if (REFERENCE_CLASS_P (*rhs_p)
4824 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4825 && !is_gimple_reg (*lhs_p))
4826 /* This can happen when an assignment in between two single field
4827 structures is turned into an assignment in between two pointers to
4828 scalars (PR 42237). */
4829 new_rhs = *rhs_p;
4830
4831 if (new_rhs)
4832 {
4833 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4834 true, GSI_SAME_STMT);
4835
4836 gimple_assign_set_rhs_from_tree (gsi, tmp);
4837 }
4838
4839 return true;
4840 }
4841
4842 return false;
4843 }
4844
4845 /* Traverse the function body and apply all modifications as described in
4846 ADJUSTMENTS. Return true iff the CFG has been changed. */
4847
4848 bool
4849 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4850 {
4851 bool cfg_changed = false;
4852 basic_block bb;
4853
4854 FOR_EACH_BB_FN (bb, cfun)
4855 {
4856 gimple_stmt_iterator gsi;
4857
4858 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4859 {
4860 gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
4861 tree new_lhs, old_lhs = gimple_phi_result (phi);
4862 new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
4863 if (new_lhs)
4864 {
4865 gimple_phi_set_result (phi, new_lhs);
4866 release_ssa_name (old_lhs);
4867 }
4868 }
4869
4870 gsi = gsi_start_bb (bb);
4871 while (!gsi_end_p (gsi))
4872 {
4873 gimple *stmt = gsi_stmt (gsi);
4874 bool modified = false;
4875 tree *t;
4876 unsigned i;
4877
4878 switch (gimple_code (stmt))
4879 {
4880 case GIMPLE_RETURN:
4881 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4882 if (*t != NULL_TREE)
4883 modified |= ipa_modify_expr (t, true, adjustments);
4884 break;
4885
4886 case GIMPLE_ASSIGN:
4887 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4888 break;
4889
4890 case GIMPLE_CALL:
4891 /* Operands must be processed before the lhs. */
4892 for (i = 0; i < gimple_call_num_args (stmt); i++)
4893 {
4894 t = gimple_call_arg_ptr (stmt, i);
4895 modified |= ipa_modify_expr (t, true, adjustments);
4896 }
4897
4898 if (gimple_call_lhs (stmt))
4899 {
4900 t = gimple_call_lhs_ptr (stmt);
4901 modified |= ipa_modify_expr (t, false, adjustments);
4902 }
4903 break;
4904
4905 case GIMPLE_ASM:
4906 {
4907 gasm *asm_stmt = as_a <gasm *> (stmt);
4908 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4909 {
4910 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4911 modified |= ipa_modify_expr (t, true, adjustments);
4912 }
4913 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4914 {
4915 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4916 modified |= ipa_modify_expr (t, false, adjustments);
4917 }
4918 }
4919 break;
4920
4921 default:
4922 break;
4923 }
4924
4925 def_operand_p defp;
4926 ssa_op_iter iter;
4927 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
4928 {
4929 tree old_def = DEF_FROM_PTR (defp);
4930 if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
4931 adjustments))
4932 {
4933 SET_DEF (defp, new_def);
4934 release_ssa_name (old_def);
4935 modified = true;
4936 }
4937 }
4938
4939 if (modified)
4940 {
4941 update_stmt (stmt);
4942 if (maybe_clean_eh_stmt (stmt)
4943 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4944 cfg_changed = true;
4945 }
4946 gsi_next (&gsi);
4947 }
4948 }
4949
4950 return cfg_changed;
4951 }
4952
4953 /* Call gimple_debug_bind_reset_value on all debug statements describing
4954 gimple register parameters that are being removed or replaced. */
4955
4956 static void
4957 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4958 {
4959 int i, len;
4960 gimple_stmt_iterator *gsip = NULL, gsi;
4961
4962 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
4963 {
4964 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4965 gsip = &gsi;
4966 }
4967 len = adjustments.length ();
4968 for (i = 0; i < len; i++)
4969 {
4970 struct ipa_parm_adjustment *adj;
4971 imm_use_iterator ui;
4972 gimple *stmt;
4973 gdebug *def_temp;
4974 tree name, vexpr, copy = NULL_TREE;
4975 use_operand_p use_p;
4976
4977 adj = &adjustments[i];
4978 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
4979 continue;
4980 name = ssa_default_def (cfun, adj->base);
4981 vexpr = NULL;
4982 if (name)
4983 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4984 {
4985 if (gimple_clobber_p (stmt))
4986 {
4987 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
4988 unlink_stmt_vdef (stmt);
4989 gsi_remove (&cgsi, true);
4990 release_defs (stmt);
4991 continue;
4992 }
4993 /* All other users must have been removed by
4994 ipa_sra_modify_function_body. */
4995 gcc_assert (is_gimple_debug (stmt));
4996 if (vexpr == NULL && gsip != NULL)
4997 {
4998 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4999 vexpr = make_node (DEBUG_EXPR_DECL);
5000 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
5001 NULL);
5002 DECL_ARTIFICIAL (vexpr) = 1;
5003 TREE_TYPE (vexpr) = TREE_TYPE (name);
5004 DECL_MODE (vexpr) = DECL_MODE (adj->base);
5005 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5006 }
5007 if (vexpr)
5008 {
5009 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
5010 SET_USE (use_p, vexpr);
5011 }
5012 else
5013 gimple_debug_bind_reset_value (stmt);
5014 update_stmt (stmt);
5015 }
5016 /* Create a VAR_DECL for debug info purposes. */
5017 if (!DECL_IGNORED_P (adj->base))
5018 {
5019 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
5020 VAR_DECL, DECL_NAME (adj->base),
5021 TREE_TYPE (adj->base));
5022 if (DECL_PT_UID_SET_P (adj->base))
5023 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
5024 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
5025 TREE_READONLY (copy) = TREE_READONLY (adj->base);
5026 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
5027 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
5028 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
5029 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
5030 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
5031 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
5032 SET_DECL_RTL (copy, 0);
5033 TREE_USED (copy) = 1;
5034 DECL_CONTEXT (copy) = current_function_decl;
5035 add_local_decl (cfun, copy);
5036 DECL_CHAIN (copy) =
5037 BLOCK_VARS (DECL_INITIAL (current_function_decl));
5038 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
5039 }
5040 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
5041 {
5042 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5043 if (vexpr)
5044 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
5045 else
5046 def_temp = gimple_build_debug_source_bind (copy, adj->base,
5047 NULL);
5048 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5049 }
5050 }
5051 }
5052
5053 /* Return false if all callers have at least as many actual arguments as there
5054    are formal parameters in the current function and their types
5055    match; return true otherwise.  */
5056
5057 static bool
5058 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
5059 void *data ATTRIBUTE_UNUSED)
5060 {
5061 struct cgraph_edge *cs;
5062 for (cs = node->callers; cs; cs = cs->next_caller)
5063 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
5064 return true;
5065
5066 return false;
5067 }
5068
5069 /* Return false if all callers have a VUSE attached to their call statements, true otherwise.  */
5070
5071 static bool
5072 some_callers_have_no_vuse_p (struct cgraph_node *node,
5073 void *data ATTRIBUTE_UNUSED)
5074 {
5075 struct cgraph_edge *cs;
5076 for (cs = node->callers; cs; cs = cs->next_caller)
5077 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
5078 return true;
5079
5080 return false;
5081 }
5082
5083 /* Convert all callers of NODE. */
5084
5085 static bool
5086 convert_callers_for_node (struct cgraph_node *node,
5087 void *data)
5088 {
5089 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
5090 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
5091 struct cgraph_edge *cs;
5092
5093 for (cs = node->callers; cs; cs = cs->next_caller)
5094 {
5095 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
5096
5097 if (dump_file)
5098 fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
5099 xstrdup_for_dump (cs->caller->name ()),
5100 cs->caller->order,
5101 xstrdup_for_dump (cs->callee->name ()),
5102 cs->callee->order);
5103
5104 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
5105
5106 pop_cfun ();
5107 }
5108
5109 for (cs = node->callers; cs; cs = cs->next_caller)
5110 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
5111 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
5112 compute_inline_parameters (cs->caller, true);
5113 BITMAP_FREE (recomputed_callers);
5114
5115 return true;
5116 }
5117
5118 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
5119
5120 static void
5121 convert_callers (struct cgraph_node *node, tree old_decl,
5122 ipa_parm_adjustment_vec adjustments)
5123 {
5124 basic_block this_block;
5125
5126 node->call_for_symbol_and_aliases (convert_callers_for_node,
5127 &adjustments, false);
5128
5129 if (!encountered_recursive_call)
5130 return;
5131
5132 FOR_EACH_BB_FN (this_block, cfun)
5133 {
5134 gimple_stmt_iterator gsi;
5135
5136 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
5137 {
5138 gcall *stmt;
5139 tree call_fndecl;
5140 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
5141 if (!stmt)
5142 continue;
5143 call_fndecl = gimple_call_fndecl (stmt);
5144 if (call_fndecl == old_decl)
5145 {
5146 if (dump_file)
5147 fprintf (dump_file, "Adjusting recursive call");
5148 gimple_call_set_fndecl (stmt, node->decl);
5149 ipa_modify_call_arguments (NULL, stmt, adjustments);
5150 }
5151 }
5152 }
5153
5154 return;
5155 }
5156
5157 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
5158 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
5159
5160 static bool
5161 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
5162 {
5163 struct cgraph_node *new_node;
5164 bool cfg_changed;
5165
5166 cgraph_edge::rebuild_edges ();
5167 free_dominance_info (CDI_DOMINATORS);
5168 pop_cfun ();
5169
5170 /* This must be done after rebuilding cgraph edges for node above.
5171 Otherwise any recursive calls to node that are recorded in
5172 redirect_callers will be corrupted. */
5173 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
5174 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
5175 NULL, false, NULL, NULL,
5176 "isra");
5177 redirect_callers.release ();
5178
5179 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
5180 ipa_modify_formal_parameters (current_function_decl, adjustments);
5181 cfg_changed = ipa_sra_modify_function_body (adjustments);
5182 sra_ipa_reset_debug_stmts (adjustments);
5183 convert_callers (new_node, node->decl, adjustments);
5184 new_node->make_local ();
5185 return cfg_changed;
5186 }
5187
5188 /* Means of communication between ipa_sra_check_caller and
5189 ipa_sra_preliminary_function_checks. */
5190
5191 struct ipa_sra_check_caller_data
5192 {
5193 bool has_callers;
5194 bool bad_arg_alignment;
5195 bool has_thunk;
5196 };
5197
5198 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5199    ipa_sra_check_caller_data.  Also check whether all aggregate arguments in all
5200    known calls are unit aligned and, if not, set the appropriate flag in DATA
5201    too.  */
5202
5203 static bool
5204 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5205 {
5206 if (!node->callers)
5207 return false;
5208
5209 struct ipa_sra_check_caller_data *iscc;
5210 iscc = (struct ipa_sra_check_caller_data *) data;
5211 iscc->has_callers = true;
5212
5213 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5214 {
5215 if (cs->caller->thunk.thunk_p)
5216 {
5217 iscc->has_thunk = true;
5218 return true;
5219 }
5220 gimple *call_stmt = cs->call_stmt;
5221 unsigned count = gimple_call_num_args (call_stmt);
5222 for (unsigned i = 0; i < count; i++)
5223 {
5224 tree arg = gimple_call_arg (call_stmt, i);
5225 if (is_gimple_reg (arg))
5226 continue;
5227
5228 tree offset;
5229 HOST_WIDE_INT bitsize, bitpos;
5230 machine_mode mode;
5231 int unsignedp, reversep, volatilep = 0;
5232 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5233 &unsignedp, &reversep, &volatilep);
5234 if (bitpos % BITS_PER_UNIT)
5235 {
5236 iscc->bad_arg_alignment = true;
5237 return true;
5238 }
5239 }
5240 }
5241
5242 return false;
5243 }
5244
5245 /* Return false if the function is apparently unsuitable for IPA-SRA based on its
5246    attributes, return true otherwise.  NODE is the cgraph node of the current
5247 function. */
5248
5249 static bool
5250 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5251 {
5252 if (!node->can_be_local_p ())
5253 {
5254 if (dump_file)
5255 fprintf (dump_file, "Function not local to this compilation unit.\n");
5256 return false;
5257 }
5258
5259 if (!node->local.can_change_signature)
5260 {
5261 if (dump_file)
5262 fprintf (dump_file, "Function can not change signature.\n");
5263 return false;
5264 }
5265
5266 if (!tree_versionable_function_p (node->decl))
5267 {
5268 if (dump_file)
5269 fprintf (dump_file, "Function is not versionable.\n");
5270 return false;
5271 }
5272
5273 if (!opt_for_fn (node->decl, optimize)
5274 || !opt_for_fn (node->decl, flag_ipa_sra))
5275 {
5276 if (dump_file)
5277 fprintf (dump_file, "Function not optimized.\n");
5278 return false;
5279 }
5280
5281 if (DECL_VIRTUAL_P (current_function_decl))
5282 {
5283 if (dump_file)
5284 fprintf (dump_file, "Function is a virtual method.\n");
5285 return false;
5286 }
5287
5288 if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5289 && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5290 {
5291 if (dump_file)
5292 fprintf (dump_file, "Function too big to be made truly local.\n");
5293 return false;
5294 }
5295
5296 if (cfun->stdarg)
5297 {
5298 if (dump_file)
5299 fprintf (dump_file, "Function uses stdarg. \n");
5300 return false;
5301 }
5302
5303 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5304 return false;
5305
5306 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5307 {
5308 if (dump_file)
5309 fprintf (dump_file, "Always inline function will be inlined "
5310 "anyway. \n");
5311 return false;
5312 }
5313
5314 struct ipa_sra_check_caller_data iscc;
5315 memset (&iscc, 0, sizeof(iscc));
5316 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5317 if (!iscc.has_callers)
5318 {
5319 if (dump_file)
5320 fprintf (dump_file,
5321 "Function has no callers in this compilation unit.\n");
5322 return false;
5323 }
5324
5325 if (iscc.bad_arg_alignment)
5326 {
5327 if (dump_file)
5328 fprintf (dump_file,
5329 "A function call has an argument with non-unit alignment.\n");
5330 return false;
5331 }
5332
5333 if (iscc.has_thunk)
5334 {
5335 if (dump_file)
5336 fprintf (dump_file,
5337 "A has thunk.\n");
5338 return false;
5339 }
5340
5341 return true;
5342 }
5343
5344 /* Perform early interprocedural SRA. */
5345
5346 static unsigned int
5347 ipa_early_sra (void)
5348 {
5349 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5350 ipa_parm_adjustment_vec adjustments;
5351 int ret = 0;
5352
5353 if (!ipa_sra_preliminary_function_checks (node))
5354 return 0;
5355
5356 sra_initialize ();
5357 sra_mode = SRA_MODE_EARLY_IPA;
5358
5359 if (!find_param_candidates ())
5360 {
5361 if (dump_file)
5362 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5363 goto simple_out;
5364 }
5365
5366 if (node->call_for_symbol_and_aliases
5367 (some_callers_have_mismatched_arguments_p, NULL, true))
5368 {
5369 if (dump_file)
5370 fprintf (dump_file, "There are callers with insufficient number of "
5371 "arguments or arguments with type mismatches.\n");
5372 goto simple_out;
5373 }
5374
5375 if (node->call_for_symbol_and_aliases
5376 (some_callers_have_no_vuse_p, NULL, true))
5377 {
5378 if (dump_file)
5379 fprintf (dump_file, "There are callers with no VUSE attached "
5380 "to a call stmt.\n");
5381 goto simple_out;
5382 }
5383
5384 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5385 func_param_count
5386 * last_basic_block_for_fn (cfun));
5387 final_bbs = BITMAP_ALLOC (NULL);
5388
5389 scan_function ();
5390 if (encountered_apply_args)
5391 {
5392 if (dump_file)
5393 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5394 goto out;
5395 }
5396
5397 if (encountered_unchangable_recursive_call)
5398 {
5399 if (dump_file)
5400 fprintf (dump_file, "Function calls itself with insufficient "
5401 "number of arguments.\n");
5402 goto out;
5403 }
5404
5405 adjustments = analyze_all_param_acesses ();
5406 if (!adjustments.exists ())
5407 goto out;
5408 if (dump_file)
5409 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5410
5411 if (modify_function (node, adjustments))
5412 ret = TODO_update_ssa | TODO_cleanup_cfg;
5413 else
5414 ret = TODO_update_ssa;
5415 adjustments.release ();
5416
5417 statistics_counter_event (cfun, "Unused parameters deleted",
5418 sra_stats.deleted_unused_parameters);
5419 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5420 sra_stats.scalar_by_ref_to_by_val);
5421 statistics_counter_event (cfun, "Aggregate parameters broken up",
5422 sra_stats.aggregate_params_reduced);
5423 statistics_counter_event (cfun, "Aggregate parameter components created",
5424 sra_stats.param_reductions_created);
5425
5426 out:
5427 BITMAP_FREE (final_bbs);
5428 free (bb_dereferences);
5429 simple_out:
5430 sra_deinitialize ();
5431 return ret;
5432 }
5433
5434 namespace {
5435
5436 const pass_data pass_data_early_ipa_sra =
5437 {
5438 GIMPLE_PASS, /* type */
5439 "eipa_sra", /* name */
5440 OPTGROUP_NONE, /* optinfo_flags */
5441 TV_IPA_SRA, /* tv_id */
5442 0, /* properties_required */
5443 0, /* properties_provided */
5444 0, /* properties_destroyed */
5445 0, /* todo_flags_start */
5446 TODO_dump_symtab, /* todo_flags_finish */
5447 };
5448
5449 class pass_early_ipa_sra : public gimple_opt_pass
5450 {
5451 public:
5452 pass_early_ipa_sra (gcc::context *ctxt)
5453 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5454 {}
5455
5456 /* opt_pass methods: */
5457 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5458 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5459
5460 }; // class pass_early_ipa_sra
5461
5462 } // anon namespace
5463
5464 gimple_opt_pass *
5465 make_pass_early_ipa_sra (gcc::context *ctxt)
5466 {
5467 return new pass_early_ipa_sra (ctxt);
5468 }