[gcc.git] / gcc / tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2014 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
73
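/* As a purely illustrative sketch (not taken from any particular testcase),
   given

     struct pair { int a; int b; };

     int
     f (void)
     {
       struct pair p;
       p.a = 1;
       p.b = 2;
       return p.a + p.b;
     }

   intraprocedural SRA replaces the scalar parts of P with independent
   variables, so that the body effectively becomes

     p$a = 1;
     p$b = 2;
     return p$a + p$b;

   after which the aggregate P itself is no longer needed and can be removed
   by later dead code elimination.  */
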
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "hash-map.h"
78 #include "hash-table.h"
79 #include "alloc-pool.h"
80 #include "tm.h"
81 #include "tree.h"
82 #include "predict.h"
83 #include "vec.h"
84 #include "hashtab.h"
85 #include "hash-set.h"
86 #include "machmode.h"
87 #include "hard-reg-set.h"
88 #include "input.h"
89 #include "function.h"
90 #include "dominance.h"
91 #include "cfg.h"
92 #include "basic-block.h"
93 #include "tree-ssa-alias.h"
94 #include "internal-fn.h"
95 #include "tree-eh.h"
96 #include "gimple-expr.h"
97 #include "is-a.h"
98 #include "gimple.h"
99 #include "stor-layout.h"
100 #include "gimplify.h"
101 #include "gimple-iterator.h"
102 #include "gimplify-me.h"
103 #include "gimple-walk.h"
104 #include "bitmap.h"
105 #include "gimple-ssa.h"
106 #include "tree-cfg.h"
107 #include "tree-phinodes.h"
108 #include "ssa-iterators.h"
109 #include "stringpool.h"
110 #include "tree-ssanames.h"
111 #include "expr.h"
112 #include "tree-dfa.h"
113 #include "tree-ssa.h"
114 #include "tree-pass.h"
115 #include "plugin-api.h"
116 #include "ipa-ref.h"
117 #include "cgraph.h"
118 #include "ipa-prop.h"
119 #include "statistics.h"
120 #include "params.h"
121 #include "target.h"
122 #include "flags.h"
123 #include "dbgcnt.h"
124 #include "tree-inline.h"
125 #include "gimple-pretty-print.h"
126 #include "ipa-inline.h"
127 #include "ipa-utils.h"
128 #include "builtins.h"
129
130 /* Enumeration of all aggregate reductions we can do. */
131 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
132 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
133 SRA_MODE_INTRA }; /* late intraprocedural SRA */
134
135 /* Global variable describing which aggregate reduction we are performing at
136 the moment. */
137 static enum sra_mode sra_mode;
138
139 struct assign_link;
140
141 /* ACCESS represents each access to an aggregate variable (as a whole or a
142 part). It can also represent a group of accesses that refer to exactly the
143 same fragment of an aggregate (i.e. those that have exactly the same offset
144 and size). Such representatives for a single aggregate, once determined,
145 are linked in a linked list and have the group fields set.
146
147 Moreover, when doing intraprocedural SRA, a tree is built from those
148 representatives (by the means of first_child and next_sibling pointers), in
149 which all items in a subtree are "within" the root, i.e. their offset is
150 greater or equal to offset of the root and offset+size is smaller or equal
151 to offset+size of the root. Children of an access are sorted by offset.
152
153 Note that accesses to parts of vector and complex number types are always
154 represented by an access to the whole complex number or vector. It is a
155 duty of the modifying functions to replace them appropriately. */
156
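/* A purely illustrative example of the tree described above: for

     struct inner { int x; int y; };
     struct outer { struct inner i; int z; } s;

   group representatives for accesses to s.i, s.i.y and s.z would have
   <offset, size> pairs <0, 64>, <32, 32> and <64, 32> (assuming 32-bit int).
   The representative of s.i.y would hang off the one for s.i via its
   first_child pointer, while the representative of s.z would follow the one
   for s.i as its next_sibling at the top level.  */
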
157 struct access
158 {
159 /* Values returned by `get_ref_base_and_extent' for each component reference.
160 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
161 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
162 HOST_WIDE_INT offset;
163 HOST_WIDE_INT size;
164 tree base;
165
166 /* Expression. It is context dependent so do not use it to create new
167 expressions to access the original aggregate. See PR 42154 for a
168 testcase. */
169 tree expr;
170 /* Type. */
171 tree type;
172
173 /* The statement this access belongs to. */
174 gimple stmt;
175
176 /* Next group representative for this aggregate. */
177 struct access *next_grp;
178
179 /* Pointer to the group representative. Pointer to itself if the struct is
180 the representative. */
181 struct access *group_representative;
182
183 /* If this access has any children (in terms of the definition above), this
184 points to the first one. */
185 struct access *first_child;
186
187 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
188 described above. In IPA-SRA this is a pointer to the next access
189 belonging to the same group (having the same representative). */
190 struct access *next_sibling;
191
192 /* Pointers to the first and last element in the linked list of assign
193 links. */
194 struct assign_link *first_link, *last_link;
195
196 /* Pointer to the next access in the work queue. */
197 struct access *next_queued;
198
199 /* Replacement variable for this access "region." Never to be accessed
200 directly, always only by the means of get_access_replacement() and only
201 when grp_to_be_replaced flag is set. */
202 tree replacement_decl;
203
204 /* Is this particular access a write access? */
205 unsigned write : 1;
206
207 /* Is this access an access to a non-addressable field? */
208 unsigned non_addressable : 1;
209
210 /* Is this access currently in the work queue? */
211 unsigned grp_queued : 1;
212
213 /* Does this group contain a write access? This flag is propagated down the
214 access tree. */
215 unsigned grp_write : 1;
216
217 /* Does this group contain a read access? This flag is propagated down the
218 access tree. */
219 unsigned grp_read : 1;
220
221 /* Does this group contain a read access that comes from an assignment
222 statement? This flag is propagated down the access tree. */
223 unsigned grp_assignment_read : 1;
224
225 /* Does this group contain a write access that comes from an assignment
226 statement? This flag is propagated down the access tree. */
227 unsigned grp_assignment_write : 1;
228
229 /* Does this group contain a read access through a scalar type? This flag is
230 not propagated in the access tree in any direction. */
231 unsigned grp_scalar_read : 1;
232
233 /* Does this group contain a write access through a scalar type? This flag
234 is not propagated in the access tree in any direction. */
235 unsigned grp_scalar_write : 1;
236
237 /* Is this access an artificial one created to scalarize some record
238 entirely? */
239 unsigned grp_total_scalarization : 1;
240
241 /* Other passes of the analysis use this bit to make function
242 analyze_access_subtree create scalar replacements for this group if
243 possible. */
244 unsigned grp_hint : 1;
245
246 /* Is the subtree rooted in this access fully covered by scalar
247 replacements? */
248 unsigned grp_covered : 1;
249
250 /* If set to true, this access and all below it in an access tree must not be
251 scalarized. */
252 unsigned grp_unscalarizable_region : 1;
253
254 /* Whether data have been written to parts of the aggregate covered by this
255 access which is not to be scalarized. This flag is propagated up in the
256 access tree. */
257 unsigned grp_unscalarized_data : 1;
258
259 /* Does this access and/or group contain a write access through a
260 BIT_FIELD_REF? */
261 unsigned grp_partial_lhs : 1;
262
263 /* Set when a scalar replacement should be created for this variable. */
264 unsigned grp_to_be_replaced : 1;
265
266 /* Set when we want a replacement for the sole purpose of having it in
267 generated debug statements. */
268 unsigned grp_to_be_debug_replaced : 1;
269
270 /* Should TREE_NO_WARNING of a replacement be set? */
271 unsigned grp_no_warning : 1;
272
273 /* Is it possible that the group refers to data which might be (directly or
274 otherwise) modified? */
275 unsigned grp_maybe_modified : 1;
276
277 /* Set when this is a representative of a pointer to scalar (i.e. by
278 reference) parameter which we consider for turning into a plain scalar
279 (i.e. a by value parameter). */
280 unsigned grp_scalar_ptr : 1;
281
282 /* Set when we discover that this pointer is not safe to dereference in the
283 caller. */
284 unsigned grp_not_necessarilly_dereferenced : 1;
285 };
286
287 typedef struct access *access_p;
288
289
290 /* Alloc pool for allocating access structures. */
291 static alloc_pool access_pool;
292
293 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
294 are used to propagate subaccesses from rhs to lhs as long as they don't
295 conflict with what is already there. */
296 struct assign_link
297 {
298 struct access *lacc, *racc;
299 struct assign_link *next;
300 };
301
302 /* Alloc pool for allocating assign link structures. */
303 static alloc_pool link_pool;
304
305 /* Base (tree) -> Vector (vec<access_p> *) map. */
306 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
307
308 /* Candidate hash table helpers. */
309
310 struct uid_decl_hasher : typed_noop_remove <tree_node>
311 {
312 typedef tree_node value_type;
313 typedef tree_node compare_type;
314 static inline hashval_t hash (const value_type *);
315 static inline bool equal (const value_type *, const compare_type *);
316 };
317
318 /* Hash a tree in a uid_decl_map. */
319
320 inline hashval_t
321 uid_decl_hasher::hash (const value_type *item)
322 {
323 return item->decl_minimal.uid;
324 }
325
326 /* Return true if the DECL_UIDs of both trees are equal. */
327
328 inline bool
329 uid_decl_hasher::equal (const value_type *a, const compare_type *b)
330 {
331 return (a->decl_minimal.uid == b->decl_minimal.uid);
332 }
333
334 /* Set of candidates. */
335 static bitmap candidate_bitmap;
336 static hash_table<uid_decl_hasher> *candidates;
337
338 /* For a candidate UID, return the candidate's decl. */
339
340 static inline tree
341 candidate (unsigned uid)
342 {
343 tree_node t;
344 t.decl_minimal.uid = uid;
345 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
346 }
347
348 /* Bitmap of candidates which we should try to entirely scalarize away and
349 those which cannot be (because they are, and need to be, used as a whole). */
350 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
351
352 /* Obstack for creation of fancy names. */
353 static struct obstack name_obstack;
354
355 /* Head of a linked list of accesses that need to have their subaccesses
356 propagated to their assignment counterparts. */
357 static struct access *work_queue_head;
358
359 /* Number of parameters of the analyzed function when doing early ipa SRA. */
360 static int func_param_count;
361
362 /* scan_function sets the following to true if it encounters a call to
363 __builtin_apply_args. */
364 static bool encountered_apply_args;
365
366 /* Set by scan_function when it finds a recursive call. */
367 static bool encountered_recursive_call;
368
369 /* Set by scan_function when it finds a recursive call with fewer actual
370 arguments than formal parameters. */
371 static bool encountered_unchangable_recursive_call;
372
373 /* This is a table in which for each basic block and parameter there is a
374 distance (offset + size) in that parameter which is dereferenced and
375 accessed in that BB. */
376 static HOST_WIDE_INT *bb_dereferences;
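/* For instance (illustration only), with func_param_count == 3, the entry
   describing parameter number 1 in basic block number 5 lives at
   bb_dereferences[5 * func_param_count + 1]; see mark_parm_dereference below
   for the code that fills this table in.  */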
377 /* Bitmap of BBs that can cause the function to "stop" progressing by
378 returning, throwing externally, looping infinitely or calling a function
379 which might abort, etc. */
380 static bitmap final_bbs;
381
382 /* Representative of no accesses at all. */
383 static struct access no_accesses_representant;
384
385 /* Predicate to test the special value. */
386
387 static inline bool
388 no_accesses_p (struct access *access)
389 {
390 return access == &no_accesses_representant;
391 }
392
393 /* Various statistics gathered during the SRA passes. */
396
397 static struct
398 {
399 /* Number of processed aggregates is readily available in
400 analyze_all_variable_accesses and so is not stored here. */
401
402 /* Number of created scalar replacements. */
403 int replacements;
404
405 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
406 expression. */
407 int exprs;
408
409 /* Number of statements created by generate_subtree_copies. */
410 int subtree_copies;
411
412 /* Number of statements created by load_assign_lhs_subreplacements. */
413 int subreplacements;
414
415 /* Number of times sra_modify_assign has deleted a statement. */
416 int deleted;
417
418 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
419 RHS separately due to type conversions or nonexistent matching
420 references. */
421 int separate_lhs_rhs_handling;
422
423 /* Number of parameters that were removed because they were unused. */
424 int deleted_unused_parameters;
425
426 /* Number of scalars passed as parameters by reference that have been
427 converted to be passed by value. */
428 int scalar_by_ref_to_by_val;
429
430 /* Number of aggregate parameters that were replaced by one or more of their
431 components. */
432 int aggregate_params_reduced;
433
434 /* Number of components created when splitting aggregate parameters. */
435 int param_reductions_created;
436 } sra_stats;
437
/* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
representative fields are dumped, otherwise those which only describe the
individual access are. */
438 static void
439 dump_access (FILE *f, struct access *access, bool grp)
440 {
441 fprintf (f, "access { ");
442 fprintf (f, "base = (%d)'", DECL_UID (access->base));
443 print_generic_expr (f, access->base, 0);
444 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
445 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
446 fprintf (f, ", expr = ");
447 print_generic_expr (f, access->expr, 0);
448 fprintf (f, ", type = ");
449 print_generic_expr (f, access->type, 0);
450 if (grp)
451 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
452 "grp_assignment_write = %d, grp_scalar_read = %d, "
453 "grp_scalar_write = %d, grp_total_scalarization = %d, "
454 "grp_hint = %d, grp_covered = %d, "
455 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
456 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
457 "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
458 "grp_not_necessarilly_dereferenced = %d\n",
459 access->grp_read, access->grp_write, access->grp_assignment_read,
460 access->grp_assignment_write, access->grp_scalar_read,
461 access->grp_scalar_write, access->grp_total_scalarization,
462 access->grp_hint, access->grp_covered,
463 access->grp_unscalarizable_region, access->grp_unscalarized_data,
464 access->grp_partial_lhs, access->grp_to_be_replaced,
465 access->grp_to_be_debug_replaced, access->grp_maybe_modified,
466 access->grp_not_necessarilly_dereferenced);
467 else
468 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
469 "grp_partial_lhs = %d\n",
470 access->write, access->grp_total_scalarization,
471 access->grp_partial_lhs);
472 }
473
474 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
475
476 static void
477 dump_access_tree_1 (FILE *f, struct access *access, int level)
478 {
479 do
480 {
481 int i;
482
483 for (i = 0; i < level; i++)
484 fputs ("* ", f);
485
486 dump_access (f, access, true);
487
488 if (access->first_child)
489 dump_access_tree_1 (f, access->first_child, level + 1);
490
491 access = access->next_sibling;
492 }
493 while (access);
494 }
495
496 /* Dump all access trees for a variable, given the pointer to the first root in
497 ACCESS. */
498
499 static void
500 dump_access_tree (FILE *f, struct access *access)
501 {
502 for (; access; access = access->next_grp)
503 dump_access_tree_1 (f, access, 0);
504 }
505
506 /* Return true iff ACC is non-NULL and has subaccesses. */
507
508 static inline bool
509 access_has_children_p (struct access *acc)
510 {
511 return acc && acc->first_child;
512 }
513
514 /* Return true iff ACC is (partly) covered by at least one replacement. */
515
516 static bool
517 access_has_replacements_p (struct access *acc)
518 {
519 struct access *child;
520 if (acc->grp_to_be_replaced)
521 return true;
522 for (child = acc->first_child; child; child = child->next_sibling)
523 if (access_has_replacements_p (child))
524 return true;
525 return false;
526 }
527
528 /* Return a vector of pointers to accesses for the variable given in BASE or
529 NULL if there is none. */
530
531 static vec<access_p> *
532 get_base_access_vector (tree base)
533 {
534 return base_access_vec->get (base);
535 }
536
537 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
538 in ACCESS. Return NULL if it cannot be found. */
539
540 static struct access *
541 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
542 HOST_WIDE_INT size)
543 {
544 while (access && (access->offset != offset || access->size != size))
545 {
546 struct access *child = access->first_child;
547
548 while (child && (child->offset + child->size <= offset))
549 child = child->next_sibling;
550 access = child;
551 }
552
553 return access;
554 }
555
556 /* Return the first group representative for BASE or NULL if none exists. */
557
558 static struct access *
559 get_first_repr_for_decl (tree base)
560 {
561 vec<access_p> *access_vec;
562
563 access_vec = get_base_access_vector (base);
564 if (!access_vec)
565 return NULL;
566
567 return (*access_vec)[0];
568 }
569
570 /* Find an access representative for the variable BASE and given OFFSET and
571 SIZE. Requires that access trees have already been built. Return NULL if
572 it cannot be found. */
573
574 static struct access *
575 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
576 HOST_WIDE_INT size)
577 {
578 struct access *access;
579
580 access = get_first_repr_for_decl (base);
581 while (access && (access->offset + access->size <= offset))
582 access = access->next_grp;
583 if (!access)
584 return NULL;
585
586 return find_access_in_subtree (access, offset, size);
587 }
588
589 /* Add LINK to the linked list of assign links of RACC. */
590 static void
591 add_link_to_rhs (struct access *racc, struct assign_link *link)
592 {
593 gcc_assert (link->racc == racc);
594
595 if (!racc->first_link)
596 {
597 gcc_assert (!racc->last_link);
598 racc->first_link = link;
599 }
600 else
601 racc->last_link->next = link;
602
603 racc->last_link = link;
604 link->next = NULL;
605 }
606
607 /* Move all link structures in their linked list in OLD_RACC to the linked list
608 in NEW_RACC. */
609 static void
610 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
611 {
612 if (!old_racc->first_link)
613 {
614 gcc_assert (!old_racc->last_link);
615 return;
616 }
617
618 if (new_racc->first_link)
619 {
620 gcc_assert (!new_racc->last_link->next);
621 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
622
623 new_racc->last_link->next = old_racc->first_link;
624 new_racc->last_link = old_racc->last_link;
625 }
626 else
627 {
628 gcc_assert (!new_racc->last_link);
629
630 new_racc->first_link = old_racc->first_link;
631 new_racc->last_link = old_racc->last_link;
632 }
633 old_racc->first_link = old_racc->last_link = NULL;
634 }
635
636 /* Add ACCESS to the work queue (which is actually a stack). */
637
638 static void
639 add_access_to_work_queue (struct access *access)
640 {
641 if (!access->grp_queued)
642 {
643 gcc_assert (!access->next_queued);
644 access->next_queued = work_queue_head;
645 access->grp_queued = 1;
646 work_queue_head = access;
647 }
648 }
649
650 /* Pop an access from the work queue, and return it, assuming there is one. */
651
652 static struct access *
653 pop_access_from_work_queue (void)
654 {
655 struct access *access = work_queue_head;
656
657 work_queue_head = access->next_queued;
658 access->next_queued = NULL;
659 access->grp_queued = 0;
660 return access;
661 }
662
663
664 /* Allocate necessary structures. */
665
666 static void
667 sra_initialize (void)
668 {
669 candidate_bitmap = BITMAP_ALLOC (NULL);
670 candidates = new hash_table<uid_decl_hasher>
671 (vec_safe_length (cfun->local_decls) / 2);
672 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
673 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
674 gcc_obstack_init (&name_obstack);
675 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
676 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
677 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
678 memset (&sra_stats, 0, sizeof (sra_stats));
679 encountered_apply_args = false;
680 encountered_recursive_call = false;
681 encountered_unchangable_recursive_call = false;
682 }
683
684 /* Deallocate all general structures. */
685
686 static void
687 sra_deinitialize (void)
688 {
689 BITMAP_FREE (candidate_bitmap);
690 delete candidates;
691 candidates = NULL;
692 BITMAP_FREE (should_scalarize_away_bitmap);
693 BITMAP_FREE (cannot_scalarize_away_bitmap);
694 free_alloc_pool (access_pool);
695 free_alloc_pool (link_pool);
696 obstack_free (&name_obstack, NULL);
697
698 delete base_access_vec;
699 }
700
701 /* Remove DECL from candidates for SRA and write REASON to the dump file if
702 there is one. */
703 static void
704 disqualify_candidate (tree decl, const char *reason)
705 {
706 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
707 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
708
709 if (dump_file && (dump_flags & TDF_DETAILS))
710 {
711 fprintf (dump_file, "! Disqualifying ");
712 print_generic_expr (dump_file, decl, 0);
713 fprintf (dump_file, " - %s\n", reason);
714 }
715 }
716
717 /* Return true iff the type contains a field or an element which does not allow
718 scalarization. */
719
720 static bool
721 type_internals_preclude_sra_p (tree type, const char **msg)
722 {
723 tree fld;
724 tree et;
725
726 switch (TREE_CODE (type))
727 {
728 case RECORD_TYPE:
729 case UNION_TYPE:
730 case QUAL_UNION_TYPE:
731 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
732 if (TREE_CODE (fld) == FIELD_DECL)
733 {
734 tree ft = TREE_TYPE (fld);
735
736 if (TREE_THIS_VOLATILE (fld))
737 {
738 *msg = "volatile structure field";
739 return true;
740 }
741 if (!DECL_FIELD_OFFSET (fld))
742 {
743 *msg = "no structure field offset";
744 return true;
745 }
746 if (!DECL_SIZE (fld))
747 {
748 *msg = "zero structure field size";
749 return true;
750 }
751 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
752 {
753 *msg = "structure field offset not fixed";
754 return true;
755 }
756 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
757 {
758 *msg = "structure field size not fixed";
759 return true;
760 }
761 if (!tree_fits_shwi_p (bit_position (fld)))
762 {
763 *msg = "structure field size too big";
764 return true;
765 }
766 if (AGGREGATE_TYPE_P (ft)
767 && int_bit_position (fld) % BITS_PER_UNIT != 0)
768 {
769 *msg = "structure field is bit field";
770 return true;
771 }
772
773 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
774 return true;
775 }
776
777 return false;
778
779 case ARRAY_TYPE:
780 et = TREE_TYPE (type);
781
782 if (TYPE_VOLATILE (et))
783 {
784 *msg = "element type is volatile";
785 return true;
786 }
787
788 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
789 return true;
790
791 return false;
792
793 default:
794 return false;
795 }
796 }
797
798 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
799 base variable if it is. Return T if it is not an SSA_NAME. */
800
801 static tree
802 get_ssa_base_param (tree t)
803 {
804 if (TREE_CODE (t) == SSA_NAME)
805 {
806 if (SSA_NAME_IS_DEFAULT_DEF (t))
807 return SSA_NAME_VAR (t);
808 else
809 return NULL_TREE;
810 }
811 return t;
812 }
813
814 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
815 belongs to, unless the BB has already been marked as potentially
816 final. */
817
818 static void
819 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
820 {
821 basic_block bb = gimple_bb (stmt);
822 int idx, parm_index = 0;
823 tree parm;
824
825 if (bitmap_bit_p (final_bbs, bb->index))
826 return;
827
828 for (parm = DECL_ARGUMENTS (current_function_decl);
829 parm && parm != base;
830 parm = DECL_CHAIN (parm))
831 parm_index++;
832
833 gcc_assert (parm_index < func_param_count);
834
835 idx = bb->index * func_param_count + parm_index;
836 if (bb_dereferences[idx] < dist)
837 bb_dereferences[idx] = dist;
838 }
839
840 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
841 the three fields. Also add it to the vector of accesses corresponding to
842 the base. Finally, return the new access. */
843
844 static struct access *
845 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
846 {
847 struct access *access;
848
849 access = (struct access *) pool_alloc (access_pool);
850 memset (access, 0, sizeof (struct access));
851 access->base = base;
852 access->offset = offset;
853 access->size = size;
854
855 base_access_vec->get_or_insert (base).safe_push (access);
856
857 return access;
858 }
859
860 /* Create and insert access for EXPR. Return created access, or NULL if it is
861 not possible. */
862
863 static struct access *
864 create_access (tree expr, gimple stmt, bool write)
865 {
866 struct access *access;
867 HOST_WIDE_INT offset, size, max_size;
868 tree base = expr;
869 bool ptr, unscalarizable_region = false;
870
871 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
872
873 if (sra_mode == SRA_MODE_EARLY_IPA
874 && TREE_CODE (base) == MEM_REF)
875 {
876 base = get_ssa_base_param (TREE_OPERAND (base, 0));
877 if (!base)
878 return NULL;
879 ptr = true;
880 }
881 else
882 ptr = false;
883
884 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
885 return NULL;
886
887 if (sra_mode == SRA_MODE_EARLY_IPA)
888 {
889 if (size < 0 || size != max_size)
890 {
891 disqualify_candidate (base, "Encountered a variable sized access.");
892 return NULL;
893 }
894 if (TREE_CODE (expr) == COMPONENT_REF
895 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
896 {
897 disqualify_candidate (base, "Encountered a bit-field access.");
898 return NULL;
899 }
900 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
901
902 if (ptr)
903 mark_parm_dereference (base, offset + size, stmt);
904 }
905 else
906 {
907 if (size != max_size)
908 {
909 size = max_size;
910 unscalarizable_region = true;
911 }
912 if (size < 0)
913 {
914 disqualify_candidate (base, "Encountered an unconstrained access.");
915 return NULL;
916 }
917 }
918
919 access = create_access_1 (base, offset, size);
920 access->expr = expr;
921 access->type = TREE_TYPE (expr);
922 access->write = write;
923 access->grp_unscalarizable_region = unscalarizable_region;
924 access->stmt = stmt;
925
926 if (TREE_CODE (expr) == COMPONENT_REF
927 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
928 access->non_addressable = 1;
929
930 return access;
931 }
932
933
934 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
935 register types or (recursively) records with only these two kinds of fields.
936 It also returns false if any of these records contains a bit-field. */
937
938 static bool
939 type_consists_of_records_p (tree type)
940 {
941 tree fld;
942
943 if (TREE_CODE (type) != RECORD_TYPE)
944 return false;
945
946 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
947 if (TREE_CODE (fld) == FIELD_DECL)
948 {
949 tree ft = TREE_TYPE (fld);
950
951 if (DECL_BIT_FIELD (fld))
952 return false;
953
954 if (!is_gimple_reg_type (ft)
955 && !type_consists_of_records_p (ft))
956 return false;
957 }
958
959 return true;
960 }
961
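/* For example (illustration only), a record such as

     struct { int a; struct { double b; float c; } inner; }

   consists solely of registers and nested records and is accepted by the
   predicate above, whereas records containing a bit-field or an array
   member, e.g.

     struct { int flag : 1; }   or   struct { int buf[4]; }

   are rejected.  */
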
962 /* Create total_scalarization accesses for all scalar type fields in DECL, whose
963 type must be a RECORD_TYPE conforming to type_consists_of_records_p. BASE
964 must be the top-most VAR_DECL representing the variable, OFFSET must be the
965 offset of DECL within BASE. REF must be the memory reference expression for
966 the given decl. */
967
968 static void
969 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
970 tree ref)
971 {
972 tree fld, decl_type = TREE_TYPE (decl);
973
974 for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
975 if (TREE_CODE (fld) == FIELD_DECL)
976 {
977 HOST_WIDE_INT pos = offset + int_bit_position (fld);
978 tree ft = TREE_TYPE (fld);
979 tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
980 NULL_TREE);
981
982 if (is_gimple_reg_type (ft))
983 {
984 struct access *access;
985 HOST_WIDE_INT size;
986
987 size = tree_to_uhwi (DECL_SIZE (fld));
988 access = create_access_1 (base, pos, size);
989 access->expr = nref;
990 access->type = ft;
991 access->grp_total_scalarization = 1;
992 /* Accesses for intraprocedural SRA can have their stmt NULL. */
993 }
994 else
995 completely_scalarize_record (base, fld, pos, nref);
996 }
997 }
998
999 /* Create total_scalarization accesses for all scalar type fields in VAR and
1000 for VAR as a whole. VAR must be of a RECORD_TYPE conforming to
1001 type_consists_of_records_p. */
1002
1003 static void
1004 completely_scalarize_var (tree var)
1005 {
1006 HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
1007 struct access *access;
1008
1009 access = create_access_1 (var, 0, size);
1010 access->expr = var;
1011 access->type = TREE_TYPE (var);
1012 access->grp_total_scalarization = 1;
1013
1014 completely_scalarize_record (var, var, 0, var);
1015 }
1016
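/* Continuing the illustrative example used near the top of this file: for

     struct outer { struct inner { int x; int y; } i; int z; } s;

   completely_scalarize_var creates an access covering S as a whole
   (<offset, size> pair <0, 96> with 32-bit int) plus one access per scalar
   leaf: <0, 32> for s.i.x, <32, 32> for s.i.y and <64, 32> for s.z, all of
   them marked grp_total_scalarization.  */
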
1017 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it. */
1018
1019 static inline bool
1020 contains_view_convert_expr_p (const_tree ref)
1021 {
1022 while (handled_component_p (ref))
1023 {
1024 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1025 return true;
1026 ref = TREE_OPERAND (ref, 0);
1027 }
1028
1029 return false;
1030 }
1031
1032 /* Search the given tree for a declaration by skipping handled components and
1033 exclude it from the candidates. */
1034
1035 static void
1036 disqualify_base_of_expr (tree t, const char *reason)
1037 {
1038 t = get_base_address (t);
1039 if (sra_mode == SRA_MODE_EARLY_IPA
1040 && TREE_CODE (t) == MEM_REF)
1041 t = get_ssa_base_param (TREE_OPERAND (t, 0));
1042
1043 if (t && DECL_P (t))
1044 disqualify_candidate (t, reason);
1045 }
1046
1047 /* Scan expression EXPR and create access structures for all accesses to
1048 candidates for scalarization. Return the created access or NULL if none is
1049 created. */
1050
1051 static struct access *
1052 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
1053 {
1054 struct access *ret = NULL;
1055 bool partial_ref;
1056
1057 if (TREE_CODE (expr) == BIT_FIELD_REF
1058 || TREE_CODE (expr) == IMAGPART_EXPR
1059 || TREE_CODE (expr) == REALPART_EXPR)
1060 {
1061 expr = TREE_OPERAND (expr, 0);
1062 partial_ref = true;
1063 }
1064 else
1065 partial_ref = false;
1066
1067 /* We need to dive through V_C_Es in order to get the size of their operand
1068 and not the result type. Ada produces such statements. We are also
1069 capable of handling the topmost V_C_E but not any of those buried in other
1070 handled components. */
1071 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1072 expr = TREE_OPERAND (expr, 0);
1073
1074 if (contains_view_convert_expr_p (expr))
1075 {
1076 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1077 "component.");
1078 return NULL;
1079 }
1080 if (TREE_THIS_VOLATILE (expr))
1081 {
1082 disqualify_base_of_expr (expr, "part of a volatile reference.");
1083 return NULL;
1084 }
1085
1086 switch (TREE_CODE (expr))
1087 {
1088 case MEM_REF:
1089 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1090 && sra_mode != SRA_MODE_EARLY_IPA)
1091 return NULL;
1092 /* fall through */
1093 case VAR_DECL:
1094 case PARM_DECL:
1095 case RESULT_DECL:
1096 case COMPONENT_REF:
1097 case ARRAY_REF:
1098 case ARRAY_RANGE_REF:
1099 ret = create_access (expr, stmt, write);
1100 break;
1101
1102 default:
1103 break;
1104 }
1105
1106 if (write && partial_ref && ret)
1107 ret->grp_partial_lhs = 1;
1108
1109 return ret;
1110 }
1111
1112 /* Scan expression EXPR and create access structures for all accesses to
1113 candidates for scalarization. Return true if any access has been inserted.
1114 STMT must be the statement from which the expression is taken, WRITE must be
1115 true if the expression is a store and false otherwise. */
1116
1117 static bool
1118 build_access_from_expr (tree expr, gimple stmt, bool write)
1119 {
1120 struct access *access;
1121
1122 access = build_access_from_expr_1 (expr, stmt, write);
1123 if (access)
1124 {
1125 /* This means the aggregate is accessed as a whole in a way other than an
1126 assign statement and thus cannot be removed even if we had a scalar
1127 replacement for everything. */
1128 if (cannot_scalarize_away_bitmap)
1129 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1130 return true;
1131 }
1132 return false;
1133 }
1134
1135 /* Return the single non-EH successor edge of BB or NULL if there is none or
1136 more than one. */
1137
1138 static edge
1139 single_non_eh_succ (basic_block bb)
1140 {
1141 edge e, res = NULL;
1142 edge_iterator ei;
1143
1144 FOR_EACH_EDGE (e, ei, bb->succs)
1145 if (!(e->flags & EDGE_EH))
1146 {
1147 if (res)
1148 return NULL;
1149 res = e;
1150 }
1151
1152 return res;
1153 }
1154
1155 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1156 there is no alternative spot to put statements that SRA might need to
1157 generate after it. The spot we are looking for is an edge leading to a
1158 single non-EH successor, if it exists and is indeed single. RHS may be
1159 NULL, in that case ignore it. */
1160
1161 static bool
1162 disqualify_if_bad_bb_terminating_stmt (gimple stmt, tree lhs, tree rhs)
1163 {
1164 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1165 && stmt_ends_bb_p (stmt))
1166 {
1167 if (single_non_eh_succ (gimple_bb (stmt)))
1168 return false;
1169
1170 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1171 if (rhs)
1172 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1173 return true;
1174 }
1175 return false;
1176 }
1177
1178 /* Scan expressions occurring in STMT, create access structures for all accesses
1179 to candidates for scalarization and remove those candidates which occur in
1180 statements or expressions that prevent them from being split apart. Return
1181 true if any access has been inserted. */
1182
1183 static bool
1184 build_accesses_from_assign (gimple stmt)
1185 {
1186 tree lhs, rhs;
1187 struct access *lacc, *racc;
1188
1189 if (!gimple_assign_single_p (stmt)
1190 /* Scope clobbers don't influence scalarization. */
1191 || gimple_clobber_p (stmt))
1192 return false;
1193
1194 lhs = gimple_assign_lhs (stmt);
1195 rhs = gimple_assign_rhs1 (stmt);
1196
1197 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1198 return false;
1199
1200 racc = build_access_from_expr_1 (rhs, stmt, false);
1201 lacc = build_access_from_expr_1 (lhs, stmt, true);
1202
1203 if (lacc)
1204 lacc->grp_assignment_write = 1;
1205
1206 if (racc)
1207 {
1208 racc->grp_assignment_read = 1;
1209 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1210 && !is_gimple_reg_type (racc->type))
1211 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1212 }
1213
1214 if (lacc && racc
1215 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1216 && !lacc->grp_unscalarizable_region
1217 && !racc->grp_unscalarizable_region
1218 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1219 && lacc->size == racc->size
1220 && useless_type_conversion_p (lacc->type, racc->type))
1221 {
1222 struct assign_link *link;
1223
1224 link = (struct assign_link *) pool_alloc (link_pool);
1225 memset (link, 0, sizeof (struct assign_link));
1226
1227 link->lacc = lacc;
1228 link->racc = racc;
1229
1230 add_link_to_rhs (racc, link);
1231 }
1232
1233 return lacc || racc;
1234 }
1235
1236 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1237 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1238
1239 static bool
1240 asm_visit_addr (gimple, tree op, tree, void *)
1241 {
1242 op = get_base_address (op);
1243 if (op
1244 && DECL_P (op))
1245 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1246
1247 return false;
1248 }
1249
1250 /* Return true iff callsite CALL has at least as many actual arguments as there
1251 are formal parameters of the function currently processed by IPA-SRA and
1252 their types match. */
1253
1254 static inline bool
1255 callsite_arguments_match_p (gimple call)
1256 {
1257 if (gimple_call_num_args (call) < (unsigned) func_param_count)
1258 return false;
1259
1260 tree parm;
1261 int i;
1262 for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
1263 parm;
1264 parm = DECL_CHAIN (parm), i++)
1265 {
1266 tree arg = gimple_call_arg (call, i);
1267 if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
1268 return false;
1269 }
1270 return true;
1271 }
1272
1273 /* Scan function and look for interesting expressions and create access
1274 structures for them. Return true iff any access is created. */
1275
1276 static bool
1277 scan_function (void)
1278 {
1279 basic_block bb;
1280 bool ret = false;
1281
1282 FOR_EACH_BB_FN (bb, cfun)
1283 {
1284 gimple_stmt_iterator gsi;
1285 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1286 {
1287 gimple stmt = gsi_stmt (gsi);
1288 tree t;
1289 unsigned i;
1290
1291 if (final_bbs && stmt_can_throw_external (stmt))
1292 bitmap_set_bit (final_bbs, bb->index);
1293 switch (gimple_code (stmt))
1294 {
1295 case GIMPLE_RETURN:
1296 t = gimple_return_retval (stmt);
1297 if (t != NULL_TREE)
1298 ret |= build_access_from_expr (t, stmt, false);
1299 if (final_bbs)
1300 bitmap_set_bit (final_bbs, bb->index);
1301 break;
1302
1303 case GIMPLE_ASSIGN:
1304 ret |= build_accesses_from_assign (stmt);
1305 break;
1306
1307 case GIMPLE_CALL:
1308 for (i = 0; i < gimple_call_num_args (stmt); i++)
1309 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1310 stmt, false);
1311
1312 if (sra_mode == SRA_MODE_EARLY_IPA)
1313 {
1314 tree dest = gimple_call_fndecl (stmt);
1315 int flags = gimple_call_flags (stmt);
1316
1317 if (dest)
1318 {
1319 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1320 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1321 encountered_apply_args = true;
1322 if (recursive_call_p (current_function_decl, dest))
1323 {
1324 encountered_recursive_call = true;
1325 if (!callsite_arguments_match_p (stmt))
1326 encountered_unchangable_recursive_call = true;
1327 }
1328 }
1329
1330 if (final_bbs
1331 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1332 bitmap_set_bit (final_bbs, bb->index);
1333 }
1334
1335 t = gimple_call_lhs (stmt);
1336 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1337 ret |= build_access_from_expr (t, stmt, true);
1338 break;
1339
1340 case GIMPLE_ASM:
1341 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1342 asm_visit_addr);
1343 if (final_bbs)
1344 bitmap_set_bit (final_bbs, bb->index);
1345
1346 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1347 {
1348 t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1349 ret |= build_access_from_expr (t, stmt, false);
1350 }
1351 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1352 {
1353 t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1354 ret |= build_access_from_expr (t, stmt, true);
1355 }
1356 break;
1357
1358 default:
1359 break;
1360 }
1361 }
1362 }
1363
1364 return ret;
1365 }
1366
1367 /* Helper of QSORT function. There are pointers to accesses in the array. An
1368 access is considered smaller than another if it has smaller offset or if the
1369 offsets are the same but its size is bigger. */
1370
1371 static int
1372 compare_access_positions (const void *a, const void *b)
1373 {
1374 const access_p *fp1 = (const access_p *) a;
1375 const access_p *fp2 = (const access_p *) b;
1376 const access_p f1 = *fp1;
1377 const access_p f2 = *fp2;
1378
1379 if (f1->offset != f2->offset)
1380 return f1->offset < f2->offset ? -1 : 1;
1381
1382 if (f1->size == f2->size)
1383 {
1384 if (f1->type == f2->type)
1385 return 0;
1386 /* Put any non-aggregate type before any aggregate type. */
1387 else if (!is_gimple_reg_type (f1->type)
1388 && is_gimple_reg_type (f2->type))
1389 return 1;
1390 else if (is_gimple_reg_type (f1->type)
1391 && !is_gimple_reg_type (f2->type))
1392 return -1;
1393 /* Put any complex or vector type before any other scalar type. */
1394 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1395 && TREE_CODE (f1->type) != VECTOR_TYPE
1396 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1397 || TREE_CODE (f2->type) == VECTOR_TYPE))
1398 return 1;
1399 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1400 || TREE_CODE (f1->type) == VECTOR_TYPE)
1401 && TREE_CODE (f2->type) != COMPLEX_TYPE
1402 && TREE_CODE (f2->type) != VECTOR_TYPE)
1403 return -1;
1404 /* Put the integral type with the bigger precision first. */
1405 else if (INTEGRAL_TYPE_P (f1->type)
1406 && INTEGRAL_TYPE_P (f2->type))
1407 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1408 /* Put any integral type with non-full precision last. */
1409 else if (INTEGRAL_TYPE_P (f1->type)
1410 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1411 != TYPE_PRECISION (f1->type)))
1412 return 1;
1413 else if (INTEGRAL_TYPE_P (f2->type)
1414 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1415 != TYPE_PRECISION (f2->type)))
1416 return -1;
1417 /* Stabilize the sort. */
1418 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1419 }
1420
1421 /* We want the bigger accesses first, thus the opposite operator in the next
1422 line: */
1423 return f1->size > f2->size ? -1 : 1;
1424 }
1425
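/* To illustrate the comparator above with arbitrarily chosen accesses:
   <offset, size> pairs <0, 64>, <0, 32> and <32, 32> sort to exactly that
   order -- smaller offsets first and, at equal offsets, bigger sizes first,
   so that an access covering a region always precedes the accesses nested
   within it.  */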
1426
1427 /* Append a name of the declaration to the name obstack. A helper function for
1428 make_fancy_name. */
1429
1430 static void
1431 make_fancy_decl_name (tree decl)
1432 {
1433 char buffer[32];
1434
1435 tree name = DECL_NAME (decl);
1436 if (name)
1437 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1438 IDENTIFIER_LENGTH (name));
1439 else
1440 {
1441 sprintf (buffer, "D%u", DECL_UID (decl));
1442 obstack_grow (&name_obstack, buffer, strlen (buffer));
1443 }
1444 }
1445
1446 /* Helper for make_fancy_name. */
1447
1448 static void
1449 make_fancy_name_1 (tree expr)
1450 {
1451 char buffer[32];
1452 tree index;
1453
1454 if (DECL_P (expr))
1455 {
1456 make_fancy_decl_name (expr);
1457 return;
1458 }
1459
1460 switch (TREE_CODE (expr))
1461 {
1462 case COMPONENT_REF:
1463 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1464 obstack_1grow (&name_obstack, '$');
1465 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1466 break;
1467
1468 case ARRAY_REF:
1469 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1470 obstack_1grow (&name_obstack, '$');
1471 /* Arrays with only one element may not have a constant as their
1472 index. */
1473 index = TREE_OPERAND (expr, 1);
1474 if (TREE_CODE (index) != INTEGER_CST)
1475 break;
1476 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1477 obstack_grow (&name_obstack, buffer, strlen (buffer));
1478 break;
1479
1480 case ADDR_EXPR:
1481 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1482 break;
1483
1484 case MEM_REF:
1485 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1486 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1487 {
1488 obstack_1grow (&name_obstack, '$');
1489 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1490 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1491 obstack_grow (&name_obstack, buffer, strlen (buffer));
1492 }
1493 break;
1494
1495 case BIT_FIELD_REF:
1496 case REALPART_EXPR:
1497 case IMAGPART_EXPR:
1498 gcc_unreachable (); /* we treat these as scalars. */
1499 break;
1500 default:
1501 break;
1502 }
1503 }
1504
1505 /* Create a human readable name for a replacement variable based on EXPR. */
1506
1507 static char *
1508 make_fancy_name (tree expr)
1509 {
1510 make_fancy_name_1 (expr);
1511 obstack_1grow (&name_obstack, '\0');
1512 return XOBFINISH (&name_obstack, char *);
1513 }
1514
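/* For example (hypothetical input, for illustration only), for the expression
   p.a[3].b the obstack ends up holding the string "p$a$3$b", which is then
   typically used when naming the scalar replacement variable.  */
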
1515 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1516 EXP_TYPE at the given OFFSET. If BASE is something for which
1517 get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
1518 to insert new statements either before or below the current one as specified
1519 by INSERT_AFTER. This function is not capable of handling bitfields.
1520
1521 BASE must be either a declaration or a memory reference that has correct
1522 alignment information embedded in it (e.g. a pre-existing one in SRA). */
1523
1524 tree
1525 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1526 tree exp_type, gimple_stmt_iterator *gsi,
1527 bool insert_after)
1528 {
1529 tree prev_base = base;
1530 tree off;
1531 tree mem_ref;
1532 HOST_WIDE_INT base_offset;
1533 unsigned HOST_WIDE_INT misalign;
1534 unsigned int align;
1535
1536 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1537 get_object_alignment_1 (base, &align, &misalign);
1538 base = get_addr_base_and_unit_offset (base, &base_offset);
1539
1540 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1541 offset such as array[var_index]. */
1542 if (!base)
1543 {
1544 gimple stmt;
1545 tree tmp, addr;
1546
1547 gcc_checking_assert (gsi);
1548 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1549 addr = build_fold_addr_expr (unshare_expr (prev_base));
1550 STRIP_USELESS_TYPE_CONVERSION (addr);
1551 stmt = gimple_build_assign (tmp, addr);
1552 gimple_set_location (stmt, loc);
1553 if (insert_after)
1554 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1555 else
1556 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1557
1558 off = build_int_cst (reference_alias_ptr_type (prev_base),
1559 offset / BITS_PER_UNIT);
1560 base = tmp;
1561 }
1562 else if (TREE_CODE (base) == MEM_REF)
1563 {
1564 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1565 base_offset + offset / BITS_PER_UNIT);
1566 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1567 base = unshare_expr (TREE_OPERAND (base, 0));
1568 }
1569 else
1570 {
1571 off = build_int_cst (reference_alias_ptr_type (base),
1572 base_offset + offset / BITS_PER_UNIT);
1573 base = build_fold_addr_expr (unshare_expr (base));
1574 }
1575
1576 misalign = (misalign + offset) & (align - 1);
1577 if (misalign != 0)
1578 align = (misalign & -misalign);
1579 if (align < TYPE_ALIGN (exp_type))
1580 exp_type = build_aligned_type (exp_type, align);
1581
1582 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1583 if (TREE_THIS_VOLATILE (prev_base))
1584 TREE_THIS_VOLATILE (mem_ref) = 1;
1585 if (TREE_SIDE_EFFECTS (prev_base))
1586 TREE_SIDE_EFFECTS (mem_ref) = 1;
1587 return mem_ref;
1588 }
1589
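/* As a rough illustration (hypothetical, and the exact printed form depends
   on the tree dumping routines): for a variable s of an aggregate type
   struct S with an int member at byte offset 4, calling build_ref_for_offset
   with OFFSET == 32 and EXP_TYPE == int builds a MEM_REF of type int whose
   address operand is &s and whose constant offset is 4 bytes, shown in dumps
   roughly as MEM[(struct S *)&s + 4B].  */
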
1590 /* Construct a memory reference to a part of an aggregate BASE at the given
1591 OFFSET and of the same type as MODEL. In case this is a reference to a
1592 bit-field, the function will replicate the last component_ref of model's
1593 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1594 build_ref_for_offset. */
1595
1596 static tree
1597 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1598 struct access *model, gimple_stmt_iterator *gsi,
1599 bool insert_after)
1600 {
1601 if (TREE_CODE (model->expr) == COMPONENT_REF
1602 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1603 {
1604 /* This access represents a bit-field. */
1605 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1606
1607 offset -= int_bit_position (fld);
1608 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1609 t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
1610 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1611 NULL_TREE);
1612 }
1613 else
1614 return build_ref_for_offset (loc, base, offset, model->type,
1615 gsi, insert_after);
1616 }
1617
1618 /* Attempt to build a memory reference that we could put into a gimple
1619 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1620 create statements and returns NULL instead. This function also ignores
1621 alignment issues and so its results should never end up in non-debug
1622 statements. */
1623
1624 static tree
1625 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1626 struct access *model)
1627 {
1628 HOST_WIDE_INT base_offset;
1629 tree off;
1630
1631 if (TREE_CODE (model->expr) == COMPONENT_REF
1632 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1633 return NULL_TREE;
1634
1635 base = get_addr_base_and_unit_offset (base, &base_offset);
1636 if (!base)
1637 return NULL_TREE;
1638 if (TREE_CODE (base) == MEM_REF)
1639 {
1640 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1641 base_offset + offset / BITS_PER_UNIT);
1642 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1643 base = unshare_expr (TREE_OPERAND (base, 0));
1644 }
1645 else
1646 {
1647 off = build_int_cst (reference_alias_ptr_type (base),
1648 base_offset + offset / BITS_PER_UNIT);
1649 base = build_fold_addr_expr (unshare_expr (base));
1650 }
1651
1652 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1653 }
1654
1655 /* Construct a memory reference consisting of component_refs and array_refs to
1656 a part of an aggregate *RES (which is of type TYPE). The requested part
1657 should have type EXP_TYPE and be at the given OFFSET. This function might not
1658 succeed, it returns true when it does and only then *RES points to something
1659 meaningful. This function should be used only to build expressions that we
1660 might need to present to the user (e.g. in warnings). In all other situations,
1661 build_ref_for_model or build_ref_for_offset should be used instead. */
1662
1663 static bool
1664 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1665 tree exp_type)
1666 {
1667 while (1)
1668 {
1669 tree fld;
1670 tree tr_size, index, minidx;
1671 HOST_WIDE_INT el_size;
1672
1673 if (offset == 0 && exp_type
1674 && types_compatible_p (exp_type, type))
1675 return true;
1676
1677 switch (TREE_CODE (type))
1678 {
1679 case UNION_TYPE:
1680 case QUAL_UNION_TYPE:
1681 case RECORD_TYPE:
1682 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1683 {
1684 HOST_WIDE_INT pos, size;
1685 tree tr_pos, expr, *expr_ptr;
1686
1687 if (TREE_CODE (fld) != FIELD_DECL)
1688 continue;
1689
1690 tr_pos = bit_position (fld);
1691 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1692 continue;
1693 pos = tree_to_uhwi (tr_pos);
1694 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1695 tr_size = DECL_SIZE (fld);
1696 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1697 continue;
1698 size = tree_to_uhwi (tr_size);
1699 if (size == 0)
1700 {
1701 if (pos != offset)
1702 continue;
1703 }
1704 else if (pos > offset || (pos + size) <= offset)
1705 continue;
1706
1707 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1708 NULL_TREE);
1709 expr_ptr = &expr;
1710 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1711 offset - pos, exp_type))
1712 {
1713 *res = expr;
1714 return true;
1715 }
1716 }
1717 return false;
1718
1719 case ARRAY_TYPE:
1720 tr_size = TYPE_SIZE (TREE_TYPE (type));
1721 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1722 return false;
1723 el_size = tree_to_uhwi (tr_size);
1724
1725 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1726 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1727 return false;
1728 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1729 if (!integer_zerop (minidx))
1730 index = int_const_binop (PLUS_EXPR, index, minidx);
1731 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1732 NULL_TREE, NULL_TREE);
1733 offset = offset % el_size;
1734 type = TREE_TYPE (type);
1735 break;
1736
1737 default:
1738 if (offset != 0)
1739 return false;
1740
1741 if (exp_type)
1742 return false;
1743 else
1744 return true;
1745 }
1746 }
1747 }
1748
1749 /* Return true iff TYPE is stdarg va_list type. */
1750
1751 static inline bool
1752 is_va_list_type (tree type)
1753 {
1754 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1755 }
1756
1757 /* Print a message to the dump file explaining why a variable was rejected. */
1758
1759 static void
1760 reject (tree var, const char *msg)
1761 {
1762 if (dump_file && (dump_flags & TDF_DETAILS))
1763 {
1764 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1765 print_generic_expr (dump_file, var, 0);
1766 fprintf (dump_file, "\n");
1767 }
1768 }
1769
1770 /* Return true if VAR is a candidate for SRA. */
1771
1772 static bool
1773 maybe_add_sra_candidate (tree var)
1774 {
1775 tree type = TREE_TYPE (var);
1776 const char *msg;
1777 tree_node **slot;
1778
1779 if (!AGGREGATE_TYPE_P (type))
1780 {
1781 reject (var, "not aggregate");
1782 return false;
1783 }
1784 if (needs_to_live_in_memory (var))
1785 {
1786 reject (var, "needs to live in memory");
1787 return false;
1788 }
1789 if (TREE_THIS_VOLATILE (var))
1790 {
1791 reject (var, "is volatile");
1792 return false;
1793 }
1794 if (!COMPLETE_TYPE_P (type))
1795 {
1796 reject (var, "has incomplete type");
1797 return false;
1798 }
1799 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1800 {
1801 reject (var, "type size not fixed");
1802 return false;
1803 }
1804 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1805 {
1806 reject (var, "type size is zero");
1807 return false;
1808 }
1809 if (type_internals_preclude_sra_p (type, &msg))
1810 {
1811 reject (var, msg);
1812 return false;
1813 }
1814 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1815 we also want to schedule it rather late. Thus we ignore it in
1816 the early pass. */
1817 (sra_mode == SRA_MODE_EARLY_INTRA
1818 && is_va_list_type (type)))
1819 {
1820 reject (var, "is va_list");
1821 return false;
1822 }
1823
1824 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1825 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1826 *slot = var;
1827
1828 if (dump_file && (dump_flags & TDF_DETAILS))
1829 {
1830 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1831 print_generic_expr (dump_file, var, 0);
1832 fprintf (dump_file, "\n");
1833 }
1834
1835 return true;
1836 }
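
/* For illustration only (a rough sketch, not part of the pass): given

     struct pt { int x, y; };
     struct pt a;                   <- accepted: complete aggregate of fixed size
     volatile struct pt v;          <- rejected: volatile
     struct pt b; int *p = &b.x;    <- b rejected: needs to live in memory

   whether an accepted candidate is eventually scalarized still depends on the
   scanning and analysis stages below.  */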
1837
1838 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1839 those declarations whose type is suitable for scalarization. */
1840
1841 static bool
1842 find_var_candidates (void)
1843 {
1844 tree var, parm;
1845 unsigned int i;
1846 bool ret = false;
1847
1848 for (parm = DECL_ARGUMENTS (current_function_decl);
1849 parm;
1850 parm = DECL_CHAIN (parm))
1851 ret |= maybe_add_sra_candidate (parm);
1852
1853 FOR_EACH_LOCAL_DECL (cfun, i, var)
1854 {
1855 if (TREE_CODE (var) != VAR_DECL)
1856 continue;
1857
1858 ret |= maybe_add_sra_candidate (var);
1859 }
1860
1861 return ret;
1862 }
1863
1864 /* Sort all accesses for the given variable, check for partial overlaps and
1865 return NULL if there are any. If there are none, pick a representative for
1866 each combination of offset and size and create a linked list out of them.
1867 Return the pointer to the first representative and make sure it is the first
1868 one in the vector of accesses. */
1869
1870 static struct access *
1871 sort_and_splice_var_accesses (tree var)
1872 {
1873 int i, j, access_count;
1874 struct access *res, **prev_acc_ptr = &res;
1875 vec<access_p> *access_vec;
1876 bool first = true;
1877 HOST_WIDE_INT low = -1, high = 0;
1878
1879 access_vec = get_base_access_vector (var);
1880 if (!access_vec)
1881 return NULL;
1882 access_count = access_vec->length ();
1883
1884 /* Sort by <OFFSET, SIZE>. */
1885 access_vec->qsort (compare_access_positions);
1886
1887 i = 0;
1888 while (i < access_count)
1889 {
1890 struct access *access = (*access_vec)[i];
1891 bool grp_write = access->write;
1892 bool grp_read = !access->write;
1893 bool grp_scalar_write = access->write
1894 && is_gimple_reg_type (access->type);
1895 bool grp_scalar_read = !access->write
1896 && is_gimple_reg_type (access->type);
1897 bool grp_assignment_read = access->grp_assignment_read;
1898 bool grp_assignment_write = access->grp_assignment_write;
1899 bool multiple_scalar_reads = false;
1900 bool total_scalarization = access->grp_total_scalarization;
1901 bool grp_partial_lhs = access->grp_partial_lhs;
1902 bool first_scalar = is_gimple_reg_type (access->type);
1903 bool unscalarizable_region = access->grp_unscalarizable_region;
1904
1905 if (first || access->offset >= high)
1906 {
1907 first = false;
1908 low = access->offset;
1909 high = access->offset + access->size;
1910 }
1911 else if (access->offset > low && access->offset + access->size > high)
1912 return NULL;
1913 else
1914 gcc_assert (access->offset >= low
1915 && access->offset + access->size <= high);
1916
1917 j = i + 1;
1918 while (j < access_count)
1919 {
1920 struct access *ac2 = (*access_vec)[j];
1921 if (ac2->offset != access->offset || ac2->size != access->size)
1922 break;
1923 if (ac2->write)
1924 {
1925 grp_write = true;
1926 grp_scalar_write = (grp_scalar_write
1927 || is_gimple_reg_type (ac2->type));
1928 }
1929 else
1930 {
1931 grp_read = true;
1932 if (is_gimple_reg_type (ac2->type))
1933 {
1934 if (grp_scalar_read)
1935 multiple_scalar_reads = true;
1936 else
1937 grp_scalar_read = true;
1938 }
1939 }
1940 grp_assignment_read |= ac2->grp_assignment_read;
1941 grp_assignment_write |= ac2->grp_assignment_write;
1942 grp_partial_lhs |= ac2->grp_partial_lhs;
1943 unscalarizable_region |= ac2->grp_unscalarizable_region;
1944 total_scalarization |= ac2->grp_total_scalarization;
1945 relink_to_new_repr (access, ac2);
1946
1947 /* If there are both aggregate-type and scalar-type accesses with
1948 this combination of size and offset, the comparison function
1949 should have put the scalars first. */
1950 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1951 ac2->group_representative = access;
1952 j++;
1953 }
1954
1955 i = j;
1956
1957 access->group_representative = access;
1958 access->grp_write = grp_write;
1959 access->grp_read = grp_read;
1960 access->grp_scalar_read = grp_scalar_read;
1961 access->grp_scalar_write = grp_scalar_write;
1962 access->grp_assignment_read = grp_assignment_read;
1963 access->grp_assignment_write = grp_assignment_write;
1964 access->grp_hint = multiple_scalar_reads || total_scalarization;
1965 access->grp_total_scalarization = total_scalarization;
1966 access->grp_partial_lhs = grp_partial_lhs;
1967 access->grp_unscalarizable_region = unscalarizable_region;
1968 if (access->first_link)
1969 add_access_to_work_queue (access);
1970
1971 *prev_acc_ptr = access;
1972 prev_acc_ptr = &access->next_grp;
1973 }
1974
1975 gcc_assert (res == (*access_vec)[0]);
1976 return res;
1977 }
1978
1979 /* Create a replacement variable for the given ACCESS, which determines its type,
1980 name and a few other properties. Return the variable declaration; callers
1981 store it in ACCESS->replacement_decl. */
1982
1983 static tree
1984 create_access_replacement (struct access *access)
1985 {
1986 tree repl;
1987
1988 if (access->grp_to_be_debug_replaced)
1989 {
1990 repl = create_tmp_var_raw (access->type, NULL);
1991 DECL_CONTEXT (repl) = current_function_decl;
1992 }
1993 else
1994 repl = create_tmp_var (access->type, "SR");
1995 if (TREE_CODE (access->type) == COMPLEX_TYPE
1996 || TREE_CODE (access->type) == VECTOR_TYPE)
1997 {
1998 if (!access->grp_partial_lhs)
1999 DECL_GIMPLE_REG_P (repl) = 1;
2000 }
2001 else if (access->grp_partial_lhs
2002 && is_gimple_reg_type (access->type))
2003 TREE_ADDRESSABLE (repl) = 1;
2004
2005 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2006 DECL_ARTIFICIAL (repl) = 1;
2007 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2008
2009 if (DECL_NAME (access->base)
2010 && !DECL_IGNORED_P (access->base)
2011 && !DECL_ARTIFICIAL (access->base))
2012 {
2013 char *pretty_name = make_fancy_name (access->expr);
2014 tree debug_expr = unshare_expr_without_location (access->expr), d;
2015 bool fail = false;
2016
2017 DECL_NAME (repl) = get_identifier (pretty_name);
2018 obstack_free (&name_obstack, pretty_name);
2019
2020 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2021 as DECL_DEBUG_EXPR isn't considered when looking for still
2022 used SSA_NAMEs and thus they could be freed. All debug info
2023 generation cares is whether something is constant or variable
2024 and that get_ref_base_and_extent works properly on the
2025 expression. It cannot handle accesses at a non-constant offset
2026 though, so just give up in those cases. */
2027 for (d = debug_expr;
2028 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2029 d = TREE_OPERAND (d, 0))
2030 switch (TREE_CODE (d))
2031 {
2032 case ARRAY_REF:
2033 case ARRAY_RANGE_REF:
2034 if (TREE_OPERAND (d, 1)
2035 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2036 fail = true;
2037 if (TREE_OPERAND (d, 3)
2038 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2039 fail = true;
2040 /* FALLTHRU */
2041 case COMPONENT_REF:
2042 if (TREE_OPERAND (d, 2)
2043 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2044 fail = true;
2045 break;
2046 case MEM_REF:
2047 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2048 fail = true;
2049 else
2050 d = TREE_OPERAND (d, 0);
2051 break;
2052 default:
2053 break;
2054 }
2055 if (!fail)
2056 {
2057 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2058 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2059 }
2060 if (access->grp_no_warning)
2061 TREE_NO_WARNING (repl) = 1;
2062 else
2063 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2064 }
2065 else
2066 TREE_NO_WARNING (repl) = 1;
2067
2068 if (dump_file)
2069 {
2070 if (access->grp_to_be_debug_replaced)
2071 {
2072 fprintf (dump_file, "Created a debug-only replacement for ");
2073 print_generic_expr (dump_file, access->base, 0);
2074 fprintf (dump_file, " offset: %u, size: %u\n",
2075 (unsigned) access->offset, (unsigned) access->size);
2076 }
2077 else
2078 {
2079 fprintf (dump_file, "Created a replacement for ");
2080 print_generic_expr (dump_file, access->base, 0);
2081 fprintf (dump_file, " offset: %u, size: %u: ",
2082 (unsigned) access->offset, (unsigned) access->size);
2083 print_generic_expr (dump_file, repl, 0);
2084 fprintf (dump_file, "\n");
2085 }
2086 }
2087 sra_stats.replacements++;
2088
2089 return repl;
2090 }
2091
2092 /* Return the scalar replacement of ACCESS, which must have been created already. */
2093
2094 static inline tree
2095 get_access_replacement (struct access *access)
2096 {
2097 gcc_checking_assert (access->replacement_decl);
2098 return access->replacement_decl;
2099 }
2100
2101
2102 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2103 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2104 to it is not "within" the root. Return false iff some accesses partially
2105 overlap. */
2106
2107 static bool
2108 build_access_subtree (struct access **access)
2109 {
2110 struct access *root = *access, *last_child = NULL;
2111 HOST_WIDE_INT limit = root->offset + root->size;
2112
2113 *access = (*access)->next_grp;
2114 while (*access && (*access)->offset + (*access)->size <= limit)
2115 {
2116 if (!last_child)
2117 root->first_child = *access;
2118 else
2119 last_child->next_sibling = *access;
2120 last_child = *access;
2121
2122 if (!build_access_subtree (access))
2123 return false;
2124 }
2125
2126 if (*access && (*access)->offset < limit)
2127 return false;
2128
2129 return true;
2130 }
2131
2132 /* Build a tree of access representatives; ACCESS is the pointer to the first
2133 one, others are linked in a list by the next_grp field. Return false iff
2134 some accesses partially overlap. */
2135
2136 static bool
2137 build_access_trees (struct access *access)
2138 {
2139 while (access)
2140 {
2141 struct access *root = access;
2142
2143 if (!build_access_subtree (&access))
2144 return false;
2145 root->next_grp = access;
2146 }
2147 return true;
2148 }
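
/* For illustration (a sketch, assuming 32-bit int): for

     struct inner { int a, b; };
     struct outer { struct inner in; int c; } o;

   with accesses o.in (offset 0, size 64), o.in.a (offset 0, size 32) and
   o.c (offset 64, size 32), the representatives form a tree in which o.in.a
   is a child of o.in and o.c is a sibling of o.in.  */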
2149
2150 /* Return true if EXPR contains some ARRAY_REFs into a variably bounded
2151 array. */
2152
2153 static bool
2154 expr_with_var_bounded_array_refs_p (tree expr)
2155 {
2156 while (handled_component_p (expr))
2157 {
2158 if (TREE_CODE (expr) == ARRAY_REF
2159 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2160 return true;
2161 expr = TREE_OPERAND (expr, 0);
2162 }
2163 return false;
2164 }
2165
2166 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2167 doing so seems beneficial and ALLOW_REPLACEMENTS allows it. Also set all
2168 sorts of access flags appropriately along the way, notably propagate the
2169 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2170 grp_total_scalarization flags from PARENT down to ROOT.
2171
2172 Creating a replacement for a scalar access is considered beneficial if its
2173 grp_hint is set (this means we are either attempting total scalarization or
2174 there is more than one direct read access) or according to the following
2175 table:
2176
2177 Access written to through a scalar type (once or more times)
2178 |
2179 | Written to in an assignment statement
2180 | |
2181 | | Access read as scalar _once_
2182 | | |
2183 | | | Read in an assignment statement
2184 | | | |
2185 | | | | Scalarize Comment
2186 -----------------------------------------------------------------------------
2187 0 0 0 0 No access for the scalar
2188 0 0 0 1 No access for the scalar
2189 0 0 1 0 No Single read - won't help
2190 0 0 1 1 No The same case
2191 0 1 0 0 No access for the scalar
2192 0 1 0 1 No access for the scalar
2193 0 1 1 0 Yes s = *g; return s.i;
2194 0 1 1 1 Yes The same case as above
2195 1 0 0 0 No Won't help
2196 1 0 0 1 Yes s.i = 1; *g = s;
2197 1 0 1 0 Yes s.i = 5; g = s.i;
2198 1 0 1 1 Yes The same case as above
2199 1 1 0 0 No Won't help.
2200 1 1 0 1 Yes s.i = 1; *g = s;
2201 1 1 1 0 Yes s = *g; return s.i;
2202 1 1 1 1 Yes Any of the above yeses */
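
/* For illustration only (a rough sketch with made-up replacement names): the
   "0 1 1 0" row above corresponds to something like

     s = *g;
     return s.i;

   where creating a scalar replacement for s.i lets SRA and later passes end
   up with roughly

     SR_s_i = g->i;
     return SR_s_i;

   with the original aggregate copy left for DSE to remove.  The statements
   the pass actually emits depend on the analysis results.  */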
2203
2204 static bool
2205 analyze_access_subtree (struct access *root, struct access *parent,
2206 bool allow_replacements)
2207 {
2208 struct access *child;
2209 HOST_WIDE_INT limit = root->offset + root->size;
2210 HOST_WIDE_INT covered_to = root->offset;
2211 bool scalar = is_gimple_reg_type (root->type);
2212 bool hole = false, sth_created = false;
2213
2214 if (parent)
2215 {
2216 if (parent->grp_read)
2217 root->grp_read = 1;
2218 if (parent->grp_assignment_read)
2219 root->grp_assignment_read = 1;
2220 if (parent->grp_write)
2221 root->grp_write = 1;
2222 if (parent->grp_assignment_write)
2223 root->grp_assignment_write = 1;
2224 if (parent->grp_total_scalarization)
2225 root->grp_total_scalarization = 1;
2226 }
2227
2228 if (root->grp_unscalarizable_region)
2229 allow_replacements = false;
2230
2231 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2232 allow_replacements = false;
2233
2234 for (child = root->first_child; child; child = child->next_sibling)
2235 {
2236 hole |= covered_to < child->offset;
2237 sth_created |= analyze_access_subtree (child, root,
2238 allow_replacements && !scalar);
2239
2240 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2241 root->grp_total_scalarization &= child->grp_total_scalarization;
2242 if (child->grp_covered)
2243 covered_to += child->size;
2244 else
2245 hole = true;
2246 }
2247
2248 if (allow_replacements && scalar && !root->first_child
2249 && (root->grp_hint
2250 || ((root->grp_scalar_read || root->grp_assignment_read)
2251 && (root->grp_scalar_write || root->grp_assignment_write))))
2252 {
2253 /* Always create access replacements that cover the whole access.
2254 For integral types this means the precision has to match.
2255 Avoid assumptions based on the integral type kind, too. */
2256 if (INTEGRAL_TYPE_P (root->type)
2257 && (TREE_CODE (root->type) != INTEGER_TYPE
2258 || TYPE_PRECISION (root->type) != root->size)
2259 /* But leave bitfield accesses alone. */
2260 && (TREE_CODE (root->expr) != COMPONENT_REF
2261 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2262 {
2263 tree rt = root->type;
2264 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2265 && (root->size % BITS_PER_UNIT) == 0);
2266 root->type = build_nonstandard_integer_type (root->size,
2267 TYPE_UNSIGNED (rt));
2268 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2269 root->base, root->offset,
2270 root->type, NULL, false);
2271
2272 if (dump_file && (dump_flags & TDF_DETAILS))
2273 {
2274 fprintf (dump_file, "Changing the type of a replacement for ");
2275 print_generic_expr (dump_file, root->base, 0);
2276 fprintf (dump_file, " offset: %u, size: %u ",
2277 (unsigned) root->offset, (unsigned) root->size);
2278 fprintf (dump_file, " to an integer.\n");
2279 }
2280 }
2281
2282 root->grp_to_be_replaced = 1;
2283 root->replacement_decl = create_access_replacement (root);
2284 sth_created = true;
2285 hole = false;
2286 }
2287 else
2288 {
2289 if (allow_replacements
2290 && scalar && !root->first_child
2291 && (root->grp_scalar_write || root->grp_assignment_write)
2292 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2293 DECL_UID (root->base)))
2294 {
2295 gcc_checking_assert (!root->grp_scalar_read
2296 && !root->grp_assignment_read);
2297 sth_created = true;
2298 if (MAY_HAVE_DEBUG_STMTS)
2299 {
2300 root->grp_to_be_debug_replaced = 1;
2301 root->replacement_decl = create_access_replacement (root);
2302 }
2303 }
2304
2305 if (covered_to < limit)
2306 hole = true;
2307 if (scalar)
2308 root->grp_total_scalarization = 0;
2309 }
2310
2311 if (!hole || root->grp_total_scalarization)
2312 root->grp_covered = 1;
2313 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2314 root->grp_unscalarized_data = 1; /* not covered and written to */
2315 return sth_created;
2316 }
2317
2318 /* Analyze all access trees linked by next_grp by means of
2319 analyze_access_subtree. */
2320 static bool
2321 analyze_access_trees (struct access *access)
2322 {
2323 bool ret = false;
2324
2325 while (access)
2326 {
2327 if (analyze_access_subtree (access, NULL, true))
2328 ret = true;
2329 access = access->next_grp;
2330 }
2331
2332 return ret;
2333 }
2334
2335 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with size
2336 SIZE would conflict with an already existing one. If exactly such a child
2337 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2338
2339 static bool
2340 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2341 HOST_WIDE_INT size, struct access **exact_match)
2342 {
2343 struct access *child;
2344
2345 for (child = lacc->first_child; child; child = child->next_sibling)
2346 {
2347 if (child->offset == norm_offset && child->size == size)
2348 {
2349 *exact_match = child;
2350 return true;
2351 }
2352
2353 if (child->offset < norm_offset + size
2354 && child->offset + child->size > norm_offset)
2355 return true;
2356 }
2357
2358 return false;
2359 }
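
/* For example: if LACC already has a child at offset 0 with size 32, a
   potential new child at offset 0 with size 32 is an exact match, one at
   offset 32 does not conflict, and one at offset 16 with size 32 overlaps
   only partially and therefore conflicts.  */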
2360
2361 /* Create a new child access of PARENT, with all properties just like MODEL
2362 except for its offset and with grp_write set and grp_read unset.
2363 Return the new access or NULL if it cannot be created. Note that this access
2364 is created long after all splicing and sorting, so it is not located in any
2365 access vector and is automatically a representative of its group. */
2366
2367 static struct access *
2368 create_artificial_child_access (struct access *parent, struct access *model,
2369 HOST_WIDE_INT new_offset)
2370 {
2371 struct access *access;
2372 struct access **child;
2373 tree expr = parent->base;
2374
2375 gcc_assert (!model->grp_unscalarizable_region);
2376
2377 access = (struct access *) pool_alloc (access_pool);
2378 memset (access, 0, sizeof (struct access));
2379 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2380 model->type))
2381 {
2382 access->grp_no_warning = true;
2383 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2384 new_offset, model, NULL, false);
2385 }
2386
2387 access->base = parent->base;
2388 access->expr = expr;
2389 access->offset = new_offset;
2390 access->size = model->size;
2391 access->type = model->type;
2392 access->grp_write = true;
2393 access->grp_read = false;
2394
2395 child = &parent->first_child;
2396 while (*child && (*child)->offset < new_offset)
2397 child = &(*child)->next_sibling;
2398
2399 access->next_sibling = *child;
2400 *child = access;
2401
2402 return access;
2403 }
2404
2405
2406 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2407 true if any new subaccess was created. Additionally, if RACC is a scalar
2408 access but LACC is not, change the type of the latter, if possible. */
2409
2410 static bool
2411 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2412 {
2413 struct access *rchild;
2414 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2415 bool ret = false;
2416
2417 if (is_gimple_reg_type (lacc->type)
2418 || lacc->grp_unscalarizable_region
2419 || racc->grp_unscalarizable_region)
2420 return false;
2421
2422 if (is_gimple_reg_type (racc->type))
2423 {
2424 if (!lacc->first_child && !racc->first_child)
2425 {
2426 tree t = lacc->base;
2427
2428 lacc->type = racc->type;
2429 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2430 lacc->offset, racc->type))
2431 lacc->expr = t;
2432 else
2433 {
2434 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2435 lacc->base, lacc->offset,
2436 racc, NULL, false);
2437 lacc->grp_no_warning = true;
2438 }
2439 }
2440 return false;
2441 }
2442
2443 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2444 {
2445 struct access *new_acc = NULL;
2446 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2447
2448 if (rchild->grp_unscalarizable_region)
2449 continue;
2450
2451 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2452 &new_acc))
2453 {
2454 if (new_acc)
2455 {
2456 rchild->grp_hint = 1;
2457 new_acc->grp_hint |= new_acc->grp_read;
2458 if (rchild->first_child)
2459 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2460 }
2461 continue;
2462 }
2463
2464 rchild->grp_hint = 1;
2465 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2466 if (new_acc)
2467 {
2468 ret = true;
2469 if (racc->first_child)
2470 propagate_subaccesses_across_link (new_acc, rchild);
2471 }
2472 }
2473
2474 return ret;
2475 }
2476
2477 /* Propagate all subaccesses across assignment links. */
2478
2479 static void
2480 propagate_all_subaccesses (void)
2481 {
2482 while (work_queue_head)
2483 {
2484 struct access *racc = pop_access_from_work_queue ();
2485 struct assign_link *link;
2486
2487 gcc_assert (racc->first_link);
2488
2489 for (link = racc->first_link; link; link = link->next)
2490 {
2491 struct access *lacc = link->lacc;
2492
2493 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2494 continue;
2495 lacc = lacc->group_representative;
2496 if (propagate_subaccesses_across_link (lacc, racc)
2497 && lacc->first_link)
2498 add_access_to_work_queue (lacc);
2499 }
2500 }
2501 }
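
/* For illustration (a rough sketch): given

     struct S a, b;
     a = b;          <- assign link from the accesses of b (RHS) to those of a (LHS)
     use (b.f);      <- creates a scalar access for b.f

   propagation creates a corresponding artificial child access for a.f, so
   that the aggregate copy can later be rewritten as something like
   SR_a_f = SR_b_f (the replacement names are made up here).  */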
2502
2503 /* Go through all accesses collected throughout the (intraprocedural) analysis
2504 stage, exclude overlapping ones, identify representatives and build trees
2505 out of them, making decisions about scalarization on the way. Return true
2506 iff there are any to-be-scalarized variables after this stage. */
2507
2508 static bool
2509 analyze_all_variable_accesses (void)
2510 {
2511 int res = 0;
2512 bitmap tmp = BITMAP_ALLOC (NULL);
2513 bitmap_iterator bi;
2514 unsigned i;
2515 unsigned max_scalarization_size
2516 = (optimize_function_for_size_p (cfun)
2517 ? PARAM_VALUE (PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE)
2518 : PARAM_VALUE (PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED))
2519 * BITS_PER_UNIT;
2520
2521 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2522 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2523 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2524 {
2525 tree var = candidate (i);
2526
2527 if (TREE_CODE (var) == VAR_DECL
2528 && type_consists_of_records_p (TREE_TYPE (var)))
2529 {
2530 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2531 <= max_scalarization_size)
2532 {
2533 completely_scalarize_var (var);
2534 if (dump_file && (dump_flags & TDF_DETAILS))
2535 {
2536 fprintf (dump_file, "Will attempt to totally scalarize ");
2537 print_generic_expr (dump_file, var, 0);
2538 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2539 }
2540 }
2541 else if (dump_file && (dump_flags & TDF_DETAILS))
2542 {
2543 fprintf (dump_file, "Too big to totally scalarize: ");
2544 print_generic_expr (dump_file, var, 0);
2545 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2546 }
2547 }
2548 }
2549
2550 bitmap_copy (tmp, candidate_bitmap);
2551 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2552 {
2553 tree var = candidate (i);
2554 struct access *access;
2555
2556 access = sort_and_splice_var_accesses (var);
2557 if (!access || !build_access_trees (access))
2558 disqualify_candidate (var,
2559 "No or inhibitingly overlapping accesses.");
2560 }
2561
2562 propagate_all_subaccesses ();
2563
2564 bitmap_copy (tmp, candidate_bitmap);
2565 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2566 {
2567 tree var = candidate (i);
2568 struct access *access = get_first_repr_for_decl (var);
2569
2570 if (analyze_access_trees (access))
2571 {
2572 res++;
2573 if (dump_file && (dump_flags & TDF_DETAILS))
2574 {
2575 fprintf (dump_file, "\nAccess trees for ");
2576 print_generic_expr (dump_file, var, 0);
2577 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2578 dump_access_tree (dump_file, access);
2579 fprintf (dump_file, "\n");
2580 }
2581 }
2582 else
2583 disqualify_candidate (var, "No scalar replacements to be created.");
2584 }
2585
2586 BITMAP_FREE (tmp);
2587
2588 if (res)
2589 {
2590 statistics_counter_event (cfun, "Scalarized aggregates", res);
2591 return true;
2592 }
2593 else
2594 return false;
2595 }
2596
2597 /* Generate statements copying scalar replacements of accesses within a subtree
2598 into or out of AGG. ACCESS, all its children, siblings and their children
2599 are to be processed. AGG is an aggregate type expression (can be a
2600 declaration but does not have to be, it can for example also be a mem_ref or
2601 a series of handled components). TOP_OFFSET is the offset of the processed
2602 subtree which has to be subtracted from offsets of individual accesses to
2603 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2604 replacements in the interval <start_offset, start_offset + chunk_size>,
2605 otherwise copy all. GSI is a statement iterator used to place the new
2606 statements. WRITE should be true when the statements should write from AGG
2607 to the replacement and false vice versa. If INSERT_AFTER is true, new
2608 statements will be added after the current statement in GSI, otherwise they
2609 will be added before the statement. */
2610
2611 static void
2612 generate_subtree_copies (struct access *access, tree agg,
2613 HOST_WIDE_INT top_offset,
2614 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2615 gimple_stmt_iterator *gsi, bool write,
2616 bool insert_after, location_t loc)
2617 {
2618 do
2619 {
2620 if (chunk_size && access->offset >= start_offset + chunk_size)
2621 return;
2622
2623 if (access->grp_to_be_replaced
2624 && (chunk_size == 0
2625 || access->offset + access->size > start_offset))
2626 {
2627 tree expr, repl = get_access_replacement (access);
2628 gimple stmt;
2629
2630 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2631 access, gsi, insert_after);
2632
2633 if (write)
2634 {
2635 if (access->grp_partial_lhs)
2636 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2637 !insert_after,
2638 insert_after ? GSI_NEW_STMT
2639 : GSI_SAME_STMT);
2640 stmt = gimple_build_assign (repl, expr);
2641 }
2642 else
2643 {
2644 TREE_NO_WARNING (repl) = 1;
2645 if (access->grp_partial_lhs)
2646 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2647 !insert_after,
2648 insert_after ? GSI_NEW_STMT
2649 : GSI_SAME_STMT);
2650 stmt = gimple_build_assign (expr, repl);
2651 }
2652 gimple_set_location (stmt, loc);
2653
2654 if (insert_after)
2655 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2656 else
2657 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2658 update_stmt (stmt);
2659 sra_stats.subtree_copies++;
2660 }
2661 else if (write
2662 && access->grp_to_be_debug_replaced
2663 && (chunk_size == 0
2664 || access->offset + access->size > start_offset))
2665 {
2666 gimple ds;
2667 tree drhs = build_debug_ref_for_model (loc, agg,
2668 access->offset - top_offset,
2669 access);
2670 ds = gimple_build_debug_bind (get_access_replacement (access),
2671 drhs, gsi_stmt (*gsi));
2672 if (insert_after)
2673 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2674 else
2675 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2676 }
2677
2678 if (access->first_child)
2679 generate_subtree_copies (access->first_child, agg, top_offset,
2680 start_offset, chunk_size, gsi,
2681 write, insert_after, loc);
2682
2683 access = access->next_sibling;
2684 }
2685 while (access);
2686 }
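
/* For illustration (a rough sketch with made-up names): if a variable s with
   scalarized fields s.x and s.y is passed as a whole to a call,

     foo (s);

   this function is used to flush the replacements back into the aggregate
   first, yielding roughly

     s.x = SR_s_x;
     s.y = SR_s_y;
     foo (s);  */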
2687
2688 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2689 root of the subtree to be processed. GSI is the statement iterator used
2690 for inserting statements which are added after the current statement if
2691 INSERT_AFTER is true or before it otherwise. */
2692
2693 static void
2694 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2695 bool insert_after, location_t loc)
2696
2697 {
2698 struct access *child;
2699
2700 if (access->grp_to_be_replaced)
2701 {
2702 gimple stmt;
2703
2704 stmt = gimple_build_assign (get_access_replacement (access),
2705 build_zero_cst (access->type));
2706 if (insert_after)
2707 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2708 else
2709 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2710 update_stmt (stmt);
2711 gimple_set_location (stmt, loc);
2712 }
2713 else if (access->grp_to_be_debug_replaced)
2714 {
2715 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2716 build_zero_cst (access->type),
2717 gsi_stmt (*gsi));
2718 if (insert_after)
2719 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2720 else
2721 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2722 }
2723
2724 for (child = access->first_child; child; child = child->next_sibling)
2725 init_subtree_with_zero (child, gsi, insert_after, loc);
2726 }
2727
2728 /* Search for an access representative for the given expression EXPR and
2729 return it or NULL if it cannot be found. */
2730
2731 static struct access *
2732 get_access_for_expr (tree expr)
2733 {
2734 HOST_WIDE_INT offset, size, max_size;
2735 tree base;
2736
2737 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2738 a different size than that of its argument and we need the latter
2739 one. */
2740 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2741 expr = TREE_OPERAND (expr, 0);
2742
2743 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2744 if (max_size == -1 || !DECL_P (base))
2745 return NULL;
2746
2747 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2748 return NULL;
2749
2750 return get_var_base_offset_size_access (base, offset, max_size);
2751 }
2752
2753 /* Replace the expression EXPR with a scalar replacement if there is one and
2754 generate other statements to do type conversion or subtree copying if
2755 necessary. GSI is used to place newly created statements, WRITE is true if
2756 the expression is being written to (it is on the LHS of a statement or an output
2757 in an assembly statement). */
2758
2759 static bool
2760 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2761 {
2762 location_t loc;
2763 struct access *access;
2764 tree type, bfr, orig_expr;
2765
2766 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2767 {
2768 bfr = *expr;
2769 expr = &TREE_OPERAND (*expr, 0);
2770 }
2771 else
2772 bfr = NULL_TREE;
2773
2774 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2775 expr = &TREE_OPERAND (*expr, 0);
2776 access = get_access_for_expr (*expr);
2777 if (!access)
2778 return false;
2779 type = TREE_TYPE (*expr);
2780 orig_expr = *expr;
2781
2782 loc = gimple_location (gsi_stmt (*gsi));
2783 gimple_stmt_iterator alt_gsi = gsi_none ();
2784 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2785 {
2786 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2787 gsi = &alt_gsi;
2788 }
2789
2790 if (access->grp_to_be_replaced)
2791 {
2792 tree repl = get_access_replacement (access);
2793 /* If we replace a non-register typed access simply use the original
2794 access expression to extract the scalar component afterwards.
2795 This happens if scalarizing a function return value or parameter
2796 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2797 gcc.c-torture/compile/20011217-1.c.
2798
2799 We also want to use this when accessing a complex or vector which can
2800 be accessed as a different type too, potentially creating a need for
2801 type conversion (see PR42196) and when scalarized unions are involved
2802 in assembler statements (see PR42398). */
2803 if (!useless_type_conversion_p (type, access->type))
2804 {
2805 tree ref;
2806
2807 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
2808
2809 if (write)
2810 {
2811 gimple stmt;
2812
2813 if (access->grp_partial_lhs)
2814 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2815 false, GSI_NEW_STMT);
2816 stmt = gimple_build_assign (repl, ref);
2817 gimple_set_location (stmt, loc);
2818 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2819 }
2820 else
2821 {
2822 gimple stmt;
2823
2824 if (access->grp_partial_lhs)
2825 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2826 true, GSI_SAME_STMT);
2827 stmt = gimple_build_assign (ref, repl);
2828 gimple_set_location (stmt, loc);
2829 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2830 }
2831 }
2832 else
2833 *expr = repl;
2834 sra_stats.exprs++;
2835 }
2836 else if (write && access->grp_to_be_debug_replaced)
2837 {
2838 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2839 NULL_TREE,
2840 gsi_stmt (*gsi));
2841 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2842 }
2843
2844 if (access->first_child)
2845 {
2846 HOST_WIDE_INT start_offset, chunk_size;
2847 if (bfr
2848 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
2849 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
2850 {
2851 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
2852 start_offset = access->offset
2853 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
2854 }
2855 else
2856 start_offset = chunk_size = 0;
2857
2858 generate_subtree_copies (access->first_child, orig_expr, access->offset,
2859 start_offset, chunk_size, gsi, write, write,
2860 loc);
2861 }
2862 return true;
2863 }
2864
2865 /* Where scalar replacements of the RHS have been written to when a replacement
2866 of the LHS of an assignment cannot be directly loaded from a replacement of
2867 the RHS. */
2868 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2869 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2870 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2871
2872 struct subreplacement_assignment_data
2873 {
2874 /* Offset of the access representing the lhs of the assignment. */
2875 HOST_WIDE_INT left_offset;
2876
2877 /* LHS and RHS of the original assignment. */
2878 tree assignment_lhs, assignment_rhs;
2879
2880 /* Access representing the rhs of the whole assignment. */
2881 struct access *top_racc;
2882
2883 /* Stmt iterator used for statement insertions after the original assignment.
2884 It points to the main GSI used to traverse a BB during function body
2885 modification. */
2886 gimple_stmt_iterator *new_gsi;
2887
2888 /* Stmt iterator used for statement insertions before the original
2889 assignment. Keeps on pointing to the original statement. */
2890 gimple_stmt_iterator old_gsi;
2891
2892 /* Location of the assignment. */
2893 location_t loc;
2894
2895 /* Keeps track of whether we have needed to refresh replacements of
2896 the LHS and from which side of the assignment this takes place. */
2897 enum unscalarized_data_handling refreshed;
2898 };
2899
2900 /* Store all replacements in the access tree rooted in SAD->top_racc either to
2901 their base aggregate if there are unscalarized data or directly to the LHS
2902 of the statement that is pointed to by SAD->old_gsi otherwise. */
2903
2904 static void
2905 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
2906 {
2907 tree src;
2908 if (sad->top_racc->grp_unscalarized_data)
2909 {
2910 src = sad->assignment_rhs;
2911 sad->refreshed = SRA_UDH_RIGHT;
2912 }
2913 else
2914 {
2915 src = sad->assignment_lhs;
2916 sad->refreshed = SRA_UDH_LEFT;
2917 }
2918 generate_subtree_copies (sad->top_racc->first_child, src,
2919 sad->top_racc->offset, 0, 0,
2920 &sad->old_gsi, false, false, sad->loc);
2921 }
2922
2923 /* Try to generate statements to load all sub-replacements in an access subtree
2924 formed by children of LACC from scalar replacements in the SAD->top_racc
2925 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
2926 and load the accesses from it. */
2927
2928 static void
2929 load_assign_lhs_subreplacements (struct access *lacc,
2930 struct subreplacement_assignment_data *sad)
2931 {
2932 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2933 {
2934 HOST_WIDE_INT offset;
2935 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
2936
2937 if (lacc->grp_to_be_replaced)
2938 {
2939 struct access *racc;
2940 gimple stmt;
2941 tree rhs;
2942
2943 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
2944 if (racc && racc->grp_to_be_replaced)
2945 {
2946 rhs = get_access_replacement (racc);
2947 if (!useless_type_conversion_p (lacc->type, racc->type))
2948 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
2949 lacc->type, rhs);
2950
2951 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2952 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
2953 NULL_TREE, true, GSI_SAME_STMT);
2954 }
2955 else
2956 {
2957 /* No suitable access on the right hand side, need to load from
2958 the aggregate. See if we have to update it first... */
2959 if (sad->refreshed == SRA_UDH_NONE)
2960 handle_unscalarized_data_in_subtree (sad);
2961
2962 if (sad->refreshed == SRA_UDH_LEFT)
2963 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
2964 lacc->offset - sad->left_offset,
2965 lacc, sad->new_gsi, true);
2966 else
2967 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
2968 lacc->offset - sad->left_offset,
2969 lacc, sad->new_gsi, true);
2970 if (lacc->grp_partial_lhs)
2971 rhs = force_gimple_operand_gsi (sad->new_gsi,
2972 rhs, true, NULL_TREE,
2973 false, GSI_NEW_STMT);
2974 }
2975
2976 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2977 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
2978 gimple_set_location (stmt, sad->loc);
2979 update_stmt (stmt);
2980 sra_stats.subreplacements++;
2981 }
2982 else
2983 {
2984 if (sad->refreshed == SRA_UDH_NONE
2985 && lacc->grp_read && !lacc->grp_covered)
2986 handle_unscalarized_data_in_subtree (sad);
2987
2988 if (lacc && lacc->grp_to_be_debug_replaced)
2989 {
2990 gimple ds;
2991 tree drhs;
2992 struct access *racc = find_access_in_subtree (sad->top_racc,
2993 offset,
2994 lacc->size);
2995
2996 if (racc && racc->grp_to_be_replaced)
2997 {
2998 if (racc->grp_write)
2999 drhs = get_access_replacement (racc);
3000 else
3001 drhs = NULL;
3002 }
3003 else if (sad->refreshed == SRA_UDH_LEFT)
3004 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3005 lacc->offset, lacc);
3006 else if (sad->refreshed == SRA_UDH_RIGHT)
3007 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3008 offset, lacc);
3009 else
3010 drhs = NULL_TREE;
3011 if (drhs
3012 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3013 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3014 lacc->type, drhs);
3015 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3016 drhs, gsi_stmt (sad->old_gsi));
3017 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3018 }
3019 }
3020
3021 if (lacc->first_child)
3022 load_assign_lhs_subreplacements (lacc, sad);
3023 }
3024 }
3025
3026 /* Result code for SRA assignment modification. */
3027 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3028 SRA_AM_MODIFIED, /* stmt changed but not
3029 removed */
3030 SRA_AM_REMOVED }; /* stmt eliminated */
3031
3032 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3033 to the assignment and GSI is the statement iterator pointing at it. Returns
3034 the same values as sra_modify_assign. */
3035
3036 static enum assignment_mod_result
3037 sra_modify_constructor_assign (gimple stmt, gimple_stmt_iterator *gsi)
3038 {
3039 tree lhs = gimple_assign_lhs (stmt);
3040 struct access *acc;
3041 location_t loc;
3042
3043 acc = get_access_for_expr (lhs);
3044 if (!acc)
3045 return SRA_AM_NONE;
3046
3047 if (gimple_clobber_p (stmt))
3048 {
3049 /* Remove clobbers of fully scalarized variables, otherwise
3050 do nothing. */
3051 if (acc->grp_covered)
3052 {
3053 unlink_stmt_vdef (stmt);
3054 gsi_remove (gsi, true);
3055 release_defs (stmt);
3056 return SRA_AM_REMOVED;
3057 }
3058 else
3059 return SRA_AM_NONE;
3060 }
3061
3062 loc = gimple_location (stmt);
3063 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
3064 {
3065 /* I have never seen this code path trigger but if it can happen the
3066 following should handle it gracefully. */
3067 if (access_has_children_p (acc))
3068 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3069 true, true, loc);
3070 return SRA_AM_MODIFIED;
3071 }
3072
3073 if (acc->grp_covered)
3074 {
3075 init_subtree_with_zero (acc, gsi, false, loc);
3076 unlink_stmt_vdef (stmt);
3077 gsi_remove (gsi, true);
3078 release_defs (stmt);
3079 return SRA_AM_REMOVED;
3080 }
3081 else
3082 {
3083 init_subtree_with_zero (acc, gsi, true, loc);
3084 return SRA_AM_MODIFIED;
3085 }
3086 }
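
/* For illustration (a rough sketch with made-up names): a clearing assignment
   such as

     s = (struct S) { 0 };

   where s is fully covered by scalar replacements can be rewritten as

     SR_s_x = 0;
     SR_s_y = 0;

   and the original statement removed; if s is only partially covered, the
   zero-initializations are added and the original statement is kept.  */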
3087
3088 /* Create and return a new suitable default definition SSA_NAME for RACC which
3089 is an access describing an uninitialized part of an aggregate that is being
3090 loaded. */
3091
3092 static tree
3093 get_repl_default_def_ssa_name (struct access *racc)
3094 {
3095 gcc_checking_assert (!racc->grp_to_be_replaced
3096 && !racc->grp_to_be_debug_replaced);
3097 if (!racc->replacement_decl)
3098 racc->replacement_decl = create_access_replacement (racc);
3099 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3100 }
3101
3102 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3103 bit-field field declaration somewhere in it. */
3104
3105 static inline bool
3106 contains_vce_or_bfcref_p (const_tree ref)
3107 {
3108 while (handled_component_p (ref))
3109 {
3110 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3111 || (TREE_CODE (ref) == COMPONENT_REF
3112 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3113 return true;
3114 ref = TREE_OPERAND (ref, 0);
3115 }
3116
3117 return false;
3118 }
3119
3120 /* Examine both sides of the assignment statement pointed to by STMT, replace
3121 them with a scalar replacement if there is one and generate copying of
3122 replacements if scalarized aggregates have been used in the assignment. GSI
3123 is used to hold generated statements for type conversions and subtree
3124 copying. */
3125
3126 static enum assignment_mod_result
3127 sra_modify_assign (gimple stmt, gimple_stmt_iterator *gsi)
3128 {
3129 struct access *lacc, *racc;
3130 tree lhs, rhs;
3131 bool modify_this_stmt = false;
3132 bool force_gimple_rhs = false;
3133 location_t loc;
3134 gimple_stmt_iterator orig_gsi = *gsi;
3135
3136 if (!gimple_assign_single_p (stmt))
3137 return SRA_AM_NONE;
3138 lhs = gimple_assign_lhs (stmt);
3139 rhs = gimple_assign_rhs1 (stmt);
3140
3141 if (TREE_CODE (rhs) == CONSTRUCTOR)
3142 return sra_modify_constructor_assign (stmt, gsi);
3143
3144 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3145 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3146 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3147 {
3148 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3149 gsi, false);
3150 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3151 gsi, true);
3152 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3153 }
3154
3155 lacc = get_access_for_expr (lhs);
3156 racc = get_access_for_expr (rhs);
3157 if (!lacc && !racc)
3158 return SRA_AM_NONE;
3159
3160 loc = gimple_location (stmt);
3161 if (lacc && lacc->grp_to_be_replaced)
3162 {
3163 lhs = get_access_replacement (lacc);
3164 gimple_assign_set_lhs (stmt, lhs);
3165 modify_this_stmt = true;
3166 if (lacc->grp_partial_lhs)
3167 force_gimple_rhs = true;
3168 sra_stats.exprs++;
3169 }
3170
3171 if (racc && racc->grp_to_be_replaced)
3172 {
3173 rhs = get_access_replacement (racc);
3174 modify_this_stmt = true;
3175 if (racc->grp_partial_lhs)
3176 force_gimple_rhs = true;
3177 sra_stats.exprs++;
3178 }
3179 else if (racc
3180 && !racc->grp_unscalarized_data
3181 && TREE_CODE (lhs) == SSA_NAME
3182 && !access_has_replacements_p (racc))
3183 {
3184 rhs = get_repl_default_def_ssa_name (racc);
3185 modify_this_stmt = true;
3186 sra_stats.exprs++;
3187 }
3188
3189 if (modify_this_stmt)
3190 {
3191 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3192 {
3193 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3194 ??? This should move to fold_stmt which we simply should
3195 call after building a VIEW_CONVERT_EXPR here. */
3196 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3197 && !contains_bitfld_component_ref_p (lhs))
3198 {
3199 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3200 gimple_assign_set_lhs (stmt, lhs);
3201 }
3202 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3203 && !contains_vce_or_bfcref_p (rhs))
3204 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3205
3206 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3207 {
3208 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3209 rhs);
3210 if (is_gimple_reg_type (TREE_TYPE (lhs))
3211 && TREE_CODE (lhs) != SSA_NAME)
3212 force_gimple_rhs = true;
3213 }
3214 }
3215 }
3216
3217 if (lacc && lacc->grp_to_be_debug_replaced)
3218 {
3219 tree dlhs = get_access_replacement (lacc);
3220 tree drhs = unshare_expr (rhs);
3221 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3222 {
3223 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3224 && !contains_vce_or_bfcref_p (drhs))
3225 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3226 if (drhs
3227 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3228 TREE_TYPE (drhs)))
3229 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3230 TREE_TYPE (dlhs), drhs);
3231 }
3232 gimple ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3233 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3234 }
3235
3236 /* From this point on, the function deals with assignments in between
3237 aggregates when at least one has scalar reductions of some of its
3238 components. There are three possible scenarios: Both the LHS and RHS have
3239 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3240
3241 In the first case, we would like to load the LHS components from RHS
3242 components whenever possible. If that is not possible, we would like to
3243 read it directly from the RHS (after updating it by storing in it its own
3244 components). If there are some necessary unscalarized data in the LHS,
3245 those will be loaded by the original assignment too. If neither of these
3246 cases happen, the original statement can be removed. Most of this is done
3247 by load_assign_lhs_subreplacements.
3248
3249 In the second case, we would like to store all RHS scalarized components
3250 directly into LHS and if they cover the aggregate completely, remove the
3251 statement too. In the third case, we want the LHS components to be loaded
3252 directly from the RHS (DSE will remove the original statement if it
3253 becomes redundant).
3254
3255 This is a bit complex but manageable when types match and when unions do
3256 not cause confusion in a way that we cannot really load a component of LHS
3257 from the RHS or vice versa (the access representing this level can have
3258 subaccesses that are accessible only through a different union field at a
3259 higher level - different from the one used in the examined expression).
3260 Unions are fun.
3261
3262 Therefore, I specially handle a fourth case, happening when there is a
3263 specific type cast or it is impossible to locate a scalarized subaccess on
3264 the other side of the expression. If that happens, I simply "refresh" the
3265 RHS by storing in it is scalarized components leave the original statement
3266 there to do the copying and then load the scalar replacements of the LHS.
3267 This is what the first branch does. */
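
  /* For illustration of the second scenario (a rough sketch, names made up):
     for

       *p = s;

     where s has scalar replacements covering all of its data, the branches
     below can emit

       p->x = SR_s_x;
       p->y = SR_s_y;

     and remove the original aggregate copy.  */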
3268
3269 if (modify_this_stmt
3270 || gimple_has_volatile_ops (stmt)
3271 || contains_vce_or_bfcref_p (rhs)
3272 || contains_vce_or_bfcref_p (lhs)
3273 || stmt_ends_bb_p (stmt))
3274 {
3275 if (access_has_children_p (racc))
3276 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3277 gsi, false, false, loc);
3278 if (access_has_children_p (lacc))
3279 {
3280 gimple_stmt_iterator alt_gsi = gsi_none ();
3281 if (stmt_ends_bb_p (stmt))
3282 {
3283 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3284 gsi = &alt_gsi;
3285 }
3286 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3287 gsi, true, true, loc);
3288 }
3289 sra_stats.separate_lhs_rhs_handling++;
3290
3291 /* This gimplification must be done after generate_subtree_copies,
3292 lest we insert the subtree copies in the middle of the gimplified
3293 sequence. */
3294 if (force_gimple_rhs)
3295 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3296 true, GSI_SAME_STMT);
3297 if (gimple_assign_rhs1 (stmt) != rhs)
3298 {
3299 modify_this_stmt = true;
3300 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3301 gcc_assert (stmt == gsi_stmt (orig_gsi));
3302 }
3303
3304 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3305 }
3306 else
3307 {
3308 if (access_has_children_p (lacc)
3309 && access_has_children_p (racc)
3310 /* When an access represents an unscalarizable region, it usually
3311 represents accesses with variable offset and thus must not be used
3312 to generate new memory accesses. */
3313 && !lacc->grp_unscalarizable_region
3314 && !racc->grp_unscalarizable_region)
3315 {
3316 struct subreplacement_assignment_data sad;
3317
3318 sad.left_offset = lacc->offset;
3319 sad.assignment_lhs = lhs;
3320 sad.assignment_rhs = rhs;
3321 sad.top_racc = racc;
3322 sad.old_gsi = *gsi;
3323 sad.new_gsi = gsi;
3324 sad.loc = gimple_location (stmt);
3325 sad.refreshed = SRA_UDH_NONE;
3326
3327 if (lacc->grp_read && !lacc->grp_covered)
3328 handle_unscalarized_data_in_subtree (&sad);
3329
3330 load_assign_lhs_subreplacements (lacc, &sad);
3331 if (sad.refreshed != SRA_UDH_RIGHT)
3332 {
3333 gsi_next (gsi);
3334 unlink_stmt_vdef (stmt);
3335 gsi_remove (&sad.old_gsi, true);
3336 release_defs (stmt);
3337 sra_stats.deleted++;
3338 return SRA_AM_REMOVED;
3339 }
3340 }
3341 else
3342 {
3343 if (access_has_children_p (racc)
3344 && !racc->grp_unscalarized_data)
3345 {
3346 if (dump_file)
3347 {
3348 fprintf (dump_file, "Removing load: ");
3349 print_gimple_stmt (dump_file, stmt, 0, 0);
3350 }
3351 generate_subtree_copies (racc->first_child, lhs,
3352 racc->offset, 0, 0, gsi,
3353 false, false, loc);
3354 gcc_assert (stmt == gsi_stmt (*gsi));
3355 unlink_stmt_vdef (stmt);
3356 gsi_remove (gsi, true);
3357 release_defs (stmt);
3358 sra_stats.deleted++;
3359 return SRA_AM_REMOVED;
3360 }
3361 /* Restore the aggregate RHS from its components so the
3362 prevailing aggregate copy does the right thing. */
3363 if (access_has_children_p (racc))
3364 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3365 gsi, false, false, loc);
3366 /* Re-load the components of the aggregate copy destination.
3367 But use the RHS aggregate to load from to expose more
3368 optimization opportunities. */
3369 if (access_has_children_p (lacc))
3370 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3371 0, 0, gsi, true, true, loc);
3372 }
3373
3374 return SRA_AM_NONE;
3375 }
3376 }
3377
3378 /* Traverse the function body and make all modifications as decided in
3379 analyze_all_variable_accesses. Return true iff the CFG has been
3380 changed. */
3381
3382 static bool
3383 sra_modify_function_body (void)
3384 {
3385 bool cfg_changed = false;
3386 basic_block bb;
3387
3388 FOR_EACH_BB_FN (bb, cfun)
3389 {
3390 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3391 while (!gsi_end_p (gsi))
3392 {
3393 gimple stmt = gsi_stmt (gsi);
3394 enum assignment_mod_result assign_result;
3395 bool modified = false, deleted = false;
3396 tree *t;
3397 unsigned i;
3398
3399 switch (gimple_code (stmt))
3400 {
3401 case GIMPLE_RETURN:
3402 t = gimple_return_retval_ptr (stmt);
3403 if (*t != NULL_TREE)
3404 modified |= sra_modify_expr (t, &gsi, false);
3405 break;
3406
3407 case GIMPLE_ASSIGN:
3408 assign_result = sra_modify_assign (stmt, &gsi);
3409 modified |= assign_result == SRA_AM_MODIFIED;
3410 deleted = assign_result == SRA_AM_REMOVED;
3411 break;
3412
3413 case GIMPLE_CALL:
3414 /* Operands must be processed before the lhs. */
3415 for (i = 0; i < gimple_call_num_args (stmt); i++)
3416 {
3417 t = gimple_call_arg_ptr (stmt, i);
3418 modified |= sra_modify_expr (t, &gsi, false);
3419 }
3420
3421 if (gimple_call_lhs (stmt))
3422 {
3423 t = gimple_call_lhs_ptr (stmt);
3424 modified |= sra_modify_expr (t, &gsi, true);
3425 }
3426 break;
3427
3428 case GIMPLE_ASM:
3429 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3430 {
3431 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3432 modified |= sra_modify_expr (t, &gsi, false);
3433 }
3434 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3435 {
3436 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3437 modified |= sra_modify_expr (t, &gsi, true);
3438 }
3439 break;
3440
3441 default:
3442 break;
3443 }
3444
3445 if (modified)
3446 {
3447 update_stmt (stmt);
3448 if (maybe_clean_eh_stmt (stmt)
3449 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3450 cfg_changed = true;
3451 }
3452 if (!deleted)
3453 gsi_next (&gsi);
3454 }
3455 }
3456
3457 gsi_commit_edge_inserts ();
3458 return cfg_changed;
3459 }
3460
3461 /* Generate statements initializing scalar replacements of parts of function
3462 parameters. */
3463
3464 static void
3465 initialize_parameter_reductions (void)
3466 {
3467 gimple_stmt_iterator gsi;
3468 gimple_seq seq = NULL;
3469 tree parm;
3470
3471 gsi = gsi_start (seq);
3472 for (parm = DECL_ARGUMENTS (current_function_decl);
3473 parm;
3474 parm = DECL_CHAIN (parm))
3475 {
3476 vec<access_p> *access_vec;
3477 struct access *access;
3478
3479 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3480 continue;
3481 access_vec = get_base_access_vector (parm);
3482 if (!access_vec)
3483 continue;
3484
3485 for (access = (*access_vec)[0];
3486 access;
3487 access = access->next_grp)
3488 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3489 EXPR_LOCATION (parm));
3490 }
3491
3492 seq = gsi_seq (gsi);
3493 if (seq)
3494 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3495 }
3496
3497 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3498 it reveals there are components of some aggregates to be scalarized, it runs
3499 the required transformations. */
3500 static unsigned int
3501 perform_intra_sra (void)
3502 {
3503 int ret = 0;
3504 sra_initialize ();
3505
3506 if (!find_var_candidates ())
3507 goto out;
3508
3509 if (!scan_function ())
3510 goto out;
3511
3512 if (!analyze_all_variable_accesses ())
3513 goto out;
3514
3515 if (sra_modify_function_body ())
3516 ret = TODO_update_ssa | TODO_cleanup_cfg;
3517 else
3518 ret = TODO_update_ssa;
3519 initialize_parameter_reductions ();
3520
3521 statistics_counter_event (cfun, "Scalar replacements created",
3522 sra_stats.replacements);
3523 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3524 statistics_counter_event (cfun, "Subtree copy stmts",
3525 sra_stats.subtree_copies);
3526 statistics_counter_event (cfun, "Subreplacement stmts",
3527 sra_stats.subreplacements);
3528 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3529 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3530 sra_stats.separate_lhs_rhs_handling);
3531
3532 out:
3533 sra_deinitialize ();
3534 return ret;
3535 }
3536
3537 /* Perform early intraprocedural SRA. */
3538 static unsigned int
3539 early_intra_sra (void)
3540 {
3541 sra_mode = SRA_MODE_EARLY_INTRA;
3542 return perform_intra_sra ();
3543 }
3544
3545 /* Perform "late" intraprocedural SRA. */
3546 static unsigned int
3547 late_intra_sra (void)
3548 {
3549 sra_mode = SRA_MODE_INTRA;
3550 return perform_intra_sra ();
3551 }
3552
3553
3554 static bool
3555 gate_intra_sra (void)
3556 {
3557 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3558 }
3559
3560
3561 namespace {
3562
3563 const pass_data pass_data_sra_early =
3564 {
3565 GIMPLE_PASS, /* type */
3566 "esra", /* name */
3567 OPTGROUP_NONE, /* optinfo_flags */
3568 TV_TREE_SRA, /* tv_id */
3569 ( PROP_cfg | PROP_ssa ), /* properties_required */
3570 0, /* properties_provided */
3571 0, /* properties_destroyed */
3572 0, /* todo_flags_start */
3573 TODO_update_ssa, /* todo_flags_finish */
3574 };
3575
3576 class pass_sra_early : public gimple_opt_pass
3577 {
3578 public:
3579 pass_sra_early (gcc::context *ctxt)
3580 : gimple_opt_pass (pass_data_sra_early, ctxt)
3581 {}
3582
3583 /* opt_pass methods: */
3584 virtual bool gate (function *) { return gate_intra_sra (); }
3585 virtual unsigned int execute (function *) { return early_intra_sra (); }
3586
3587 }; // class pass_sra_early
3588
3589 } // anon namespace
3590
3591 gimple_opt_pass *
3592 make_pass_sra_early (gcc::context *ctxt)
3593 {
3594 return new pass_sra_early (ctxt);
3595 }
3596
3597 namespace {
3598
3599 const pass_data pass_data_sra =
3600 {
3601 GIMPLE_PASS, /* type */
3602 "sra", /* name */
3603 OPTGROUP_NONE, /* optinfo_flags */
3604 TV_TREE_SRA, /* tv_id */
3605 ( PROP_cfg | PROP_ssa ), /* properties_required */
3606 0, /* properties_provided */
3607 0, /* properties_destroyed */
3608 TODO_update_address_taken, /* todo_flags_start */
3609 TODO_update_ssa, /* todo_flags_finish */
3610 };
3611
3612 class pass_sra : public gimple_opt_pass
3613 {
3614 public:
3615 pass_sra (gcc::context *ctxt)
3616 : gimple_opt_pass (pass_data_sra, ctxt)
3617 {}
3618
3619 /* opt_pass methods: */
3620 virtual bool gate (function *) { return gate_intra_sra (); }
3621 virtual unsigned int execute (function *) { return late_intra_sra (); }
3622
3623 }; // class pass_sra
3624
3625 } // anon namespace
3626
3627 gimple_opt_pass *
3628 make_pass_sra (gcc::context *ctxt)
3629 {
3630 return new pass_sra (ctxt);
3631 }
3632
3633
3634 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3635 parameter. */
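
/* For illustration only (an assumed example, not taken from the sources): in

     int f (int x, int y) { return x; }

   the parameter Y either has no default-definition SSA name at all or that
   name has zero uses, so the predicate below would consider it unused.  */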
3636
3637 static bool
3638 is_unused_scalar_param (tree parm)
3639 {
3640 tree name;
3641 return (is_gimple_reg (parm)
3642 && (!(name = ssa_default_def (cfun, parm))
3643 || has_zero_uses (name)));
3644 }
3645
3646 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3647 examine whether there are any direct or otherwise infeasible ones. If so,
3648 return true; otherwise return false.  PARM must be a gimple register with a
3649 non-NULL default definition. */
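
/* For illustration only (assumed example code; record_ptr is a hypothetical
   callee):

     int f (int *p, int *q)
     {
       record_ptr (p);
       return *q;
     }

   Passing P itself to record_ptr is a use of the pointer value, so the
   function below would return true for P, whereas the load from Q matches
   the valid MEM_REF forms it recognizes and on its own would not cause a
   positive answer.  */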
3650
3651 static bool
3652 ptr_parm_has_direct_uses (tree parm)
3653 {
3654 imm_use_iterator ui;
3655 gimple stmt;
3656 tree name = ssa_default_def (cfun, parm);
3657 bool ret = false;
3658
3659 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3660 {
3661 int uses_ok = 0;
3662 use_operand_p use_p;
3663
3664 if (is_gimple_debug (stmt))
3665 continue;
3666
3667 /* Valid uses include dereferences on the lhs and the rhs. */
3668 if (gimple_has_lhs (stmt))
3669 {
3670 tree lhs = gimple_get_lhs (stmt);
3671 while (handled_component_p (lhs))
3672 lhs = TREE_OPERAND (lhs, 0);
3673 if (TREE_CODE (lhs) == MEM_REF
3674 && TREE_OPERAND (lhs, 0) == name
3675 && integer_zerop (TREE_OPERAND (lhs, 1))
3676 && types_compatible_p (TREE_TYPE (lhs),
3677 TREE_TYPE (TREE_TYPE (name)))
3678 && !TREE_THIS_VOLATILE (lhs))
3679 uses_ok++;
3680 }
3681 if (gimple_assign_single_p (stmt))
3682 {
3683 tree rhs = gimple_assign_rhs1 (stmt);
3684 while (handled_component_p (rhs))
3685 rhs = TREE_OPERAND (rhs, 0);
3686 if (TREE_CODE (rhs) == MEM_REF
3687 && TREE_OPERAND (rhs, 0) == name
3688 && integer_zerop (TREE_OPERAND (rhs, 1))
3689 && types_compatible_p (TREE_TYPE (rhs),
3690 TREE_TYPE (TREE_TYPE (name)))
3691 && !TREE_THIS_VOLATILE (rhs))
3692 uses_ok++;
3693 }
3694 else if (is_gimple_call (stmt))
3695 {
3696 unsigned i;
3697 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3698 {
3699 tree arg = gimple_call_arg (stmt, i);
3700 while (handled_component_p (arg))
3701 arg = TREE_OPERAND (arg, 0);
3702 if (TREE_CODE (arg) == MEM_REF
3703 && TREE_OPERAND (arg, 0) == name
3704 && integer_zerop (TREE_OPERAND (arg, 1))
3705 && types_compatible_p (TREE_TYPE (arg),
3706 TREE_TYPE (TREE_TYPE (name)))
3707 && !TREE_THIS_VOLATILE (arg))
3708 uses_ok++;
3709 }
3710 }
3711
3712 /* If the number of valid uses does not match the number of
3713 uses in this stmt there is an unhandled use. */
3714 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3715 --uses_ok;
3716
3717 if (uses_ok != 0)
3718 ret = true;
3719
3720 if (ret)
3721 BREAK_FROM_IMM_USE_STMT (ui);
3722 }
3723
3724 return ret;
3725 }
3726
3727 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3728 them in candidate_bitmap. Note that these do not necessarily include
3729 parameters which are unused and thus can be removed.  Return true iff any
3730 such candidate has been found. */
3731
3732 static bool
3733 find_param_candidates (void)
3734 {
3735 tree parm;
3736 int count = 0;
3737 bool ret = false;
3738 const char *msg;
3739
3740 for (parm = DECL_ARGUMENTS (current_function_decl);
3741 parm;
3742 parm = DECL_CHAIN (parm))
3743 {
3744 tree type = TREE_TYPE (parm);
3745 tree_node **slot;
3746
3747 count++;
3748
3749 if (TREE_THIS_VOLATILE (parm)
3750 || TREE_ADDRESSABLE (parm)
3751 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3752 continue;
3753
3754 if (is_unused_scalar_param (parm))
3755 {
3756 ret = true;
3757 continue;
3758 }
3759
3760 if (POINTER_TYPE_P (type))
3761 {
3762 type = TREE_TYPE (type);
3763
3764 if (TREE_CODE (type) == FUNCTION_TYPE
3765 || TYPE_VOLATILE (type)
3766 || (TREE_CODE (type) == ARRAY_TYPE
3767 && TYPE_NONALIASED_COMPONENT (type))
3768 || !is_gimple_reg (parm)
3769 || is_va_list_type (type)
3770 || ptr_parm_has_direct_uses (parm))
3771 continue;
3772 }
3773 else if (!AGGREGATE_TYPE_P (type))
3774 continue;
3775
3776 if (!COMPLETE_TYPE_P (type)
3777 || !tree_fits_uhwi_p (TYPE_SIZE (type))
3778 || tree_to_uhwi (TYPE_SIZE (type)) == 0
3779 || (AGGREGATE_TYPE_P (type)
3780 && type_internals_preclude_sra_p (type, &msg)))
3781 continue;
3782
3783 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3784 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
3785 *slot = parm;
3786
3787 ret = true;
3788 if (dump_file && (dump_flags & TDF_DETAILS))
3789 {
3790 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3791 print_generic_expr (dump_file, parm, 0);
3792 fprintf (dump_file, "\n");
3793 }
3794 }
3795
3796 func_param_count = count;
3797 return ret;
3798 }
3799
3800 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3801 maybe_modified. */
3802
3803 static bool
3804 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3805 void *data)
3806 {
3807 struct access *repr = (struct access *) data;
3808
3809 repr->grp_maybe_modified = 1;
3810 return true;
3811 }
3812
3813 /* Analyze what representatives (in linked lists accessible from
3814 REPRESENTATIVES) can be modified by side effects of statements in the
3815 current function. */
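
/* For illustration only (assumed example; g is a hypothetical function that
   may write through an alias of *P):

     int f (int *p)
     {
       g ();
       return *p;
     }

   Although F itself only reads *P, the walk over aliased virtual definitions
   below would reach the call to G and set grp_maybe_modified on the
   representative, so P would not be converted to be passed by value.  */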
3816
3817 static void
3818 analyze_modified_params (vec<access_p> representatives)
3819 {
3820 int i;
3821
3822 for (i = 0; i < func_param_count; i++)
3823 {
3824 struct access *repr;
3825
3826 for (repr = representatives[i];
3827 repr;
3828 repr = repr->next_grp)
3829 {
3830 struct access *access;
3831 bitmap visited;
3832 ao_ref ar;
3833
3834 if (no_accesses_p (repr))
3835 continue;
3836 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3837 || repr->grp_maybe_modified)
3838 continue;
3839
3840 ao_ref_init (&ar, repr->expr);
3841 visited = BITMAP_ALLOC (NULL);
3842 for (access = repr; access; access = access->next_sibling)
3843 {
3844 /* All accesses are read ones, otherwise grp_maybe_modified would
3845 be trivially set. */
3846 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3847 mark_maybe_modified, repr, &visited);
3848 if (repr->grp_maybe_modified)
3849 break;
3850 }
3851 BITMAP_FREE (visited);
3852 }
3853 }
3854 }
3855
3856 /* Propagate distances in bb_dereferences in the opposite direction than the
3857 control flow edges, in each step storing the maximum of the current value
3858 and the minimum of all successors. These steps are repeated until the table
3859 stabilizes.  Note that BBs which might terminate the function (according to
3860 the final_bbs bitmap) are never updated in this way. */
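
/* Expressed as a dataflow sketch (using the names above, not code from the
   pass): for every BB not in final_bbs and every parameter I,

     bb_dereferences[BB][I]
       = MAX (bb_dereferences[BB][I],
	      MIN over successors S of bb_dereferences[S][I])

   iterated with a work list until no entry changes.  */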
3861
3862 static void
3863 propagate_dereference_distances (void)
3864 {
3865 basic_block bb;
3866
3867 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
3868 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3869 FOR_EACH_BB_FN (bb, cfun)
3870 {
3871 queue.quick_push (bb);
3872 bb->aux = bb;
3873 }
3874
3875 while (!queue.is_empty ())
3876 {
3877 edge_iterator ei;
3878 edge e;
3879 bool change = false;
3880 int i;
3881
3882 bb = queue.pop ();
3883 bb->aux = NULL;
3884
3885 if (bitmap_bit_p (final_bbs, bb->index))
3886 continue;
3887
3888 for (i = 0; i < func_param_count; i++)
3889 {
3890 int idx = bb->index * func_param_count + i;
3891 bool first = true;
3892 HOST_WIDE_INT inh = 0;
3893
3894 FOR_EACH_EDGE (e, ei, bb->succs)
3895 {
3896 int succ_idx = e->dest->index * func_param_count + i;
3897
3898 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
3899 continue;
3900
3901 if (first)
3902 {
3903 first = false;
3904 inh = bb_dereferences [succ_idx];
3905 }
3906 else if (bb_dereferences [succ_idx] < inh)
3907 inh = bb_dereferences [succ_idx];
3908 }
3909
3910 if (!first && bb_dereferences[idx] < inh)
3911 {
3912 bb_dereferences[idx] = inh;
3913 change = true;
3914 }
3915 }
3916
3917 if (change && !bitmap_bit_p (final_bbs, bb->index))
3918 FOR_EACH_EDGE (e, ei, bb->preds)
3919 {
3920 if (e->src->aux)
3921 continue;
3922
3923 e->src->aux = e->src;
3924 queue.quick_push (e->src);
3925 }
3926 }
3927 }
3928
3929 /* Dump a dereferences TABLE with heading STR to file F. */
3930
3931 static void
3932 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3933 {
3934 basic_block bb;
3935
3936 fprintf (f, "%s", str);
3937 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
3938 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
3939 {
3940 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3941 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3942 {
3943 int i;
3944 for (i = 0; i < func_param_count; i++)
3945 {
3946 int idx = bb->index * func_param_count + i;
3947 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3948 }
3949 }
3950 fprintf (f, "\n");
3951 }
3952 fprintf (f, "\n");
3953 }
3954
3955 /* Determine which (parts of) parameters passed by reference and not
3956 assigned to are not certainly dereferenced in this function, which means
3957 the dereferencing cannot be safely moved to the caller without potentially
3958 introducing a segfault.  Mark such REPRESENTATIVES as
3959 grp_not_necessarilly_dereferenced.
3960
3961 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3962 part, is calculated for each pointer parameter rather than a simple
3963 boolean, in order to handle cases when only a fraction of the whole
3964 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3965 an example).
3966
3967 The maximum dereference distances for each pointer parameter and BB are
3968 already stored in bb_dereferences.  This routine simply propagates these
3969 values upwards by propagate_dereference_distances and then compares the
3970 distances of individual parameters in the ENTRY BB to the equivalent
3971 distances of each representative of a (fraction of a) parameter. */
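
/* For illustration only (assumed example in the spirit of the testcase
   referenced above):

     struct S { int a, b, c; };
     int f (struct S *p, int deep)
     {
       if (deep)
	 return p->c;
       return p->a;
     }

   If a caller only ever allocates room for the A field and passes zero for
   DEEP, moving the load of P->C into that caller would read past the
   allocated object.  Comparing the distance certainly dereferenced on every
   path from the entry block with the offset + size of each representative is
   what rules such a transformation out.  */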
3972
3973 static void
3974 analyze_caller_dereference_legality (vec<access_p> representatives)
3975 {
3976 int i;
3977
3978 if (dump_file && (dump_flags & TDF_DETAILS))
3979 dump_dereferences_table (dump_file,
3980 "Dereference table before propagation:\n",
3981 bb_dereferences);
3982
3983 propagate_dereference_distances ();
3984
3985 if (dump_file && (dump_flags & TDF_DETAILS))
3986 dump_dereferences_table (dump_file,
3987 "Dereference table after propagation:\n",
3988 bb_dereferences);
3989
3990 for (i = 0; i < func_param_count; i++)
3991 {
3992 struct access *repr = representatives[i];
3993 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
3994
3995 if (!repr || no_accesses_p (repr))
3996 continue;
3997
3998 do
3999 {
4000 if ((repr->offset + repr->size) > bb_dereferences[idx])
4001 repr->grp_not_necessarilly_dereferenced = 1;
4002 repr = repr->next_grp;
4003 }
4004 while (repr);
4005 }
4006 }
4007
4008 /* Return the representative access for the parameter declaration PARM if it is
4009 a scalar passed by reference which is not written to and the pointer value
4010 is not used directly. Thus, if it is legal to dereference it in the caller
4011 and we can rule out modifications through aliases, such a parameter should be
4012 turned into one passed by value. Return NULL otherwise. */
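
/* A before/after sketch of the transformation this enables (assumed example;
   the clone and parameter names are only illustrative):

     int f (const int *p) { return *p + 1; }

   may, once all callers are adjusted, effectively become

     int f.isra (int p_val) { return p_val + 1; }

   i.e. the scalar is passed by value and the dereference is performed in the
   callers instead.  */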
4013
4014 static struct access *
4015 unmodified_by_ref_scalar_representative (tree parm)
4016 {
4017 int i, access_count;
4018 struct access *repr;
4019 vec<access_p> *access_vec;
4020
4021 access_vec = get_base_access_vector (parm);
4022 gcc_assert (access_vec);
4023 repr = (*access_vec)[0];
4024 if (repr->write)
4025 return NULL;
4026 repr->group_representative = repr;
4027
4028 access_count = access_vec->length ();
4029 for (i = 1; i < access_count; i++)
4030 {
4031 struct access *access = (*access_vec)[i];
4032 if (access->write)
4033 return NULL;
4034 access->group_representative = repr;
4035 access->next_sibling = repr->next_sibling;
4036 repr->next_sibling = access;
4037 }
4038
4039 repr->grp_read = 1;
4040 repr->grp_scalar_ptr = 1;
4041 return repr;
4042 }
4043
4044 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4045 associated with. REQ_ALIGN is the minimum required alignment. */
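
/* For illustration only (assumed example, unrelated to the actual PR 42025
   testcases): in

     void f (struct S *p) { p->x = get_x (); }

   the component of *P is written directly by a call statement, which is
   exactly what the first check below rejects, so P would not be split.  */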
4046
4047 static bool
4048 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4049 {
4050 unsigned int exp_align;
4051 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4052 is an incompatible assignment in a call statement (and possibly even in asm
4053 statements). This can be relaxed by using a new temporary but only for
4054 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4055 intraprocedural SRA we deal with this by keeping the old aggregate around,
4056 something we cannot do in IPA-SRA.) */
4057 if (access->write
4058 && (is_gimple_call (access->stmt)
4059 || gimple_code (access->stmt) == GIMPLE_ASM))
4060 return true;
4061
4062 exp_align = get_object_alignment (access->expr);
4063 if (exp_align < req_align)
4064 return true;
4065
4066 return false;
4067 }
4068
4069
4070 /* Sort collected accesses for parameter PARM, identify representatives for
4071 each accessed region and link them together.  Return NULL if there are
4072 different but overlapping accesses, return the special pointer value
4073 representing no accesses if this parameter is not accessed at all, and
4074 return the first representative otherwise.  Set *RO_GRP if there is a group
4075 with only read (i.e. no write) accesses. */
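
/* For illustration only (assumed scenario): if one statement reads a whole
   8-byte field of *PARM and another statement reads only its first four
   bytes (e.g. through a union), the two accesses share an offset but differ
   in size, so the loop below returns NULL and the parameter is left
   untouched.  */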
4076
4077 static struct access *
4078 splice_param_accesses (tree parm, bool *ro_grp)
4079 {
4080 int i, j, access_count, group_count;
4081 int agg_size, total_size = 0;
4082 struct access *access, *res, **prev_acc_ptr = &res;
4083 vec<access_p> *access_vec;
4084
4085 access_vec = get_base_access_vector (parm);
4086 if (!access_vec)
4087 return &no_accesses_representant;
4088 access_count = access_vec->length ();
4089
4090 access_vec->qsort (compare_access_positions);
4091
4092 i = 0;
4093 total_size = 0;
4094 group_count = 0;
4095 while (i < access_count)
4096 {
4097 bool modification;
4098 tree a1_alias_type;
4099 access = (*access_vec)[i];
4100 modification = access->write;
4101 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4102 return NULL;
4103 a1_alias_type = reference_alias_ptr_type (access->expr);
4104
4105 /* Access is about to become group representative unless we find some
4106 nasty overlap which would preclude us from breaking this parameter
4107 apart. */
4108
4109 j = i + 1;
4110 while (j < access_count)
4111 {
4112 struct access *ac2 = (*access_vec)[j];
4113 if (ac2->offset != access->offset)
4114 {
4115 /* All or nothing law for parameters. */
4116 if (access->offset + access->size > ac2->offset)
4117 return NULL;
4118 else
4119 break;
4120 }
4121 else if (ac2->size != access->size)
4122 return NULL;
4123
4124 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4125 || (ac2->type != access->type
4126 && (TREE_ADDRESSABLE (ac2->type)
4127 || TREE_ADDRESSABLE (access->type)))
4128 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4129 return NULL;
4130
4131 modification |= ac2->write;
4132 ac2->group_representative = access;
4133 ac2->next_sibling = access->next_sibling;
4134 access->next_sibling = ac2;
4135 j++;
4136 }
4137
4138 group_count++;
4139 access->grp_maybe_modified = modification;
4140 if (!modification)
4141 *ro_grp = true;
4142 *prev_acc_ptr = access;
4143 prev_acc_ptr = &access->next_grp;
4144 total_size += access->size;
4145 i = j;
4146 }
4147
4148 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4149 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4150 else
4151 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4152 if (total_size >= agg_size)
4153 return NULL;
4154
4155 gcc_assert (group_count > 0);
4156 return res;
4157 }
4158
4159 /* Decide whether parameters with representative accesses given by REPR should
4160 be reduced into components. */
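
/* For illustration only (assumed example, with default parameter values):

     struct S { int a; int b; int c; };
     int f (struct S *p) { return p->a + p->c; }

   Two representatives (for A and C) survive, their total size is smaller
   than the size of S and does not exceed PARAM_IPA_SRA_PTR_GROWTH_FACTOR
   times the size of the pointer parameter, so the function below would
   return 2 and P would later be replaced by two scalar parameters.  */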
4161
4162 static int
4163 decide_one_param_reduction (struct access *repr)
4164 {
4165 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4166 bool by_ref;
4167 tree parm;
4168
4169 parm = repr->base;
4170 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4171 gcc_assert (cur_parm_size > 0);
4172
4173 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4174 {
4175 by_ref = true;
4176 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4177 }
4178 else
4179 {
4180 by_ref = false;
4181 agg_size = cur_parm_size;
4182 }
4183
4184 if (dump_file)
4185 {
4186 struct access *acc;
4187 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4188 print_generic_expr (dump_file, parm, 0);
4189 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4190 for (acc = repr; acc; acc = acc->next_grp)
4191 dump_access (dump_file, acc, true);
4192 }
4193
4194 total_size = 0;
4195 new_param_count = 0;
4196
4197 for (; repr; repr = repr->next_grp)
4198 {
4199 gcc_assert (parm == repr->base);
4200
4201 /* Taking the address of a non-addressable field is verboten. */
4202 if (by_ref && repr->non_addressable)
4203 return 0;
4204
4205 /* Do not decompose a non-BLKmode param in a way that would
4206 create BLKmode params. Especially for by-reference passing
4207 (thus, pointer-type param) this is hardly worthwhile. */
4208 if (DECL_MODE (parm) != BLKmode
4209 && TYPE_MODE (repr->type) == BLKmode)
4210 return 0;
4211
4212 if (!by_ref || (!repr->grp_maybe_modified
4213 && !repr->grp_not_necessarilly_dereferenced))
4214 total_size += repr->size;
4215 else
4216 total_size += cur_parm_size;
4217
4218 new_param_count++;
4219 }
4220
4221 gcc_assert (new_param_count > 0);
4222
4223 if (optimize_function_for_size_p (cfun))
4224 parm_size_limit = cur_parm_size;
4225 else
4226 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4227 * cur_parm_size);
4228
4229 if (total_size < agg_size
4230 && total_size <= parm_size_limit)
4231 {
4232 if (dump_file)
4233 fprintf (dump_file, " ....will be split into %i components\n",
4234 new_param_count);
4235 return new_param_count;
4236 }
4237 else
4238 return 0;
4239 }
4240
4241 /* The order of the following enumerators is important; we need to do extra
4242 work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4243 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4244 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4245
4246 /* Identify representatives of all accesses to all candidate parameters for
4247 IPA-SRA. Return result based on what representatives have been found. */
4248
4249 static enum ipa_splicing_result
4250 splice_all_param_accesses (vec<access_p> &representatives)
4251 {
4252 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4253 tree parm;
4254 struct access *repr;
4255
4256 representatives.create (func_param_count);
4257
4258 for (parm = DECL_ARGUMENTS (current_function_decl);
4259 parm;
4260 parm = DECL_CHAIN (parm))
4261 {
4262 if (is_unused_scalar_param (parm))
4263 {
4264 representatives.quick_push (&no_accesses_representant);
4265 if (result == NO_GOOD_ACCESS)
4266 result = UNUSED_PARAMS;
4267 }
4268 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4269 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4270 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4271 {
4272 repr = unmodified_by_ref_scalar_representative (parm);
4273 representatives.quick_push (repr);
4274 if (repr)
4275 result = UNMODIF_BY_REF_ACCESSES;
4276 }
4277 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4278 {
4279 bool ro_grp = false;
4280 repr = splice_param_accesses (parm, &ro_grp);
4281 representatives.quick_push (repr);
4282
4283 if (repr && !no_accesses_p (repr))
4284 {
4285 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4286 {
4287 if (ro_grp)
4288 result = UNMODIF_BY_REF_ACCESSES;
4289 else if (result < MODIF_BY_REF_ACCESSES)
4290 result = MODIF_BY_REF_ACCESSES;
4291 }
4292 else if (result < BY_VAL_ACCESSES)
4293 result = BY_VAL_ACCESSES;
4294 }
4295 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4296 result = UNUSED_PARAMS;
4297 }
4298 else
4299 representatives.quick_push (NULL);
4300 }
4301
4302 if (result == NO_GOOD_ACCESS)
4303 {
4304 representatives.release ();
4305 return NO_GOOD_ACCESS;
4306 }
4307
4308 return result;
4309 }
4310
4311 /* Return the index of BASE in PARMS. Abort if it is not found. */
4312
4313 static inline int
4314 get_param_index (tree base, vec<tree> parms)
4315 {
4316 int i, len;
4317
4318 len = parms.length ();
4319 for (i = 0; i < len; i++)
4320 if (parms[i] == base)
4321 return i;
4322 gcc_unreachable ();
4323 }
4324
4325 /* Convert the decisions made at the representative level into compact
4326 parameter adjustments. REPRESENTATIVES are pointers to first
4327 representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
4328 final number of adjustments. */
4329
4330 static ipa_parm_adjustment_vec
4331 turn_representatives_into_adjustments (vec<access_p> representatives,
4332 int adjustments_count)
4333 {
4334 vec<tree> parms;
4335 ipa_parm_adjustment_vec adjustments;
4336 tree parm;
4337 int i;
4338
4339 gcc_assert (adjustments_count > 0);
4340 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4341 adjustments.create (adjustments_count);
4342 parm = DECL_ARGUMENTS (current_function_decl);
4343 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4344 {
4345 struct access *repr = representatives[i];
4346
4347 if (!repr || no_accesses_p (repr))
4348 {
4349 struct ipa_parm_adjustment adj;
4350
4351 memset (&adj, 0, sizeof (adj));
4352 adj.base_index = get_param_index (parm, parms);
4353 adj.base = parm;
4354 if (!repr)
4355 adj.op = IPA_PARM_OP_COPY;
4356 else
4357 adj.op = IPA_PARM_OP_REMOVE;
4358 adj.arg_prefix = "ISRA";
4359 adjustments.quick_push (adj);
4360 }
4361 else
4362 {
4363 struct ipa_parm_adjustment adj;
4364 int index = get_param_index (parm, parms);
4365
4366 for (; repr; repr = repr->next_grp)
4367 {
4368 memset (&adj, 0, sizeof (adj));
4369 gcc_assert (repr->base == parm);
4370 adj.base_index = index;
4371 adj.base = repr->base;
4372 adj.type = repr->type;
4373 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4374 adj.offset = repr->offset;
4375 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4376 && (repr->grp_maybe_modified
4377 || repr->grp_not_necessarilly_dereferenced));
4378 adj.arg_prefix = "ISRA";
4379 adjustments.quick_push (adj);
4380 }
4381 }
4382 }
4383 parms.release ();
4384 return adjustments;
4385 }
4386
4387 /* Analyze the collected accesses and produce a plan for what to do with the
4388 parameters in the form of adjustments, an empty vector meaning nothing. */
4389
4390 static ipa_parm_adjustment_vec
4391 analyze_all_param_acesses (void)
4392 {
4393 enum ipa_splicing_result repr_state;
4394 bool proceed = false;
4395 int i, adjustments_count = 0;
4396 vec<access_p> representatives;
4397 ipa_parm_adjustment_vec adjustments;
4398
4399 repr_state = splice_all_param_accesses (representatives);
4400 if (repr_state == NO_GOOD_ACCESS)
4401 return ipa_parm_adjustment_vec ();
4402
4403 /* If there are any parameters passed by reference which are not modified
4404 directly, we need to check whether they can be modified indirectly. */
4405 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4406 {
4407 analyze_caller_dereference_legality (representatives);
4408 analyze_modified_params (representatives);
4409 }
4410
4411 for (i = 0; i < func_param_count; i++)
4412 {
4413 struct access *repr = representatives[i];
4414
4415 if (repr && !no_accesses_p (repr))
4416 {
4417 if (repr->grp_scalar_ptr)
4418 {
4419 adjustments_count++;
4420 if (repr->grp_not_necessarilly_dereferenced
4421 || repr->grp_maybe_modified)
4422 representatives[i] = NULL;
4423 else
4424 {
4425 proceed = true;
4426 sra_stats.scalar_by_ref_to_by_val++;
4427 }
4428 }
4429 else
4430 {
4431 int new_components = decide_one_param_reduction (repr);
4432
4433 if (new_components == 0)
4434 {
4435 representatives[i] = NULL;
4436 adjustments_count++;
4437 }
4438 else
4439 {
4440 adjustments_count += new_components;
4441 sra_stats.aggregate_params_reduced++;
4442 sra_stats.param_reductions_created += new_components;
4443 proceed = true;
4444 }
4445 }
4446 }
4447 else
4448 {
4449 if (no_accesses_p (repr))
4450 {
4451 proceed = true;
4452 sra_stats.deleted_unused_parameters++;
4453 }
4454 adjustments_count++;
4455 }
4456 }
4457
4458 if (!proceed && dump_file)
4459 fprintf (dump_file, "NOT proceeding to change params.\n");
4460
4461 if (proceed)
4462 adjustments = turn_representatives_into_adjustments (representatives,
4463 adjustments_count);
4464 else
4465 adjustments = ipa_parm_adjustment_vec ();
4466
4467 representatives.release ();
4468 return adjustments;
4469 }
4470
4471 /* If a parameter replacement identified by ADJ does not yet exist in the form
4472 of a declaration, create it and record it; otherwise return the previously
4473 created one. */
4474
4475 static tree
4476 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4477 {
4478 tree repl;
4479 if (!adj->new_ssa_base)
4480 {
4481 char *pretty_name = make_fancy_name (adj->base);
4482
4483 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4484 DECL_NAME (repl) = get_identifier (pretty_name);
4485 obstack_free (&name_obstack, pretty_name);
4486
4487 adj->new_ssa_base = repl;
4488 }
4489 else
4490 repl = adj->new_ssa_base;
4491 return repl;
4492 }
4493
4494 /* Find the first adjustment for a particular parameter BASE in a vector of
4495 ADJUSTMENTS which is not a copy (IPA_PARM_OP_COPY).  Return NULL if there is no such
4496 adjustment. */
4497
4498 static struct ipa_parm_adjustment *
4499 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4500 {
4501 int i, len;
4502
4503 len = adjustments.length ();
4504 for (i = 0; i < len; i++)
4505 {
4506 struct ipa_parm_adjustment *adj;
4507
4508 adj = &adjustments[i];
4509 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4510 return adj;
4511 }
4512
4513 return NULL;
4514 }
4515
4516 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4517 removed because its value is not used, replace the SSA_NAME, together with
4518 all of its uses, with one relating to a newly created VAR_DECL and return
4519 true.  ADJUSTMENTS is the vector of parameter adjustments. */
4520
4521 static bool
4522 replace_removed_params_ssa_names (gimple stmt,
4523 ipa_parm_adjustment_vec adjustments)
4524 {
4525 struct ipa_parm_adjustment *adj;
4526 tree lhs, decl, repl, name;
4527
4528 if (gimple_code (stmt) == GIMPLE_PHI)
4529 lhs = gimple_phi_result (stmt);
4530 else if (is_gimple_assign (stmt))
4531 lhs = gimple_assign_lhs (stmt);
4532 else if (is_gimple_call (stmt))
4533 lhs = gimple_call_lhs (stmt);
4534 else
4535 gcc_unreachable ();
4536
4537 if (TREE_CODE (lhs) != SSA_NAME)
4538 return false;
4539
4540 decl = SSA_NAME_VAR (lhs);
4541 if (decl == NULL_TREE
4542 || TREE_CODE (decl) != PARM_DECL)
4543 return false;
4544
4545 adj = get_adjustment_for_base (adjustments, decl);
4546 if (!adj)
4547 return false;
4548
4549 repl = get_replaced_param_substitute (adj);
4550 name = make_ssa_name (repl, stmt);
4551
4552 if (dump_file)
4553 {
4554 fprintf (dump_file, "replacing an SSA name of a removed param ");
4555 print_generic_expr (dump_file, lhs, 0);
4556 fprintf (dump_file, " with ");
4557 print_generic_expr (dump_file, name, 0);
4558 fprintf (dump_file, "\n");
4559 }
4560
4561 if (is_gimple_assign (stmt))
4562 gimple_assign_set_lhs (stmt, name);
4563 else if (is_gimple_call (stmt))
4564 gimple_call_set_lhs (stmt, name);
4565 else
4566 gimple_phi_set_result (stmt, name);
4567
4568 replace_uses_by (lhs, name);
4569 release_ssa_name (lhs);
4570 return true;
4571 }
4572
4573 /* If the statement STMT contains any expressions that need to be replaced
4574 with a different one as noted by ADJUSTMENTS, do so.  Handle any potential type
4575 incompatibilities (GSI is used to accommodate conversion statements and must
4576 point to the statement). Return true iff the statement was modified. */
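
/* For illustration only (assumed scenario): when a structure with a single
   float field is reduced to a plain float, an assignment that originally
   copied two such structures can end up with a float on one side and a value
   that still has the structure type on the other; the VIEW_CONVERT_EXPR (or,
   for constructor right-hand sides, a zero constant) built below papers over
   that mismatch.  */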
4577
4578 static bool
4579 sra_ipa_modify_assign (gimple stmt, gimple_stmt_iterator *gsi,
4580 ipa_parm_adjustment_vec adjustments)
4581 {
4582 tree *lhs_p, *rhs_p;
4583 bool any;
4584
4585 if (!gimple_assign_single_p (stmt))
4586 return false;
4587
4588 rhs_p = gimple_assign_rhs1_ptr (stmt);
4589 lhs_p = gimple_assign_lhs_ptr (stmt);
4590
4591 any = ipa_modify_expr (rhs_p, false, adjustments);
4592 any |= ipa_modify_expr (lhs_p, false, adjustments);
4593 if (any)
4594 {
4595 tree new_rhs = NULL_TREE;
4596
4597 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4598 {
4599 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4600 {
4601 /* V_C_Es of constructors can cause trouble (PR 42714). */
4602 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4603 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4604 else
4605 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4606 NULL);
4607 }
4608 else
4609 new_rhs = fold_build1_loc (gimple_location (stmt),
4610 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4611 *rhs_p);
4612 }
4613 else if (REFERENCE_CLASS_P (*rhs_p)
4614 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4615 && !is_gimple_reg (*lhs_p))
4616 /* This can happen when an assignment in between two single field
4617 structures is turned into an assignment in between two pointers to
4618 scalars (PR 42237). */
4619 new_rhs = *rhs_p;
4620
4621 if (new_rhs)
4622 {
4623 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4624 true, GSI_SAME_STMT);
4625
4626 gimple_assign_set_rhs_from_tree (gsi, tmp);
4627 }
4628
4629 return true;
4630 }
4631
4632 return false;
4633 }
4634
4635 /* Traverse the function body and perform all modifications as described in
4636 ADJUSTMENTS. Return true iff the CFG has been changed. */
4637
4638 bool
4639 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4640 {
4641 bool cfg_changed = false;
4642 basic_block bb;
4643
4644 FOR_EACH_BB_FN (bb, cfun)
4645 {
4646 gimple_stmt_iterator gsi;
4647
4648 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4649 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4650
4651 gsi = gsi_start_bb (bb);
4652 while (!gsi_end_p (gsi))
4653 {
4654 gimple stmt = gsi_stmt (gsi);
4655 bool modified = false;
4656 tree *t;
4657 unsigned i;
4658
4659 switch (gimple_code (stmt))
4660 {
4661 case GIMPLE_RETURN:
4662 t = gimple_return_retval_ptr (stmt);
4663 if (*t != NULL_TREE)
4664 modified |= ipa_modify_expr (t, true, adjustments);
4665 break;
4666
4667 case GIMPLE_ASSIGN:
4668 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4669 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4670 break;
4671
4672 case GIMPLE_CALL:
4673 /* Operands must be processed before the lhs. */
4674 for (i = 0; i < gimple_call_num_args (stmt); i++)
4675 {
4676 t = gimple_call_arg_ptr (stmt, i);
4677 modified |= ipa_modify_expr (t, true, adjustments);
4678 }
4679
4680 if (gimple_call_lhs (stmt))
4681 {
4682 t = gimple_call_lhs_ptr (stmt);
4683 modified |= ipa_modify_expr (t, false, adjustments);
4684 modified |= replace_removed_params_ssa_names (stmt,
4685 adjustments);
4686 }
4687 break;
4688
4689 case GIMPLE_ASM:
4690 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4691 {
4692 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4693 modified |= ipa_modify_expr (t, true, adjustments);
4694 }
4695 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4696 {
4697 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4698 modified |= ipa_modify_expr (t, false, adjustments);
4699 }
4700 break;
4701
4702 default:
4703 break;
4704 }
4705
4706 if (modified)
4707 {
4708 update_stmt (stmt);
4709 if (maybe_clean_eh_stmt (stmt)
4710 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4711 cfg_changed = true;
4712 }
4713 gsi_next (&gsi);
4714 }
4715 }
4716
4717 return cfg_changed;
4718 }
4719
4720 /* Call gimple_debug_bind_reset_value on all debug statements describing
4721 gimple register parameters that are being removed or replaced. */
4722
4723 static void
4724 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4725 {
4726 int i, len;
4727 gimple_stmt_iterator *gsip = NULL, gsi;
4728
4729 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
4730 {
4731 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4732 gsip = &gsi;
4733 }
4734 len = adjustments.length ();
4735 for (i = 0; i < len; i++)
4736 {
4737 struct ipa_parm_adjustment *adj;
4738 imm_use_iterator ui;
4739 gimple stmt, def_temp;
4740 tree name, vexpr, copy = NULL_TREE;
4741 use_operand_p use_p;
4742
4743 adj = &adjustments[i];
4744 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
4745 continue;
4746 name = ssa_default_def (cfun, adj->base);
4747 vexpr = NULL;
4748 if (name)
4749 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4750 {
4751 if (gimple_clobber_p (stmt))
4752 {
4753 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
4754 unlink_stmt_vdef (stmt);
4755 gsi_remove (&cgsi, true);
4756 release_defs (stmt);
4757 continue;
4758 }
4759 /* All other users must have been removed by
4760 ipa_sra_modify_function_body. */
4761 gcc_assert (is_gimple_debug (stmt));
4762 if (vexpr == NULL && gsip != NULL)
4763 {
4764 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4765 vexpr = make_node (DEBUG_EXPR_DECL);
4766 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4767 NULL);
4768 DECL_ARTIFICIAL (vexpr) = 1;
4769 TREE_TYPE (vexpr) = TREE_TYPE (name);
4770 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4771 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4772 }
4773 if (vexpr)
4774 {
4775 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4776 SET_USE (use_p, vexpr);
4777 }
4778 else
4779 gimple_debug_bind_reset_value (stmt);
4780 update_stmt (stmt);
4781 }
4782 /* Create a VAR_DECL for debug info purposes. */
4783 if (!DECL_IGNORED_P (adj->base))
4784 {
4785 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4786 VAR_DECL, DECL_NAME (adj->base),
4787 TREE_TYPE (adj->base));
4788 if (DECL_PT_UID_SET_P (adj->base))
4789 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4790 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4791 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4792 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4793 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4794 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4795 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4796 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4797 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4798 SET_DECL_RTL (copy, 0);
4799 TREE_USED (copy) = 1;
4800 DECL_CONTEXT (copy) = current_function_decl;
4801 add_local_decl (cfun, copy);
4802 DECL_CHAIN (copy) =
4803 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4804 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4805 }
4806 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4807 {
4808 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4809 if (vexpr)
4810 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4811 else
4812 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4813 NULL);
4814 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4815 }
4816 }
4817 }
4818
4819 /* Return false if all callers have at least as many actual arguments as there
4820 are formal parameters in the current function and their types match; return
4821 true otherwise. */
4822
4823 static bool
4824 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
4825 void *data ATTRIBUTE_UNUSED)
4826 {
4827 struct cgraph_edge *cs;
4828 for (cs = node->callers; cs; cs = cs->next_caller)
4829 if (!callsite_arguments_match_p (cs->call_stmt))
4830 return true;
4831
4832 return false;
4833 }
4834
4835 /* Convert all callers of NODE. */
4836
4837 static bool
4838 convert_callers_for_node (struct cgraph_node *node,
4839 void *data)
4840 {
4841 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4842 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4843 struct cgraph_edge *cs;
4844
4845 for (cs = node->callers; cs; cs = cs->next_caller)
4846 {
4847 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4848
4849 if (dump_file)
4850 fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
4851 xstrdup (cs->caller->name ()),
4852 cs->caller->order,
4853 xstrdup (cs->callee->name ()),
4854 cs->callee->order);
4855
4856 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4857
4858 pop_cfun ();
4859 }
4860
4861 for (cs = node->callers; cs; cs = cs->next_caller)
4862 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4863 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4864 compute_inline_parameters (cs->caller, true);
4865 BITMAP_FREE (recomputed_callers);
4866
4867 return true;
4868 }
4869
4870 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4871
4872 static void
4873 convert_callers (struct cgraph_node *node, tree old_decl,
4874 ipa_parm_adjustment_vec adjustments)
4875 {
4876 basic_block this_block;
4877
4878 node->call_for_symbol_thunks_and_aliases (convert_callers_for_node,
4879 &adjustments, false);
4880
4881 if (!encountered_recursive_call)
4882 return;
4883
4884 FOR_EACH_BB_FN (this_block, cfun)
4885 {
4886 gimple_stmt_iterator gsi;
4887
4888 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4889 {
4890 gimple stmt = gsi_stmt (gsi);
4891 tree call_fndecl;
4892 if (gimple_code (stmt) != GIMPLE_CALL)
4893 continue;
4894 call_fndecl = gimple_call_fndecl (stmt);
4895 if (call_fndecl == old_decl)
4896 {
4897 if (dump_file)
4898 fprintf (dump_file, "Adjusting recursive call");
4899 gimple_call_set_fndecl (stmt, node->decl);
4900 ipa_modify_call_arguments (NULL, stmt, adjustments);
4901 }
4902 }
4903 }
4904
4905 return;
4906 }
4907
4908 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4909 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4910
4911 static bool
4912 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4913 {
4914 struct cgraph_node *new_node;
4915 bool cfg_changed;
4916
4917 cgraph_edge::rebuild_edges ();
4918 free_dominance_info (CDI_DOMINATORS);
4919 pop_cfun ();
4920
4921 /* This must be done after rebuilding cgraph edges for node above.
4922 Otherwise any recursive calls to node that are recorded in
4923 redirect_callers will be corrupted. */
4924 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
4925 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
4926 NULL, false, NULL, NULL,
4927 "isra");
4928 redirect_callers.release ();
4929
4930 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4931 ipa_modify_formal_parameters (current_function_decl, adjustments);
4932 cfg_changed = ipa_sra_modify_function_body (adjustments);
4933 sra_ipa_reset_debug_stmts (adjustments);
4934 convert_callers (new_node, node->decl, adjustments);
4935 new_node->make_local ();
4936 return cfg_changed;
4937 }
4938
4939 /* If NODE has a caller, return true. */
4940
4941 static bool
4942 has_caller_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4943 {
4944 if (node->callers)
4945 return true;
4946 return false;
4947 }
4948
4949 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4950 its attributes; return true otherwise.  NODE is the cgraph node of the current
4951 function. */
4952
4953 static bool
4954 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4955 {
4956 if (!node->can_be_local_p ())
4957 {
4958 if (dump_file)
4959 fprintf (dump_file, "Function not local to this compilation unit.\n");
4960 return false;
4961 }
4962
4963 if (!node->local.can_change_signature)
4964 {
4965 if (dump_file)
4966 fprintf (dump_file, "Function can not change signature.\n");
4967 return false;
4968 }
4969
4970 if (!tree_versionable_function_p (node->decl))
4971 {
4972 if (dump_file)
4973 fprintf (dump_file, "Function is not versionable.\n");
4974 return false;
4975 }
4976
4977 if (!opt_for_fn (node->decl, optimize)
4978 || !opt_for_fn (node->decl, flag_ipa_sra))
4979 {
4980 if (dump_file)
4981 fprintf (dump_file, "Function not optimized.\n");
4982 return false;
4983 }
4984
4985 if (DECL_VIRTUAL_P (current_function_decl))
4986 {
4987 if (dump_file)
4988 fprintf (dump_file, "Function is a virtual method.\n");
4989 return false;
4990 }
4991
4992 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4993 && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
4994 {
4995 if (dump_file)
4996 fprintf (dump_file, "Function too big to be made truly local.\n");
4997 return false;
4998 }
4999
5000 if (!node->call_for_symbol_thunks_and_aliases (has_caller_p, NULL, true))
5001 {
5002 if (dump_file)
5003 fprintf (dump_file,
5004 "Function has no callers in this compilation unit.\n");
5005 return false;
5006 }
5007
5008 if (cfun->stdarg)
5009 {
5010 if (dump_file)
5011 fprintf (dump_file, "Function uses stdarg. \n");
5012 return false;
5013 }
5014
5015 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5016 return false;
5017
5018 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5019 {
5020 if (dump_file)
5021 fprintf (dump_file, "Always inline function will be inlined "
5022 "anyway. \n");
5023 return false;
5024 }
5025
5026 return true;
5027 }
5028
5029 /* Perform early interprocedural SRA. */
5030
5031 static unsigned int
5032 ipa_early_sra (void)
5033 {
5034 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5035 ipa_parm_adjustment_vec adjustments;
5036 int ret = 0;
5037
5038 if (!ipa_sra_preliminary_function_checks (node))
5039 return 0;
5040
5041 sra_initialize ();
5042 sra_mode = SRA_MODE_EARLY_IPA;
5043
5044 if (!find_param_candidates ())
5045 {
5046 if (dump_file)
5047 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5048 goto simple_out;
5049 }
5050
5051 if (node->call_for_symbol_thunks_and_aliases
5052 (some_callers_have_mismatched_arguments_p, NULL, true))
5053 {
5054 if (dump_file)
5055 fprintf (dump_file, "There are callers with insufficient number of "
5056 "arguments or arguments with type mismatches.\n");
5057 goto simple_out;
5058 }
5059
5060 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5061 func_param_count
5062 * last_basic_block_for_fn (cfun));
5063 final_bbs = BITMAP_ALLOC (NULL);
5064
5065 scan_function ();
5066 if (encountered_apply_args)
5067 {
5068 if (dump_file)
5069 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5070 goto out;
5071 }
5072
5073 if (encountered_unchangable_recursive_call)
5074 {
5075 if (dump_file)
5076 fprintf (dump_file, "Function calls itself with insufficient "
5077 "number of arguments.\n");
5078 goto out;
5079 }
5080
5081 adjustments = analyze_all_param_acesses ();
5082 if (!adjustments.exists ())
5083 goto out;
5084 if (dump_file)
5085 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5086
5087 if (modify_function (node, adjustments))
5088 ret = TODO_update_ssa | TODO_cleanup_cfg;
5089 else
5090 ret = TODO_update_ssa;
5091 adjustments.release ();
5092
5093 statistics_counter_event (cfun, "Unused parameters deleted",
5094 sra_stats.deleted_unused_parameters);
5095 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5096 sra_stats.scalar_by_ref_to_by_val);
5097 statistics_counter_event (cfun, "Aggregate parameters broken up",
5098 sra_stats.aggregate_params_reduced);
5099 statistics_counter_event (cfun, "Aggregate parameter components created",
5100 sra_stats.param_reductions_created);
5101
5102 out:
5103 BITMAP_FREE (final_bbs);
5104 free (bb_dereferences);
5105 simple_out:
5106 sra_deinitialize ();
5107 return ret;
5108 }
5109
5110 namespace {
5111
5112 const pass_data pass_data_early_ipa_sra =
5113 {
5114 GIMPLE_PASS, /* type */
5115 "eipa_sra", /* name */
5116 OPTGROUP_NONE, /* optinfo_flags */
5117 TV_IPA_SRA, /* tv_id */
5118 0, /* properties_required */
5119 0, /* properties_provided */
5120 0, /* properties_destroyed */
5121 0, /* todo_flags_start */
5122 TODO_dump_symtab, /* todo_flags_finish */
5123 };
5124
5125 class pass_early_ipa_sra : public gimple_opt_pass
5126 {
5127 public:
5128 pass_early_ipa_sra (gcc::context *ctxt)
5129 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5130 {}
5131
5132 /* opt_pass methods: */
5133 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5134 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5135
5136 }; // class pass_early_ipa_sra
5137
5138 } // anon namespace
5139
5140 gimple_opt_pass *
5141 make_pass_early_ipa_sra (gcc::context *ctxt)
5142 {
5143 return new pass_early_ipa_sra (ctxt);
5144 }