sra: Bail out when encountering accesses with negative offsets (PR 96730)
[gcc.git] / gcc / tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2020 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
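/* As a rough, purely illustrative example of the intended effect (the names
   and types below are hypothetical, not taken from any testcase), a function
   such as

     struct point { int x; int y; };

     int
     foo (int a, int b)
     {
       struct point p;
       p.x = a;
       p.y = b;
       return p.x + p.y;
     }

   would, after SRA, conceptually use two independent scalar replacements
   instead of the aggregate P:

     int
     foo (int a, int b)
     {
       int p$x, p$y;
       p$x = a;
       p$y = b;
       return p$x + p$y;
     }

   which the later scalar optimizers can then handle as ordinary SSA names.  */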
73
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "dbgcnt.h"
100 #include "builtins.h"
101 #include "tree-sra.h"
102
103
104 /* Enumeration of all aggregate reductions we can do. */
105 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
106 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
107 SRA_MODE_INTRA }; /* late intraprocedural SRA */
108
109 /* Global variable describing which aggregate reduction we are performing at
110 the moment. */
111 static enum sra_mode sra_mode;
112
113 struct assign_link;
114
115 /* ACCESS represents each access to an aggregate variable (as a whole or a
116 part). It can also represent a group of accesses that refer to exactly the
117 same fragment of an aggregate (i.e. those that have exactly the same offset
118 and size). Such representatives for a single aggregate, once determined,
119 are linked in a linked list and have the group fields set.
120
121 Moreover, when doing intraprocedural SRA, a tree is built from those
122 representatives (by the means of first_child and next_sibling pointers), in
123 which all items in a subtree are "within" the root, i.e. their offset is
124 greater or equal to offset of the root and offset+size is smaller or equal
125 to offset+size of the root. Children of an access are sorted by offset.
126
127 Note that accesses to parts of vector and complex number types are always
128 represented by an access to the whole complex number or a vector. It is a
129 duty of the modifying functions to replace them appropriately. */
130
131 struct access
132 {
133 /* Values returned by `get_ref_base_and_extent' for each component reference.
134 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
135 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
136 HOST_WIDE_INT offset;
137 HOST_WIDE_INT size;
138 tree base;
139
140 /* Expression. It is context dependent so do not use it to create new
141 expressions to access the original aggregate. See PR 42154 for a
142 testcase. */
143 tree expr;
144 /* Type. */
145 tree type;
146
147 /* The statement this access belongs to. */
148 gimple *stmt;
149
150 /* Next group representative for this aggregate. */
151 struct access *next_grp;
152
153 /* Pointer to the group representative. Pointer to itself if the struct is
154 the representative. */
155 struct access *group_representative;
156
157 /* After access tree has been constructed, this points to the parent of the
158 current access, if there is one. NULL for roots. */
159 struct access *parent;
160
161 /* If this access has any children (in terms of the definition above), this
162 points to the first one. */
163 struct access *first_child;
164
165 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
166 described above. */
167 struct access *next_sibling;
168
169 /* Pointers to the first and last element in the linked list of assign
170 links for propagation from RHS to LHS. */
171 struct assign_link *first_rhs_link, *last_rhs_link;
172
173 /* Pointers to the first and last element in the linked list of assign
174 links for propagation from LHS to RHS. */
175 struct assign_link *first_lhs_link, *last_lhs_link;
176
177 /* Pointer to the next access in the work queues. */
178 struct access *next_rhs_queued, *next_lhs_queued;
179
180 /* Replacement variable for this access "region." Never to be accessed
181 directly, always only by the means of get_access_replacement() and only
182 when grp_to_be_replaced flag is set. */
183 tree replacement_decl;
184
185 /* Is this access made in reverse storage order? */
186 unsigned reverse : 1;
187
188 /* Is this particular access a write access? */
189 unsigned write : 1;
190
191 /* Is this access currently in the rhs work queue? */
192 unsigned grp_rhs_queued : 1;
193
194 /* Is this access currently in the lhs work queue? */
195 unsigned grp_lhs_queued : 1;
196
197 /* Does this group contain a write access? This flag is propagated down the
198 access tree. */
199 unsigned grp_write : 1;
200
201 /* Does this group contain a read access? This flag is propagated down the
202 access tree. */
203 unsigned grp_read : 1;
204
205 /* Does this group contain a read access that comes from an assignment
206 statement? This flag is propagated down the access tree. */
207 unsigned grp_assignment_read : 1;
208
209 /* Does this group contain a write access that comes from an assignment
210 statement? This flag is propagated down the access tree. */
211 unsigned grp_assignment_write : 1;
212
213 /* Does this group contain a read access through a scalar type? This flag is
214 not propagated in the access tree in any direction. */
215 unsigned grp_scalar_read : 1;
216
217 /* Does this group contain a write access through a scalar type? This flag
218 is not propagated in the access tree in any direction. */
219 unsigned grp_scalar_write : 1;
220
221 /* In a root of an access tree, true means that the entire tree should be
222 totally scalarized - that all scalar leafs should be scalarized and
223 non-root grp_total_scalarization accesses should be honored. Otherwise,
224 non-root accesses with grp_total_scalarization should never get scalar
225 replacements. */
226 unsigned grp_total_scalarization : 1;
227
228 /* Other passes of the analysis use this bit to make function
229 analyze_access_subtree create scalar replacements for this group if
230 possible. */
231 unsigned grp_hint : 1;
232
233 /* Is the subtree rooted in this access fully covered by scalar
234 replacements? */
235 unsigned grp_covered : 1;
236
237 /* If set to true, this access and all below it in an access tree must not be
238 scalarized. */
239 unsigned grp_unscalarizable_region : 1;
240
241 /* Whether data have been written to parts of the aggregate covered by this
242 access which is not to be scalarized. This flag is propagated up in the
243 access tree. */
244 unsigned grp_unscalarized_data : 1;
245
246 /* Set if all accesses in the group consist of the same chain of
247 COMPONENT_REFs and ARRAY_REFs. */
248 unsigned grp_same_access_path : 1;
249
250 /* Does this access and/or group contain a write access through a
251 BIT_FIELD_REF? */
252 unsigned grp_partial_lhs : 1;
253
254 /* Set when a scalar replacement should be created for this variable. */
255 unsigned grp_to_be_replaced : 1;
256
257 /* Set when we want a replacement for the sole purpose of having it in
258 generated debug statements. */
259 unsigned grp_to_be_debug_replaced : 1;
260
261 /* Should TREE_NO_WARNING of a replacement be set? */
262 unsigned grp_no_warning : 1;
263 };
264
265 typedef struct access *access_p;
266
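/* As a small illustration (hypothetical, assuming 32-bit int and no padding),
   for

     struct inner { int a; int b; };
     struct outer { struct inner i; int c; } x;

   accessed via x.i, x.i.a, x.i.b and x.c, the group representatives would
   typically form trees along these lines:

     x.i      offset 0,  size 64     root
       x.i.a  offset 0,  size 32     first_child of x.i
       x.i.b  offset 32, size 32     next_sibling of x.i.a
     x.c      offset 64, size 32     next_grp of x.i  */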
267
268 /* Alloc pool for allocating access structures. */
269 static object_allocator<struct access> access_pool ("SRA accesses");
270
271 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
272 are used to propagate subaccesses from rhs to lhs and vice versa as long as
273 they don't conflict with what is already there. In the RHS->LHS direction,
274 we also propagate grp_write flag to lazily mark that the access contains any
275 meaningful data. */
276 struct assign_link
277 {
278 struct access *lacc, *racc;
279 struct assign_link *next_rhs, *next_lhs;
280 };
281
282 /* Alloc pool for allocating assign link structures. */
283 static object_allocator<assign_link> assign_link_pool ("SRA links");
284
285 /* Base (tree) -> Vector (vec<access_p> *) map. */
286 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
287
288 /* Hash to limit creation of artificial accesses */
289 static hash_map<tree, unsigned> *propagation_budget;
290
291 /* Candidate hash table helpers. */
292
293 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
294 {
295 static inline hashval_t hash (const tree_node *);
296 static inline bool equal (const tree_node *, const tree_node *);
297 };
298
299 /* Hash a tree in a uid_decl_map. */
300
301 inline hashval_t
302 uid_decl_hasher::hash (const tree_node *item)
303 {
304 return item->decl_minimal.uid;
305 }
306
307 /* Return true if the DECL_UIDs of both trees are equal. */
308
309 inline bool
310 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
311 {
312 return (a->decl_minimal.uid == b->decl_minimal.uid);
313 }
314
315 /* Set of candidates. */
316 static bitmap candidate_bitmap;
317 static hash_table<uid_decl_hasher> *candidates;
318
319 /* For a candidate UID, return the candidate's decl. */
320
321 static inline tree
322 candidate (unsigned uid)
323 {
324 tree_node t;
325 t.decl_minimal.uid = uid;
326 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
327 }
328
329 /* Bitmap of candidates which we should try to entirely scalarize away and
330 those which cannot be (because they are and need to be used as a whole). */
331 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
332
333 /* Bitmap of candidates in the constant pool, which cannot be scalarized
334 because this would produce non-constant expressions (e.g. Ada). */
335 static bitmap disqualified_constants;
336
337 /* Obstack for creation of fancy names. */
338 static struct obstack name_obstack;
339
340 /* Heads of linked lists of accesses that need to have their subaccesses
341 propagated to their assignment counterparts. */
342 static struct access *rhs_work_queue_head, *lhs_work_queue_head;
343
344 /* Counters of various statistics about the work done by the pass, gathered
345 while it runs. */
347
348 static struct
349 {
350 /* Number of processed aggregates is readily available in
351 analyze_all_variable_accesses and so is not stored here. */
352
353 /* Number of created scalar replacements. */
354 int replacements;
355
356 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
357 expression. */
358 int exprs;
359
360 /* Number of statements created by generate_subtree_copies. */
361 int subtree_copies;
362
363 /* Number of statements created by load_assign_lhs_subreplacements. */
364 int subreplacements;
365
366 /* Number of times sra_modify_assign has deleted a statement. */
367 int deleted;
368
369 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
370 RHS separately due to type conversions or nonexistent matching
371 references. */
372 int separate_lhs_rhs_handling;
373
374 /* Number of parameters that were removed because they were unused. */
375 int deleted_unused_parameters;
376
377 /* Number of scalars passed as parameters by reference that have been
378 converted to be passed by value. */
379 int scalar_by_ref_to_by_val;
380
381 /* Number of aggregate parameters that were replaced by one or more of their
382 components. */
383 int aggregate_params_reduced;
384
385 /* Number of components created when splitting aggregate parameters. */
386 int param_reductions_created;
387 } sra_stats;
388
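/* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are. */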
389 static void
390 dump_access (FILE *f, struct access *access, bool grp)
391 {
392 fprintf (f, "access { ");
393 fprintf (f, "base = (%d)'", DECL_UID (access->base));
394 print_generic_expr (f, access->base);
395 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
396 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
397 fprintf (f, ", expr = ");
398 print_generic_expr (f, access->expr);
399 fprintf (f, ", type = ");
400 print_generic_expr (f, access->type);
401 fprintf (f, ", reverse = %d", access->reverse);
402 if (grp)
403 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
404 "grp_assignment_write = %d, grp_scalar_read = %d, "
405 "grp_scalar_write = %d, grp_total_scalarization = %d, "
406 "grp_hint = %d, grp_covered = %d, "
407 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
408 "grp_same_access_path = %d, grp_partial_lhs = %d, "
409 "grp_to_be_replaced = %d, grp_to_be_debug_replaced = %d}\n",
410 access->grp_read, access->grp_write, access->grp_assignment_read,
411 access->grp_assignment_write, access->grp_scalar_read,
412 access->grp_scalar_write, access->grp_total_scalarization,
413 access->grp_hint, access->grp_covered,
414 access->grp_unscalarizable_region, access->grp_unscalarized_data,
415 access->grp_same_access_path, access->grp_partial_lhs,
416 access->grp_to_be_replaced, access->grp_to_be_debug_replaced);
417 else
418 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
419 "grp_partial_lhs = %d}\n",
420 access->write, access->grp_total_scalarization,
421 access->grp_partial_lhs);
422 }
423
424 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
425
426 static void
427 dump_access_tree_1 (FILE *f, struct access *access, int level)
428 {
429 do
430 {
431 int i;
432
433 for (i = 0; i < level; i++)
434 fputs ("* ", f);
435
436 dump_access (f, access, true);
437
438 if (access->first_child)
439 dump_access_tree_1 (f, access->first_child, level + 1);
440
441 access = access->next_sibling;
442 }
443 while (access);
444 }
445
446 /* Dump all access trees for a variable, given the pointer to the first root in
447 ACCESS. */
448
449 static void
450 dump_access_tree (FILE *f, struct access *access)
451 {
452 for (; access; access = access->next_grp)
453 dump_access_tree_1 (f, access, 0);
454 }
455
456 /* Return true iff ACC is non-NULL and has subaccesses. */
457
458 static inline bool
459 access_has_children_p (struct access *acc)
460 {
461 return acc && acc->first_child;
462 }
463
464 /* Return true iff ACC is (partly) covered by at least one replacement. */
465
466 static bool
467 access_has_replacements_p (struct access *acc)
468 {
469 struct access *child;
470 if (acc->grp_to_be_replaced)
471 return true;
472 for (child = acc->first_child; child; child = child->next_sibling)
473 if (access_has_replacements_p (child))
474 return true;
475 return false;
476 }
477
478 /* Return a vector of pointers to accesses for the variable given in BASE or
479 NULL if there is none. */
480
481 static vec<access_p> *
482 get_base_access_vector (tree base)
483 {
484 return base_access_vec->get (base);
485 }
486
487 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
488 in ACCESS. Return NULL if it cannot be found. */
489
490 static struct access *
491 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
492 HOST_WIDE_INT size)
493 {
494 while (access && (access->offset != offset || access->size != size))
495 {
496 struct access *child = access->first_child;
497
498 while (child && (child->offset + child->size <= offset))
499 child = child->next_sibling;
500 access = child;
501 }
502
503 /* Total scalarization does not replace single field structures with their
504 single field but rather creates an access for them underneath. Look for
505 it. */
506 if (access)
507 while (access->first_child
508 && access->first_child->offset == offset
509 && access->first_child->size == size)
510 access = access->first_child;
511
512 return access;
513 }
514
515 /* Return the first group representative for DECL or NULL if none exists. */
516
517 static struct access *
518 get_first_repr_for_decl (tree base)
519 {
520 vec<access_p> *access_vec;
521
522 access_vec = get_base_access_vector (base);
523 if (!access_vec)
524 return NULL;
525
526 return (*access_vec)[0];
527 }
528
529 /* Find an access representative for the variable BASE and given OFFSET and
530 SIZE. Requires that access trees have already been built. Return NULL if
531 it cannot be found. */
532
533 static struct access *
534 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
535 HOST_WIDE_INT size)
536 {
537 struct access *access;
538
539 access = get_first_repr_for_decl (base);
540 while (access && (access->offset + access->size <= offset))
541 access = access->next_grp;
542 if (!access)
543 return NULL;
544
545 return find_access_in_subtree (access, offset, size);
546 }
547
548 /* Add LINK to the linked list of assign links of RACC. */
549
550 static void
551 add_link_to_rhs (struct access *racc, struct assign_link *link)
552 {
553 gcc_assert (link->racc == racc);
554
555 if (!racc->first_rhs_link)
556 {
557 gcc_assert (!racc->last_rhs_link);
558 racc->first_rhs_link = link;
559 }
560 else
561 racc->last_rhs_link->next_rhs = link;
562
563 racc->last_rhs_link = link;
564 link->next_rhs = NULL;
565 }
566
567 /* Add LINK to the linked list of lhs assign links of LACC. */
568
569 static void
570 add_link_to_lhs (struct access *lacc, struct assign_link *link)
571 {
572 gcc_assert (link->lacc == lacc);
573
574 if (!lacc->first_lhs_link)
575 {
576 gcc_assert (!lacc->last_lhs_link);
577 lacc->first_lhs_link = link;
578 }
579 else
580 lacc->last_lhs_link->next_lhs = link;
581
582 lacc->last_lhs_link = link;
583 link->next_lhs = NULL;
584 }
585
586 /* Move all link structures in their linked lists from OLD_ACC to the
587 corresponding lists in NEW_ACC. */
588 static void
589 relink_to_new_repr (struct access *new_acc, struct access *old_acc)
590 {
591 if (old_acc->first_rhs_link)
592 {
593
594 if (new_acc->first_rhs_link)
595 {
596 gcc_assert (!new_acc->last_rhs_link->next_rhs);
597 gcc_assert (!old_acc->last_rhs_link
598 || !old_acc->last_rhs_link->next_rhs);
599
600 new_acc->last_rhs_link->next_rhs = old_acc->first_rhs_link;
601 new_acc->last_rhs_link = old_acc->last_rhs_link;
602 }
603 else
604 {
605 gcc_assert (!new_acc->last_rhs_link);
606
607 new_acc->first_rhs_link = old_acc->first_rhs_link;
608 new_acc->last_rhs_link = old_acc->last_rhs_link;
609 }
610 old_acc->first_rhs_link = old_acc->last_rhs_link = NULL;
611 }
612 else
613 gcc_assert (!old_acc->last_rhs_link);
614
615 if (old_acc->first_lhs_link)
616 {
617
618 if (new_acc->first_lhs_link)
619 {
620 gcc_assert (!new_acc->last_lhs_link->next_lhs);
621 gcc_assert (!old_acc->last_lhs_link
622 || !old_acc->last_lhs_link->next_lhs);
623
624 new_acc->last_lhs_link->next_lhs = old_acc->first_lhs_link;
625 new_acc->last_lhs_link = old_acc->last_lhs_link;
626 }
627 else
628 {
629 gcc_assert (!new_acc->last_lhs_link);
630
631 new_acc->first_lhs_link = old_acc->first_lhs_link;
632 new_acc->last_lhs_link = old_acc->last_lhs_link;
633 }
634 old_acc->first_lhs_link = old_acc->last_lhs_link = NULL;
635 }
636 else
637 gcc_assert (!old_acc->last_lhs_link);
638
639 }
640
641 /* Add ACCESS to the work queue for propagation of subaccesses from RHS to
642 LHS (which is actually a stack). */
643
644 static void
645 add_access_to_rhs_work_queue (struct access *access)
646 {
647 if (access->first_rhs_link && !access->grp_rhs_queued)
648 {
649 gcc_assert (!access->next_rhs_queued);
650 access->next_rhs_queued = rhs_work_queue_head;
651 access->grp_rhs_queued = 1;
652 rhs_work_queue_head = access;
653 }
654 }
655
656 /* Add ACCESS to the work queue for propagation of subaccesses from LHS to
657 RHS (which is actually a stack). */
658
659 static void
660 add_access_to_lhs_work_queue (struct access *access)
661 {
662 if (access->first_lhs_link && !access->grp_lhs_queued)
663 {
664 gcc_assert (!access->next_lhs_queued);
665 access->next_lhs_queued = lhs_work_queue_head;
666 access->grp_lhs_queued = 1;
667 lhs_work_queue_head = access;
668 }
669 }
670
671 /* Pop an access from the work queue for propagating from RHS to LHS, and
672 return it, assuming there is one. */
673
674 static struct access *
675 pop_access_from_rhs_work_queue (void)
676 {
677 struct access *access = rhs_work_queue_head;
678
679 rhs_work_queue_head = access->next_rhs_queued;
680 access->next_rhs_queued = NULL;
681 access->grp_rhs_queued = 0;
682 return access;
683 }
684
685 /* Pop an access from the work queue for propagating from LHS to RHS, and
686 return it, assuming there is one. */
687
688 static struct access *
689 pop_access_from_lhs_work_queue (void)
690 {
691 struct access *access = lhs_work_queue_head;
692
693 lhs_work_queue_head = access->next_lhs_queued;
694 access->next_lhs_queued = NULL;
695 access->grp_lhs_queued = 0;
696 return access;
697 }
698
699 /* Allocate necessary structures. */
700
701 static void
702 sra_initialize (void)
703 {
704 candidate_bitmap = BITMAP_ALLOC (NULL);
705 candidates = new hash_table<uid_decl_hasher>
706 (vec_safe_length (cfun->local_decls) / 2);
707 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
708 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
709 disqualified_constants = BITMAP_ALLOC (NULL);
710 gcc_obstack_init (&name_obstack);
711 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
712 memset (&sra_stats, 0, sizeof (sra_stats));
713 }
714
715 /* Deallocate all general structures. */
716
717 static void
718 sra_deinitialize (void)
719 {
720 BITMAP_FREE (candidate_bitmap);
721 delete candidates;
722 candidates = NULL;
723 BITMAP_FREE (should_scalarize_away_bitmap);
724 BITMAP_FREE (cannot_scalarize_away_bitmap);
725 BITMAP_FREE (disqualified_constants);
726 access_pool.release ();
727 assign_link_pool.release ();
728 obstack_free (&name_obstack, NULL);
729
730 delete base_access_vec;
731 }
732
733 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise. */
734
735 static bool constant_decl_p (tree decl)
736 {
737 return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
738 }
739
740 /* Remove DECL from candidates for SRA and write REASON to the dump file if
741 there is one. */
742
743 static void
744 disqualify_candidate (tree decl, const char *reason)
745 {
746 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
747 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
748 if (constant_decl_p (decl))
749 bitmap_set_bit (disqualified_constants, DECL_UID (decl));
750
751 if (dump_file && (dump_flags & TDF_DETAILS))
752 {
753 fprintf (dump_file, "! Disqualifying ");
754 print_generic_expr (dump_file, decl);
755 fprintf (dump_file, " - %s\n", reason);
756 }
757 }
758
759 /* Return true iff the type contains a field or an element which does not allow
760 scalarization. Use VISITED_TYPES to avoid re-checking already checked
761 (sub-)types. */
762
763 static bool
764 type_internals_preclude_sra_p_1 (tree type, const char **msg,
765 hash_set<tree> *visited_types)
766 {
767 tree fld;
768 tree et;
769
770 if (visited_types->contains (type))
771 return false;
772 visited_types->add (type);
773
774 switch (TREE_CODE (type))
775 {
776 case RECORD_TYPE:
777 case UNION_TYPE:
778 case QUAL_UNION_TYPE:
779 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
780 if (TREE_CODE (fld) == FIELD_DECL)
781 {
782 if (TREE_CODE (fld) == FUNCTION_DECL)
783 continue;
784 tree ft = TREE_TYPE (fld);
785
786 if (TREE_THIS_VOLATILE (fld))
787 {
788 *msg = "volatile structure field";
789 return true;
790 }
791 if (!DECL_FIELD_OFFSET (fld))
792 {
793 *msg = "no structure field offset";
794 return true;
795 }
796 if (!DECL_SIZE (fld))
797 {
798 *msg = "zero structure field size";
799 return true;
800 }
801 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
802 {
803 *msg = "structure field offset not fixed";
804 return true;
805 }
806 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
807 {
808 *msg = "structure field size not fixed";
809 return true;
810 }
811 if (!tree_fits_shwi_p (bit_position (fld)))
812 {
813 *msg = "structure field size too big";
814 return true;
815 }
816 if (AGGREGATE_TYPE_P (ft)
817 && int_bit_position (fld) % BITS_PER_UNIT != 0)
818 {
819 *msg = "structure field is bit field";
820 return true;
821 }
822
823 if (AGGREGATE_TYPE_P (ft)
824 && type_internals_preclude_sra_p_1 (ft, msg, visited_types))
825 return true;
826 }
827
828 return false;
829
830 case ARRAY_TYPE:
831 et = TREE_TYPE (type);
832
833 if (TYPE_VOLATILE (et))
834 {
835 *msg = "element type is volatile";
836 return true;
837 }
838
839 if (AGGREGATE_TYPE_P (et)
840 && type_internals_preclude_sra_p_1 (et, msg, visited_types))
841 return true;
842
843 return false;
844
845 default:
846 return false;
847 }
848 }
849
850 /* Return true iff the type contains a field or an element which does not allow
851 scalarization. */
852
853 bool
854 type_internals_preclude_sra_p (tree type, const char **msg)
855 {
856 hash_set<tree> visited_types;
857 return type_internals_preclude_sra_p_1 (type, msg, &visited_types);
858 }
859
860
861 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
862 the three fields. Also add it to the vector of accesses corresponding to
863 the base. Finally, return the new access. */
864
865 static struct access *
866 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
867 {
868 struct access *access = access_pool.allocate ();
869
870 memset (access, 0, sizeof (struct access));
871 access->base = base;
872 access->offset = offset;
873 access->size = size;
874
875 base_access_vec->get_or_insert (base).safe_push (access);
876
877 return access;
878 }
879
880 static bool maybe_add_sra_candidate (tree);
881
882 /* Create and insert access for EXPR. Return created access, or NULL if it is
883 not possible. Also scan for uses of constant pool as we go along and add
884 to candidates. */
885
886 static struct access *
887 create_access (tree expr, gimple *stmt, bool write)
888 {
889 struct access *access;
890 poly_int64 poffset, psize, pmax_size;
891 tree base = expr;
892 bool reverse, unscalarizable_region = false;
893
894 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
895 &reverse);
896
897 /* For constant-pool entries, check we can substitute the constant value. */
898 if (constant_decl_p (base))
899 {
900 gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
901 if (expr != base
902 && !is_gimple_reg_type (TREE_TYPE (expr))
903 && dump_file && (dump_flags & TDF_DETAILS))
904 {
905 /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
906 and elements of multidimensional arrays (which are
907 multi-element arrays in their own right). */
908 fprintf (dump_file, "Allowing non-reg-type load of part"
909 " of constant-pool entry: ");
910 print_generic_expr (dump_file, expr);
911 }
912 maybe_add_sra_candidate (base);
913 }
914
915 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
916 return NULL;
917
918 HOST_WIDE_INT offset, size, max_size;
919 if (!poffset.is_constant (&offset)
920 || !psize.is_constant (&size)
921 || !pmax_size.is_constant (&max_size))
922 {
923 disqualify_candidate (base, "Encountered a polynomial-sized access.");
924 return NULL;
925 }
926
927 if (size != max_size)
928 {
929 size = max_size;
930 unscalarizable_region = true;
931 }
932 if (size == 0)
933 return NULL;
934 if (offset < 0)
935 {
936 disqualify_candidate (base, "Encountered a negative offset access.");
937 return NULL;
938 }
939 if (size < 0)
940 {
941 disqualify_candidate (base, "Encountered an unconstrained access.");
942 return NULL;
943 }
944
945 access = create_access_1 (base, offset, size);
946 access->expr = expr;
947 access->type = TREE_TYPE (expr);
948 access->write = write;
949 access->grp_unscalarizable_region = unscalarizable_region;
950 access->stmt = stmt;
951 access->reverse = reverse;
952
953 return access;
954 }
955
956
957 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
958 ARRAY_TYPE with fields that are either of gimple register types (excluding
959 bit-fields) or (recursively) scalarizable types. CONST_DECL must be true if
960 we are considering a decl from constant pool. If it is false, char arrays
961 will be refused. */
962
963 static bool
964 scalarizable_type_p (tree type, bool const_decl)
965 {
966 if (is_gimple_reg_type (type))
967 return true;
968 if (type_contains_placeholder_p (type))
969 return false;
970
971 bool have_predecessor_field = false;
972 HOST_WIDE_INT prev_pos = 0;
973
974 switch (TREE_CODE (type))
975 {
976 case RECORD_TYPE:
977 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
978 if (TREE_CODE (fld) == FIELD_DECL)
979 {
980 tree ft = TREE_TYPE (fld);
981
982 if (zerop (DECL_SIZE (fld)))
983 continue;
984
985 HOST_WIDE_INT pos = int_bit_position (fld);
986 if (have_predecessor_field
987 && pos <= prev_pos)
988 return false;
989
990 have_predecessor_field = true;
991 prev_pos = pos;
992
993 if (DECL_BIT_FIELD (fld))
994 return false;
995
996 if (!scalarizable_type_p (ft, const_decl))
997 return false;
998 }
999
1000 return true;
1001
1002 case ARRAY_TYPE:
1003 {
1004 HOST_WIDE_INT min_elem_size;
1005 if (const_decl)
1006 min_elem_size = 0;
1007 else
1008 min_elem_size = BITS_PER_UNIT;
1009
1010 if (TYPE_DOMAIN (type) == NULL_TREE
1011 || !tree_fits_shwi_p (TYPE_SIZE (type))
1012 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
1013 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
1014 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
1015 return false;
1016 if (tree_to_shwi (TYPE_SIZE (type)) == 0
1017 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
1018 /* Zero-element array, should not prevent scalarization. */
1019 ;
1020 else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
1021 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
1022 /* Variable-length array, do not allow scalarization. */
1023 return false;
1024
1025 tree elem = TREE_TYPE (type);
1026 if (!scalarizable_type_p (elem, const_decl))
1027 return false;
1028 return true;
1029 }
1030 default:
1031 return false;
1032 }
1033 }
1034
1035 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it. */
1036
1037 static inline bool
1038 contains_view_convert_expr_p (const_tree ref)
1039 {
1040 while (handled_component_p (ref))
1041 {
1042 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1043 return true;
1044 ref = TREE_OPERAND (ref, 0);
1045 }
1046
1047 return false;
1048 }
1049
1050 /* Return true if REF contains a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
1051 bit-field field declaration. If TYPE_CHANGING_P is non-NULL, set the bool
1052 it points to will be set if REF contains any of the above or a MEM_REF
1053 expression that effectively performs type conversion. */
1054
1055 static bool
1056 contains_vce_or_bfcref_p (const_tree ref, bool *type_changing_p = NULL)
1057 {
1058 while (handled_component_p (ref))
1059 {
1060 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
1061 || (TREE_CODE (ref) == COMPONENT_REF
1062 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
1063 {
1064 if (type_changing_p)
1065 *type_changing_p = true;
1066 return true;
1067 }
1068 ref = TREE_OPERAND (ref, 0);
1069 }
1070
1071 if (!type_changing_p
1072 || TREE_CODE (ref) != MEM_REF
1073 || TREE_CODE (TREE_OPERAND (ref, 0)) != ADDR_EXPR)
1074 return false;
1075
1076 tree mem = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
1077 if (TYPE_MAIN_VARIANT (TREE_TYPE (ref))
1078 != TYPE_MAIN_VARIANT (TREE_TYPE (mem)))
1079 *type_changing_p = true;
1080
1081 return false;
1082 }
1083
1084 /* Search the given tree for a declaration by skipping handled components and
1085 exclude it from the candidates. */
1086
1087 static void
1088 disqualify_base_of_expr (tree t, const char *reason)
1089 {
1090 t = get_base_address (t);
1091 if (t && DECL_P (t))
1092 disqualify_candidate (t, reason);
1093 }
1094
1095 /* Scan expression EXPR and create access structures for all accesses to
1096 candidates for scalarization. Return the created access or NULL if none is
1097 created. */
1098
1099 static struct access *
1100 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1101 {
1102 struct access *ret = NULL;
1103 bool partial_ref;
1104
1105 if (TREE_CODE (expr) == BIT_FIELD_REF
1106 || TREE_CODE (expr) == IMAGPART_EXPR
1107 || TREE_CODE (expr) == REALPART_EXPR)
1108 {
1109 expr = TREE_OPERAND (expr, 0);
1110 partial_ref = true;
1111 }
1112 else
1113 partial_ref = false;
1114
1115 if (storage_order_barrier_p (expr))
1116 {
1117 disqualify_base_of_expr (expr, "storage order barrier.");
1118 return NULL;
1119 }
1120
1121 /* We need to dive through V_C_Es in order to get the size of its parameter
1122 and not the result type. Ada produces such statements. We are also
1123 capable of handling the topmost V_C_E but not any of those buried in other
1124 handled components. */
1125 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1126 expr = TREE_OPERAND (expr, 0);
1127
1128 if (contains_view_convert_expr_p (expr))
1129 {
1130 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1131 "component.");
1132 return NULL;
1133 }
1134 if (TREE_THIS_VOLATILE (expr))
1135 {
1136 disqualify_base_of_expr (expr, "part of a volatile reference.");
1137 return NULL;
1138 }
1139
1140 switch (TREE_CODE (expr))
1141 {
1142 case MEM_REF:
1143 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR)
1144 return NULL;
1145 /* fall through */
1146 case VAR_DECL:
1147 case PARM_DECL:
1148 case RESULT_DECL:
1149 case COMPONENT_REF:
1150 case ARRAY_REF:
1151 case ARRAY_RANGE_REF:
1152 ret = create_access (expr, stmt, write);
1153 break;
1154
1155 default:
1156 break;
1157 }
1158
1159 if (write && partial_ref && ret)
1160 ret->grp_partial_lhs = 1;
1161
1162 return ret;
1163 }
1164
1165 /* Scan expression EXPR and create access structures for all accesses to
1166 candidates for scalarization. Return true if any access has been inserted.
1167 STMT must be the statement from which the expression is taken, WRITE must be
1168 true if the expression is a store and false otherwise. */
1169
1170 static bool
1171 build_access_from_expr (tree expr, gimple *stmt, bool write)
1172 {
1173 struct access *access;
1174
1175 access = build_access_from_expr_1 (expr, stmt, write);
1176 if (access)
1177 {
1178 /* This means the aggregate is accessed as a whole in a way other than an
1179 assign statement and thus cannot be removed even if we had a scalar
1180 replacement for everything. */
1181 if (cannot_scalarize_away_bitmap)
1182 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1183 return true;
1184 }
1185 return false;
1186 }
1187
1188 /* Return the single non-EH successor edge of BB or NULL if there is none or
1189 more than one. */
1190
1191 static edge
1192 single_non_eh_succ (basic_block bb)
1193 {
1194 edge e, res = NULL;
1195 edge_iterator ei;
1196
1197 FOR_EACH_EDGE (e, ei, bb->succs)
1198 if (!(e->flags & EDGE_EH))
1199 {
1200 if (res)
1201 return NULL;
1202 res = e;
1203 }
1204
1205 return res;
1206 }
1207
1208 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1209 there is no alternative spot where to put statements SRA might need to
1210 generate after it. The spot we are looking for is an edge leading to a
1211 single non-EH successor, if it exists and is indeed single. RHS may be
1212 NULL, in that case ignore it. */
1213
1214 static bool
1215 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1216 {
1217 if (stmt_ends_bb_p (stmt))
1218 {
1219 if (single_non_eh_succ (gimple_bb (stmt)))
1220 return false;
1221
1222 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1223 if (rhs)
1224 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1225 return true;
1226 }
1227 return false;
1228 }
1229
1230 /* Return true if the nature of BASE is such that it contains data even if
1231 there is no write to it in the function. */
1232
1233 static bool
1234 comes_initialized_p (tree base)
1235 {
1236 return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
1237 }
1238
1239 /* Scan expressions occurring in STMT, create access structures for all accesses
1240 to candidates for scalarization and remove those candidates which occur in
1241 statements or expressions that prevent them from being split apart. Return
1242 true if any access has been inserted. */
1243
1244 static bool
1245 build_accesses_from_assign (gimple *stmt)
1246 {
1247 tree lhs, rhs;
1248 struct access *lacc, *racc;
1249
1250 if (!gimple_assign_single_p (stmt)
1251 /* Scope clobbers don't influence scalarization. */
1252 || gimple_clobber_p (stmt))
1253 return false;
1254
1255 lhs = gimple_assign_lhs (stmt);
1256 rhs = gimple_assign_rhs1 (stmt);
1257
1258 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1259 return false;
1260
1261 racc = build_access_from_expr_1 (rhs, stmt, false);
1262 lacc = build_access_from_expr_1 (lhs, stmt, true);
1263
1264 if (lacc)
1265 {
1266 lacc->grp_assignment_write = 1;
1267 if (storage_order_barrier_p (rhs))
1268 lacc->grp_unscalarizable_region = 1;
1269
1270 if (should_scalarize_away_bitmap && !is_gimple_reg_type (lacc->type))
1271 {
1272 bool type_changing_p = false;
1273 contains_vce_or_bfcref_p (lhs, &type_changing_p);
1274 if (type_changing_p)
1275 bitmap_set_bit (cannot_scalarize_away_bitmap,
1276 DECL_UID (lacc->base));
1277 }
1278 }
1279
1280 if (racc)
1281 {
1282 racc->grp_assignment_read = 1;
1283 if (should_scalarize_away_bitmap && !is_gimple_reg_type (racc->type))
1284 {
1285 bool type_changing_p = false;
1286 contains_vce_or_bfcref_p (rhs, &type_changing_p);
1287
1288 if (type_changing_p || gimple_has_volatile_ops (stmt))
1289 bitmap_set_bit (cannot_scalarize_away_bitmap,
1290 DECL_UID (racc->base));
1291 else
1292 bitmap_set_bit (should_scalarize_away_bitmap,
1293 DECL_UID (racc->base));
1294 }
1295 if (storage_order_barrier_p (lhs))
1296 racc->grp_unscalarizable_region = 1;
1297 }
1298
1299 if (lacc && racc
1300 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1301 && !lacc->grp_unscalarizable_region
1302 && !racc->grp_unscalarizable_region
1303 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1304 && lacc->size == racc->size
1305 && useless_type_conversion_p (lacc->type, racc->type))
1306 {
1307 struct assign_link *link;
1308
1309 link = assign_link_pool.allocate ();
1310 memset (link, 0, sizeof (struct assign_link));
1311
1312 link->lacc = lacc;
1313 link->racc = racc;
1314 add_link_to_rhs (racc, link);
1315 add_link_to_lhs (lacc, link);
1316 add_access_to_rhs_work_queue (racc);
1317 add_access_to_lhs_work_queue (lacc);
1318
1319 /* Let's delay marking the areas as written until propagation of accesses
1320 across link, unless the nature of rhs tells us that its data comes
1321 from elsewhere. */
1322 if (!comes_initialized_p (racc->base))
1323 lacc->write = false;
1324 }
1325
1326 return lacc || racc;
1327 }
1328
1329 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1330 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1331
1332 static bool
1333 asm_visit_addr (gimple *, tree op, tree, void *)
1334 {
1335 op = get_base_address (op);
1336 if (op
1337 && DECL_P (op))
1338 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1339
1340 return false;
1341 }
1342
1343 /* Scan function and look for interesting expressions and create access
1344 structures for them. Return true iff any access is created. */
1345
1346 static bool
1347 scan_function (void)
1348 {
1349 basic_block bb;
1350 bool ret = false;
1351
1352 FOR_EACH_BB_FN (bb, cfun)
1353 {
1354 gimple_stmt_iterator gsi;
1355 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1356 {
1357 gimple *stmt = gsi_stmt (gsi);
1358 tree t;
1359 unsigned i;
1360
1361 switch (gimple_code (stmt))
1362 {
1363 case GIMPLE_RETURN:
1364 t = gimple_return_retval (as_a <greturn *> (stmt));
1365 if (t != NULL_TREE)
1366 ret |= build_access_from_expr (t, stmt, false);
1367 break;
1368
1369 case GIMPLE_ASSIGN:
1370 ret |= build_accesses_from_assign (stmt);
1371 break;
1372
1373 case GIMPLE_CALL:
1374 for (i = 0; i < gimple_call_num_args (stmt); i++)
1375 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1376 stmt, false);
1377
1378 t = gimple_call_lhs (stmt);
1379 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1380 ret |= build_access_from_expr (t, stmt, true);
1381 break;
1382
1383 case GIMPLE_ASM:
1384 {
1385 gasm *asm_stmt = as_a <gasm *> (stmt);
1386 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1387 asm_visit_addr);
1388 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1389 {
1390 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1391 ret |= build_access_from_expr (t, asm_stmt, false);
1392 }
1393 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1394 {
1395 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1396 ret |= build_access_from_expr (t, asm_stmt, true);
1397 }
1398 }
1399 break;
1400
1401 default:
1402 break;
1403 }
1404 }
1405 }
1406
1407 return ret;
1408 }
1409
1410 /* Helper of QSORT function. There are pointers to accesses in the array. An
1411 access is considered smaller than another if it has smaller offset or if the
1412 offsets are the same but its size is bigger. */
1413
1414 static int
1415 compare_access_positions (const void *a, const void *b)
1416 {
1417 const access_p *fp1 = (const access_p *) a;
1418 const access_p *fp2 = (const access_p *) b;
1419 const access_p f1 = *fp1;
1420 const access_p f2 = *fp2;
1421
1422 if (f1->offset != f2->offset)
1423 return f1->offset < f2->offset ? -1 : 1;
1424
1425 if (f1->size == f2->size)
1426 {
1427 if (f1->type == f2->type)
1428 return 0;
1429 /* Put any non-aggregate type before any aggregate type. */
1430 else if (!is_gimple_reg_type (f1->type)
1431 && is_gimple_reg_type (f2->type))
1432 return 1;
1433 else if (is_gimple_reg_type (f1->type)
1434 && !is_gimple_reg_type (f2->type))
1435 return -1;
1436 /* Put any complex or vector type before any other scalar type. */
1437 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1438 && TREE_CODE (f1->type) != VECTOR_TYPE
1439 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1440 || TREE_CODE (f2->type) == VECTOR_TYPE))
1441 return 1;
1442 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1443 || TREE_CODE (f1->type) == VECTOR_TYPE)
1444 && TREE_CODE (f2->type) != COMPLEX_TYPE
1445 && TREE_CODE (f2->type) != VECTOR_TYPE)
1446 return -1;
1447 /* Put any integral type before any non-integral type. When splicing, we
1448 make sure that those with insufficient precision and occupying the
1449 same space are not scalarized. */
1450 else if (INTEGRAL_TYPE_P (f1->type)
1451 && !INTEGRAL_TYPE_P (f2->type))
1452 return -1;
1453 else if (!INTEGRAL_TYPE_P (f1->type)
1454 && INTEGRAL_TYPE_P (f2->type))
1455 return 1;
1456 /* Put the integral type with the bigger precision first. */
1457 else if (INTEGRAL_TYPE_P (f1->type)
1458 && INTEGRAL_TYPE_P (f2->type)
1459 && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
1460 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1461 /* Stabilize the sort. */
1462 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1463 }
1464
1465 /* We want the bigger accesses first, thus the opposite operator in the next
1466 line: */
1467 return f1->size > f2->size ? -1 : 1;
1468 }
1469
1470
1471 /* Append a name of the declaration to the name obstack. A helper function for
1472 make_fancy_name. */
1473
1474 static void
1475 make_fancy_decl_name (tree decl)
1476 {
1477 char buffer[32];
1478
1479 tree name = DECL_NAME (decl);
1480 if (name)
1481 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1482 IDENTIFIER_LENGTH (name));
1483 else
1484 {
1485 sprintf (buffer, "D%u", DECL_UID (decl));
1486 obstack_grow (&name_obstack, buffer, strlen (buffer));
1487 }
1488 }
1489
1490 /* Helper for make_fancy_name. */
1491
1492 static void
1493 make_fancy_name_1 (tree expr)
1494 {
1495 char buffer[32];
1496 tree index;
1497
1498 if (DECL_P (expr))
1499 {
1500 make_fancy_decl_name (expr);
1501 return;
1502 }
1503
1504 switch (TREE_CODE (expr))
1505 {
1506 case COMPONENT_REF:
1507 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1508 obstack_1grow (&name_obstack, '$');
1509 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1510 break;
1511
1512 case ARRAY_REF:
1513 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1514 obstack_1grow (&name_obstack, '$');
1515 /* Arrays with only one element may not have a constant as their
1516 index. */
1517 index = TREE_OPERAND (expr, 1);
1518 if (TREE_CODE (index) != INTEGER_CST)
1519 break;
1520 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1521 obstack_grow (&name_obstack, buffer, strlen (buffer));
1522 break;
1523
1524 case ADDR_EXPR:
1525 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1526 break;
1527
1528 case MEM_REF:
1529 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1530 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1531 {
1532 obstack_1grow (&name_obstack, '$');
1533 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1534 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1535 obstack_grow (&name_obstack, buffer, strlen (buffer));
1536 }
1537 break;
1538
1539 case BIT_FIELD_REF:
1540 case REALPART_EXPR:
1541 case IMAGPART_EXPR:
1542 gcc_unreachable (); /* we treat these as scalars. */
1543 break;
1544 default:
1545 break;
1546 }
1547 }
1548
1549 /* Create a human readable name for a replacement variable based on EXPR. */
1550
1551 static char *
1552 make_fancy_name (tree expr)
1553 {
1554 make_fancy_name_1 (expr);
1555 obstack_1grow (&name_obstack, '\0');
1556 return XOBFINISH (&name_obstack, char *);
1557 }
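/* For example (illustrative only), calling make_fancy_name on the expression
   x.i.a produces "x$i$a" and on arr[3].f produces "arr$3$f".  */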
1558
1559 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1560 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1561 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1562 be non-NULL and is used to insert new statements either before or below
1563 the current one as specified by INSERT_AFTER. This function is not capable
1564 of handling bitfields. */
1565
1566 tree
1567 build_ref_for_offset (location_t loc, tree base, poly_int64 offset,
1568 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1569 bool insert_after)
1570 {
1571 tree prev_base = base;
1572 tree off;
1573 tree mem_ref;
1574 poly_int64 base_offset;
1575 unsigned HOST_WIDE_INT misalign;
1576 unsigned int align;
1577
1578 /* Preserve address-space information. */
1579 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1580 if (as != TYPE_ADDR_SPACE (exp_type))
1581 exp_type = build_qualified_type (exp_type,
1582 TYPE_QUALS (exp_type)
1583 | ENCODE_QUAL_ADDR_SPACE (as));
1584
1585 poly_int64 byte_offset = exact_div (offset, BITS_PER_UNIT);
1586 get_object_alignment_1 (base, &align, &misalign);
1587 base = get_addr_base_and_unit_offset (base, &base_offset);
1588
1589 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1590 offset such as array[var_index]. */
1591 if (!base)
1592 {
1593 gassign *stmt;
1594 tree tmp, addr;
1595
1596 gcc_checking_assert (gsi);
1597 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1598 addr = build_fold_addr_expr (unshare_expr (prev_base));
1599 STRIP_USELESS_TYPE_CONVERSION (addr);
1600 stmt = gimple_build_assign (tmp, addr);
1601 gimple_set_location (stmt, loc);
1602 if (insert_after)
1603 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1604 else
1605 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1606
1607 off = build_int_cst (reference_alias_ptr_type (prev_base), byte_offset);
1608 base = tmp;
1609 }
1610 else if (TREE_CODE (base) == MEM_REF)
1611 {
1612 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1613 base_offset + byte_offset);
1614 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1615 base = unshare_expr (TREE_OPERAND (base, 0));
1616 }
1617 else
1618 {
1619 off = build_int_cst (reference_alias_ptr_type (prev_base),
1620 base_offset + byte_offset);
1621 base = build_fold_addr_expr (unshare_expr (base));
1622 }
1623
1624 unsigned int align_bound = known_alignment (misalign + offset);
1625 if (align_bound != 0)
1626 align = MIN (align, align_bound);
1627 if (align != TYPE_ALIGN (exp_type))
1628 exp_type = build_aligned_type (exp_type, align);
1629
1630 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1631 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1632 if (TREE_THIS_VOLATILE (prev_base))
1633 TREE_THIS_VOLATILE (mem_ref) = 1;
1634 if (TREE_SIDE_EFFECTS (prev_base))
1635 TREE_SIDE_EFFECTS (mem_ref) = 1;
1636 return mem_ref;
1637 }
1638
1639 /* Construct and return a memory reference that is equal to a portion of
1640 MODEL->expr but is based on BASE. If this cannot be done, return NULL. */
1641
1642 static tree
1643 build_reconstructed_reference (location_t, tree base, struct access *model)
1644 {
1645 tree expr = model->expr, prev_expr = NULL;
1646 while (!types_compatible_p (TREE_TYPE (expr), TREE_TYPE (base)))
1647 {
1648 if (!handled_component_p (expr))
1649 return NULL_TREE;
1650 prev_expr = expr;
1651 expr = TREE_OPERAND (expr, 0);
1652 }
1653
1654 /* Guard against broken VIEW_CONVERT_EXPRs... */
1655 if (!prev_expr)
1656 return NULL_TREE;
1657
1658 TREE_OPERAND (prev_expr, 0) = base;
1659 tree ref = unshare_expr (model->expr);
1660 TREE_OPERAND (prev_expr, 0) = expr;
1661 return ref;
1662 }
1663
1664 /* Construct a memory reference to a part of an aggregate BASE at the given
1665 OFFSET and of the same type as MODEL. In case this is a reference to a
1666 bit-field, the function will replicate the last component_ref of model's
1667 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1668 build_ref_for_offset. */
1669
1670 static tree
1671 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1672 struct access *model, gimple_stmt_iterator *gsi,
1673 bool insert_after)
1674 {
1675 gcc_assert (offset >= 0);
1676 if (TREE_CODE (model->expr) == COMPONENT_REF
1677 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1678 {
1679 /* This access represents a bit-field. */
1680 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1681
1682 offset -= int_bit_position (fld);
1683 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1684 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1685 gsi, insert_after);
1686 /* The flag will be set on the record type. */
1687 REF_REVERSE_STORAGE_ORDER (t) = 0;
1688 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1689 NULL_TREE);
1690 }
1691 else
1692 {
1693 tree res;
1694 if (model->grp_same_access_path
1695 && !TREE_THIS_VOLATILE (base)
1696 && (TYPE_ADDR_SPACE (TREE_TYPE (base))
1697 == TYPE_ADDR_SPACE (TREE_TYPE (model->expr)))
1698 && offset <= model->offset
1699 /* build_reconstructed_reference can still fail if we have already
1700 massaged BASE because of another type incompatibility. */
1701 && (res = build_reconstructed_reference (loc, base, model)))
1702 return res;
1703 else
1704 return build_ref_for_offset (loc, base, offset, model->reverse,
1705 model->type, gsi, insert_after);
1706 }
1707 }
1708
1709 /* Attempt to build a memory reference that we could put into a gimple
1710 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1711 create statements and returns NULL instead. This function also ignores
1712 alignment issues and so its results should never end up in non-debug
1713 statements. */
1714
1715 static tree
1716 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1717 struct access *model)
1718 {
1719 poly_int64 base_offset;
1720 tree off;
1721
1722 if (TREE_CODE (model->expr) == COMPONENT_REF
1723 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1724 return NULL_TREE;
1725
1726 base = get_addr_base_and_unit_offset (base, &base_offset);
1727 if (!base)
1728 return NULL_TREE;
1729 if (TREE_CODE (base) == MEM_REF)
1730 {
1731 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1732 base_offset + offset / BITS_PER_UNIT);
1733 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1734 base = unshare_expr (TREE_OPERAND (base, 0));
1735 }
1736 else
1737 {
1738 off = build_int_cst (reference_alias_ptr_type (base),
1739 base_offset + offset / BITS_PER_UNIT);
1740 base = build_fold_addr_expr (unshare_expr (base));
1741 }
1742
1743 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1744 }
1745
1746 /* Construct a memory reference consisting of component_refs and array_refs to
1747 a part of an aggregate *RES (which is of type TYPE). The requested part
1748 should have type EXP_TYPE and be at the given OFFSET. This function might not
1749 succeed; it returns true when it does and only then *RES points to something
1750 meaningful. This function should be used only to build expressions that we
1751 might need to present to user (e.g. in warnings). In all other situations,
1752 build_ref_for_model or build_ref_for_offset should be used instead. */
1753
1754 static bool
1755 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1756 tree exp_type)
1757 {
1758 while (1)
1759 {
1760 tree fld;
1761 tree tr_size, index, minidx;
1762 HOST_WIDE_INT el_size;
1763
1764 if (offset == 0 && exp_type
1765 && types_compatible_p (exp_type, type))
1766 return true;
1767
1768 switch (TREE_CODE (type))
1769 {
1770 case UNION_TYPE:
1771 case QUAL_UNION_TYPE:
1772 case RECORD_TYPE:
1773 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1774 {
1775 HOST_WIDE_INT pos, size;
1776 tree tr_pos, expr, *expr_ptr;
1777
1778 if (TREE_CODE (fld) != FIELD_DECL)
1779 continue;
1780
1781 tr_pos = bit_position (fld);
1782 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1783 continue;
1784 pos = tree_to_uhwi (tr_pos);
1785 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1786 tr_size = DECL_SIZE (fld);
1787 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1788 continue;
1789 size = tree_to_uhwi (tr_size);
1790 if (size == 0)
1791 {
1792 if (pos != offset)
1793 continue;
1794 }
1795 else if (pos > offset || (pos + size) <= offset)
1796 continue;
1797
1798 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1799 NULL_TREE);
1800 expr_ptr = &expr;
1801 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1802 offset - pos, exp_type))
1803 {
1804 *res = expr;
1805 return true;
1806 }
1807 }
1808 return false;
1809
1810 case ARRAY_TYPE:
1811 tr_size = TYPE_SIZE (TREE_TYPE (type));
1812 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1813 return false;
1814 el_size = tree_to_uhwi (tr_size);
1815
1816 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1817 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1818 return false;
1819 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1820 if (!integer_zerop (minidx))
1821 index = int_const_binop (PLUS_EXPR, index, minidx);
1822 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1823 NULL_TREE, NULL_TREE);
1824 offset = offset % el_size;
1825 type = TREE_TYPE (type);
1826 break;
1827
1828 default:
1829 if (offset != 0)
1830 return false;
1831
1832 if (exp_type)
1833 return false;
1834 else
1835 return true;
1836 }
1837 }
1838 }
1839
1840 /* Print a message to the dump file explaining why a variable was rejected. */
1841
1842 static void
1843 reject (tree var, const char *msg)
1844 {
1845 if (dump_file && (dump_flags & TDF_DETAILS))
1846 {
1847 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1848 print_generic_expr (dump_file, var);
1849 fprintf (dump_file, "\n");
1850 }
1851 }
1852
1853 /* Return true if VAR is a candidate for SRA. */
1854
1855 static bool
1856 maybe_add_sra_candidate (tree var)
1857 {
1858 tree type = TREE_TYPE (var);
1859 const char *msg;
1860 tree_node **slot;
1861
1862 if (!AGGREGATE_TYPE_P (type))
1863 {
1864 reject (var, "not aggregate");
1865 return false;
1866 }
1867 /* Allow constant-pool entries that "need to live in memory". */
1868 if (needs_to_live_in_memory (var) && !constant_decl_p (var))
1869 {
1870 reject (var, "needs to live in memory");
1871 return false;
1872 }
1873 if (TREE_THIS_VOLATILE (var))
1874 {
1875 reject (var, "is volatile");
1876 return false;
1877 }
1878 if (!COMPLETE_TYPE_P (type))
1879 {
1880 reject (var, "has incomplete type");
1881 return false;
1882 }
1883 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1884 {
1885 reject (var, "type size not fixed");
1886 return false;
1887 }
1888 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1889 {
1890 reject (var, "type size is zero");
1891 return false;
1892 }
1893 if (type_internals_preclude_sra_p (type, &msg))
1894 {
1895 reject (var, msg);
1896 return false;
1897 }
1898 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1899 we also want to schedule it rather late. Thus we ignore it in
1900 the early pass. */
1901 (sra_mode == SRA_MODE_EARLY_INTRA
1902 && is_va_list_type (type)))
1903 {
1904 reject (var, "is va_list");
1905 return false;
1906 }
1907
1908 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1909 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1910 *slot = var;
1911
1912 if (dump_file && (dump_flags & TDF_DETAILS))
1913 {
1914 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1915 print_generic_expr (dump_file, var);
1916 fprintf (dump_file, "\n");
1917 }
1918
1919 return true;
1920 }
1921
1922 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1923 those declarations whose type is suitable for scalarization. */
1924
1925 static bool
1926 find_var_candidates (void)
1927 {
1928 tree var, parm;
1929 unsigned int i;
1930 bool ret = false;
1931
1932 for (parm = DECL_ARGUMENTS (current_function_decl);
1933 parm;
1934 parm = DECL_CHAIN (parm))
1935 ret |= maybe_add_sra_candidate (parm);
1936
1937 FOR_EACH_LOCAL_DECL (cfun, i, var)
1938 {
1939 if (!VAR_P (var))
1940 continue;
1941
1942 ret |= maybe_add_sra_candidate (var);
1943 }
1944
1945 return ret;
1946 }
1947
1948 /* Return true if EXP is a reference chain of COMPONENT_REFs and ARRAY_REFs
1949 ending either with a DECL or a MEM_REF with zero offset. */
1950
1951 static bool
1952 path_comparable_for_same_access (tree expr)
1953 {
1954 while (handled_component_p (expr))
1955 {
1956 if (TREE_CODE (expr) == ARRAY_REF)
1957 {
1958 /* SSA name indices can occur here too when the array is of size one.
1959 But we cannot just re-use array_refs with SSA names elsewhere in
1960 the function, so disallow non-constant indices. TODO: Remove this
1961 limitation after teaching build_reconstructed_reference to replace
1962 the index with the index type lower bound. */
1963 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST)
1964 return false;
1965 }
1966 expr = TREE_OPERAND (expr, 0);
1967 }
1968
1969 if (TREE_CODE (expr) == MEM_REF)
1970 {
1971 if (!zerop (TREE_OPERAND (expr, 1)))
1972 return false;
1973 }
1974 else
1975 gcc_assert (DECL_P (expr));
1976
1977 return true;
1978 }
1979
1980 /* Assuming that EXP1 consists of only COMPONENT_REFs and ARRAY_REFs, return
1981 true if the chain of these handled components are exactly the same as EXP2
1982 and the expression under them is the same DECL or an equivalent MEM_REF.
1983 The reference picked by compare_access_positions must go to EXP1. */
1984
1985 static bool
1986 same_access_path_p (tree exp1, tree exp2)
1987 {
1988 if (TREE_CODE (exp1) != TREE_CODE (exp2))
1989 {
1990 /* Special case single-field structures loaded sometimes as the field
1991 and sometimes as the structure. If the field is of a scalar type,
1992 compare_access_positions will put it into exp1.
1993
1994 TODO: The gimple register type condition can be removed if we teach
1995 compare_access_positions to put inner types first. */
1996 if (is_gimple_reg_type (TREE_TYPE (exp1))
1997 && TREE_CODE (exp1) == COMPONENT_REF
1998 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (exp1, 0)))
1999 == TYPE_MAIN_VARIANT (TREE_TYPE (exp2))))
2000 exp1 = TREE_OPERAND (exp1, 0);
2001 else
2002 return false;
2003 }
2004
2005 if (!operand_equal_p (exp1, exp2, OEP_ADDRESS_OF))
2006 return false;
2007
2008 return true;
2009 }
2010
2011 /* Sort all accesses for the given variable, check for partial overlaps and
2012 return NULL if there are any. If there are none, pick a representative for
2013 each combination of offset and size and create a linked list out of them.
2014 Return the pointer to the first representative and make sure it is the first
2015 one in the vector of accesses. */
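
/* A minimal sketch of a partial overlap that disqualifies an aggregate,
   using a hypothetical type (the GCC "packed" attribute forces the layout):

     union u
     {
       struct { char c; int i; } __attribute__ ((packed)) a;
       int j;
     };

   For a candidate of this type, accesses to its member a.i (offset 8, size
   32) and to its member j (offset 0, size 32) overlap in bits 8 to 31, yet
   neither is contained in the other, so this function returns NULL and the
   whole aggregate is disqualified.  */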
2016
2017 static struct access *
2018 sort_and_splice_var_accesses (tree var)
2019 {
2020 int i, j, access_count;
2021 struct access *res, **prev_acc_ptr = &res;
2022 vec<access_p> *access_vec;
2023 bool first = true;
2024 HOST_WIDE_INT low = -1, high = 0;
2025
2026 access_vec = get_base_access_vector (var);
2027 if (!access_vec)
2028 return NULL;
2029 access_count = access_vec->length ();
2030
2031 /* Sort by <OFFSET, SIZE>. */
2032 access_vec->qsort (compare_access_positions);
2033
2034 i = 0;
2035 while (i < access_count)
2036 {
2037 struct access *access = (*access_vec)[i];
2038 bool grp_write = access->write;
2039 bool grp_read = !access->write;
2040 bool grp_scalar_write = access->write
2041 && is_gimple_reg_type (access->type);
2042 bool grp_scalar_read = !access->write
2043 && is_gimple_reg_type (access->type);
2044 bool grp_assignment_read = access->grp_assignment_read;
2045 bool grp_assignment_write = access->grp_assignment_write;
2046 bool multiple_scalar_reads = false;
2047 bool grp_partial_lhs = access->grp_partial_lhs;
2048 bool first_scalar = is_gimple_reg_type (access->type);
2049 bool unscalarizable_region = access->grp_unscalarizable_region;
2050 bool grp_same_access_path = true;
2051 bool bf_non_full_precision
2052 = (INTEGRAL_TYPE_P (access->type)
2053 && TYPE_PRECISION (access->type) != access->size
2054 && TREE_CODE (access->expr) == COMPONENT_REF
2055 && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));
2056
2057 if (first || access->offset >= high)
2058 {
2059 first = false;
2060 low = access->offset;
2061 high = access->offset + access->size;
2062 }
2063 else if (access->offset > low && access->offset + access->size > high)
2064 return NULL;
2065 else
2066 gcc_assert (access->offset >= low
2067 && access->offset + access->size <= high);
2068
2069 grp_same_access_path = path_comparable_for_same_access (access->expr);
2070
2071 j = i + 1;
2072 while (j < access_count)
2073 {
2074 struct access *ac2 = (*access_vec)[j];
2075 if (ac2->offset != access->offset || ac2->size != access->size)
2076 break;
2077 if (ac2->write)
2078 {
2079 grp_write = true;
2080 grp_scalar_write = (grp_scalar_write
2081 || is_gimple_reg_type (ac2->type));
2082 }
2083 else
2084 {
2085 grp_read = true;
2086 if (is_gimple_reg_type (ac2->type))
2087 {
2088 if (grp_scalar_read)
2089 multiple_scalar_reads = true;
2090 else
2091 grp_scalar_read = true;
2092 }
2093 }
2094 grp_assignment_read |= ac2->grp_assignment_read;
2095 grp_assignment_write |= ac2->grp_assignment_write;
2096 grp_partial_lhs |= ac2->grp_partial_lhs;
2097 unscalarizable_region |= ac2->grp_unscalarizable_region;
2098 relink_to_new_repr (access, ac2);
2099
2100 /* If there are both aggregate-type and scalar-type accesses with
2101 this combination of size and offset, the comparison function
2102 should have put the scalars first. */
2103 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2104 /* It also prefers integral types to non-integral. However, when the
2105 precision of the selected type does not span the entire area and
2106 should also be used for a non-integer (i.e. float), we must not
2107 let that happen. Normally analyze_access_subtree expands the type
2108 to cover the entire area but for bit-fields it doesn't. */
2109 if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
2110 {
2111 if (dump_file && (dump_flags & TDF_DETAILS))
2112 {
2113 fprintf (dump_file, "Cannot scalarize the following access "
2114 "because insufficient precision integer type was "
2115 "selected.\n ");
2116 dump_access (dump_file, access, false);
2117 }
2118 unscalarizable_region = true;
2119 }
2120
2121 if (grp_same_access_path
2122 && !same_access_path_p (access->expr, ac2->expr))
2123 grp_same_access_path = false;
2124
2125 ac2->group_representative = access;
2126 j++;
2127 }
2128
2129 i = j;
2130
2131 access->group_representative = access;
2132 access->grp_write = grp_write;
2133 access->grp_read = grp_read;
2134 access->grp_scalar_read = grp_scalar_read;
2135 access->grp_scalar_write = grp_scalar_write;
2136 access->grp_assignment_read = grp_assignment_read;
2137 access->grp_assignment_write = grp_assignment_write;
2138 access->grp_hint = multiple_scalar_reads && !constant_decl_p (var);
2139 access->grp_partial_lhs = grp_partial_lhs;
2140 access->grp_unscalarizable_region = unscalarizable_region;
2141 access->grp_same_access_path = grp_same_access_path;
2142
2143 *prev_acc_ptr = access;
2144 prev_acc_ptr = &access->next_grp;
2145 }
2146
2147 gcc_assert (res == (*access_vec)[0]);
2148 return res;
2149 }
2150
2151 /* Create a variable for the given ACCESS which determines the type, name and a
2152 few other properties. Return the variable declaration and also store it in
2153 ACCESS->replacement_decl. REG_TYPE is used when creating a declaration to base
2154 a default-definition SSA name on in order to facilitate an uninitialized
2155 warning. It is used instead of the actual ACCESS type if that is not of a
2156 gimple register type. */
2157
2158 static tree
2159 create_access_replacement (struct access *access, tree reg_type = NULL_TREE)
2160 {
2161 tree repl;
2162
2163 tree type = access->type;
2164 if (reg_type && !is_gimple_reg_type (type))
2165 type = reg_type;
2166
2167 if (access->grp_to_be_debug_replaced)
2168 {
2169 repl = create_tmp_var_raw (access->type);
2170 DECL_CONTEXT (repl) = current_function_decl;
2171 }
2172 else
2173 /* Drop any special alignment on the type if it's not on the main
2174 variant. This avoids issues with weirdo ABIs like AAPCS. */
2175 repl = create_tmp_var (build_qualified_type (TYPE_MAIN_VARIANT (type),
2176 TYPE_QUALS (type)), "SR");
2177 if (access->grp_partial_lhs
2178 && is_gimple_reg_type (type))
2179 DECL_NOT_GIMPLE_REG_P (repl) = 1;
2180
2181 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2182 DECL_ARTIFICIAL (repl) = 1;
2183 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2184
2185 if (DECL_NAME (access->base)
2186 && !DECL_IGNORED_P (access->base)
2187 && !DECL_ARTIFICIAL (access->base))
2188 {
2189 char *pretty_name = make_fancy_name (access->expr);
2190 tree debug_expr = unshare_expr_without_location (access->expr), d;
2191 bool fail = false;
2192
2193 DECL_NAME (repl) = get_identifier (pretty_name);
2194 DECL_NAMELESS (repl) = 1;
2195 obstack_free (&name_obstack, pretty_name);
2196
2197 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2198 as DECL_DEBUG_EXPR isn't considered when looking for still
2199 used SSA_NAMEs and thus they could be freed. All debug info
2200 generation cares about is whether something is constant or variable
2201 and that get_ref_base_and_extent works properly on the
2202 expression. It cannot handle accesses at a non-constant offset
2203 though, so just give up in those cases. */
2204 for (d = debug_expr;
2205 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2206 d = TREE_OPERAND (d, 0))
2207 switch (TREE_CODE (d))
2208 {
2209 case ARRAY_REF:
2210 case ARRAY_RANGE_REF:
2211 if (TREE_OPERAND (d, 1)
2212 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2213 fail = true;
2214 if (TREE_OPERAND (d, 3)
2215 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2216 fail = true;
2217 /* FALLTHRU */
2218 case COMPONENT_REF:
2219 if (TREE_OPERAND (d, 2)
2220 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2221 fail = true;
2222 break;
2223 case MEM_REF:
2224 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2225 fail = true;
2226 else
2227 d = TREE_OPERAND (d, 0);
2228 break;
2229 default:
2230 break;
2231 }
2232 if (!fail)
2233 {
2234 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2235 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2236 }
2237 if (access->grp_no_warning)
2238 TREE_NO_WARNING (repl) = 1;
2239 else
2240 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2241 }
2242 else
2243 TREE_NO_WARNING (repl) = 1;
2244
2245 if (dump_file)
2246 {
2247 if (access->grp_to_be_debug_replaced)
2248 {
2249 fprintf (dump_file, "Created a debug-only replacement for ");
2250 print_generic_expr (dump_file, access->base);
2251 fprintf (dump_file, " offset: %u, size: %u\n",
2252 (unsigned) access->offset, (unsigned) access->size);
2253 }
2254 else
2255 {
2256 fprintf (dump_file, "Created a replacement for ");
2257 print_generic_expr (dump_file, access->base);
2258 fprintf (dump_file, " offset: %u, size: %u: ",
2259 (unsigned) access->offset, (unsigned) access->size);
2260 print_generic_expr (dump_file, repl, TDF_UID);
2261 fprintf (dump_file, "\n");
2262 }
2263 }
2264 sra_stats.replacements++;
2265
2266 return repl;
2267 }
2268
2269 /* Return ACCESS scalar replacement, which must exist. */
2270
2271 static inline tree
2272 get_access_replacement (struct access *access)
2273 {
2274 gcc_checking_assert (access->replacement_decl);
2275 return access->replacement_decl;
2276 }
2277
2278
2279 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2280 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2281 to by it is not "within" the root. Return false iff some accesses partially
2282 overlap. */
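
/* For instance (a hypothetical layout, assuming 32-bit ints), if the sorted
   representatives for a "struct { int a; int b; }" variable are

     (offset 0, size 64)    the whole structure
     (offset 0, size 32)    field a
     (offset 32, size 32)   field b

   then the first one becomes a root and the two field accesses become its
   children, because both end within the root's limit of 64 bits.  */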
2283
2284 static bool
2285 build_access_subtree (struct access **access)
2286 {
2287 struct access *root = *access, *last_child = NULL;
2288 HOST_WIDE_INT limit = root->offset + root->size;
2289
2290 *access = (*access)->next_grp;
2291 while (*access && (*access)->offset + (*access)->size <= limit)
2292 {
2293 if (!last_child)
2294 root->first_child = *access;
2295 else
2296 last_child->next_sibling = *access;
2297 last_child = *access;
2298 (*access)->parent = root;
2299 (*access)->grp_write |= root->grp_write;
2300
2301 if (!build_access_subtree (access))
2302 return false;
2303 }
2304
2305 if (*access && (*access)->offset < limit)
2306 return false;
2307
2308 return true;
2309 }
2310
2311 /* Build a tree of access representatives, ACCESS is the pointer to the first
2312 one, others are linked in a list by the next_grp field. Return false iff
2313 some accesses partially overlap. */
2314
2315 static bool
2316 build_access_trees (struct access *access)
2317 {
2318 while (access)
2319 {
2320 struct access *root = access;
2321
2322 if (!build_access_subtree (&access))
2323 return false;
2324 root->next_grp = access;
2325 }
2326 return true;
2327 }
2328
2329 /* Traverse the access forest where ROOT is the first root and verify that
2330 various important invariants hold true. */
2331
2332 DEBUG_FUNCTION void
2333 verify_sra_access_forest (struct access *root)
2334 {
2335 struct access *access = root;
2336 tree first_base = root->base;
2337 gcc_assert (DECL_P (first_base));
2338 do
2339 {
2340 gcc_assert (access->base == first_base);
2341 if (access->parent)
2342 gcc_assert (access->offset >= access->parent->offset
2343 && access->size <= access->parent->size);
2344 if (access->next_sibling)
2345 gcc_assert (access->next_sibling->offset
2346 >= access->offset + access->size);
2347
2348 poly_int64 poffset, psize, pmax_size;
2349 bool reverse;
2350 tree base = get_ref_base_and_extent (access->expr, &poffset, &psize,
2351 &pmax_size, &reverse);
2352 HOST_WIDE_INT offset, size, max_size;
2353 if (!poffset.is_constant (&offset)
2354 || !psize.is_constant (&size)
2355 || !pmax_size.is_constant (&max_size))
2356 gcc_unreachable ();
2357 gcc_assert (base == first_base);
2358 gcc_assert (offset == access->offset);
2359 gcc_assert (access->grp_unscalarizable_region
2360 || access->grp_total_scalarization
2361 || size == max_size);
2362 gcc_assert (access->grp_unscalarizable_region
2363 || !is_gimple_reg_type (access->type)
2364 || size == access->size);
2365 gcc_assert (reverse == access->reverse);
2366
2367 if (access->first_child)
2368 {
2369 gcc_assert (access->first_child->parent == access);
2370 access = access->first_child;
2371 }
2372 else if (access->next_sibling)
2373 {
2374 gcc_assert (access->next_sibling->parent == access->parent);
2375 access = access->next_sibling;
2376 }
2377 else
2378 {
2379 while (access->parent && !access->next_sibling)
2380 access = access->parent;
2381 if (access->next_sibling)
2382 access = access->next_sibling;
2383 else
2384 {
2385 gcc_assert (access == root);
2386 root = root->next_grp;
2387 access = root;
2388 }
2389 }
2390 }
2391 while (access);
2392 }
2393
2394 /* Verify access forests of all candidates with accesses by calling
2395 verify_sra_access_forest on each of them. */
2396
2397 DEBUG_FUNCTION void
2398 verify_all_sra_access_forests (void)
2399 {
2400 bitmap_iterator bi;
2401 unsigned i;
2402 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2403 {
2404 tree var = candidate (i);
2405 struct access *access = get_first_repr_for_decl (var);
2406 if (access)
2407 {
2408 gcc_assert (access->base == var);
2409 verify_sra_access_forest (access);
2410 }
2411 }
2412 }
2413
2414 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2415 array. */
2416
2417 static bool
2418 expr_with_var_bounded_array_refs_p (tree expr)
2419 {
2420 while (handled_component_p (expr))
2421 {
2422 if (TREE_CODE (expr) == ARRAY_REF
2423 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2424 return true;
2425 expr = TREE_OPERAND (expr, 0);
2426 }
2427 return false;
2428 }
2429
2430 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2431 doing so seems beneficial and when ALLOW_REPLACEMENTS allows it. If TOTALLY
2432 is set, we are totally scalarizing the aggregate. Also set all sorts of
2433 access flags appropriately along the way, notably always set grp_read and
2434 grp_assignment_read according to MARK_READ and grp_write when MARK_WRITE is
2435 true.
2436
2437 Creating a replacement for a scalar access is considered beneficial if its
2438 grp_hint or TOTALLY is set (this means either that there is more than one
2439 direct read access or that we are attempting total scalarization) or
2440 according to the following table:
2441
2442 Access written to through a scalar type (once or more times)
2443 |
2444 | Written to in an assignment statement
2445 | |
2446 | | Access read as scalar _once_
2447 | | |
2448 | | | Read in an assignment statement
2449 | | | |
2450 | | | | Scalarize Comment
2451 -----------------------------------------------------------------------------
2452 0 0 0 0 No access for the scalar
2453 0 0 0 1 No access for the scalar
2454 0 0 1 0 No Single read - won't help
2455 0 0 1 1 No The same case
2456 0 1 0 0 No access for the scalar
2457 0 1 0 1 No access for the scalar
2458 0 1 1 0 Yes s = *g; return s.i;
2459 0 1 1 1 Yes The same case as above
2460 1 0 0 0 No Won't help
2461 1 0 0 1 Yes s.i = 1; *g = s;
2462 1 0 1 0 Yes s.i = 5; g = s.i;
2463 1 0 1 1 Yes The same case as above
2464 1 1 0 0 No Won't help.
2465 1 1 0 1 Yes s.i = 1; *g = s;
2466 1 1 1 0 Yes s = *g; return s.i;
2467 1 1 1 1 Yes Any of the above yeses */
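
/* Expanding the table's own "s = *g; return s.i;" case into a complete
   hypothetical function (names invented for illustration): the access to s.i
   matches the "0 1 1 0" row, because s is only written to in an aggregate
   assignment and s.i is read as a scalar exactly once, so a scalar
   replacement is still created for it:

     struct S { int i; float f; };

     int
     foo (struct S *g)
     {
       struct S s = *g;
       return s.i;
     }  */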
2468
2469 static bool
2470 analyze_access_subtree (struct access *root, struct access *parent,
2471 bool allow_replacements, bool totally)
2472 {
2473 struct access *child;
2474 HOST_WIDE_INT limit = root->offset + root->size;
2475 HOST_WIDE_INT covered_to = root->offset;
2476 bool scalar = is_gimple_reg_type (root->type);
2477 bool hole = false, sth_created = false;
2478
2479 if (parent)
2480 {
2481 if (parent->grp_read)
2482 root->grp_read = 1;
2483 if (parent->grp_assignment_read)
2484 root->grp_assignment_read = 1;
2485 if (parent->grp_write)
2486 root->grp_write = 1;
2487 if (parent->grp_assignment_write)
2488 root->grp_assignment_write = 1;
2489 if (!parent->grp_same_access_path)
2490 root->grp_same_access_path = 0;
2491 }
2492
2493 if (root->grp_unscalarizable_region)
2494 allow_replacements = false;
2495
2496 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2497 allow_replacements = false;
2498
2499 for (child = root->first_child; child; child = child->next_sibling)
2500 {
2501 hole |= covered_to < child->offset;
2502 sth_created |= analyze_access_subtree (child, root,
2503 allow_replacements && !scalar,
2504 totally);
2505
2506 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2507 if (child->grp_covered)
2508 covered_to += child->size;
2509 else
2510 hole = true;
2511 }
2512
2513 if (allow_replacements && scalar && !root->first_child
2514 && (totally || !root->grp_total_scalarization)
2515 && (totally
2516 || root->grp_hint
2517 || ((root->grp_scalar_read || root->grp_assignment_read)
2518 && (root->grp_scalar_write || root->grp_assignment_write))))
2519 {
2520 /* Always create access replacements that cover the whole access.
2521 For integral types this means the precision has to match.
2522 Avoid assumptions based on the integral type kind, too. */
2523 if (INTEGRAL_TYPE_P (root->type)
2524 && (TREE_CODE (root->type) != INTEGER_TYPE
2525 || TYPE_PRECISION (root->type) != root->size)
2526 /* But leave bitfield accesses alone. */
2527 && (TREE_CODE (root->expr) != COMPONENT_REF
2528 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2529 {
2530 tree rt = root->type;
2531 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2532 && (root->size % BITS_PER_UNIT) == 0);
2533 root->type = build_nonstandard_integer_type (root->size,
2534 TYPE_UNSIGNED (rt));
2535 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2536 root->offset, root->reverse,
2537 root->type, NULL, false);
2538
2539 if (dump_file && (dump_flags & TDF_DETAILS))
2540 {
2541 fprintf (dump_file, "Changing the type of a replacement for ");
2542 print_generic_expr (dump_file, root->base);
2543 fprintf (dump_file, " offset: %u, size: %u ",
2544 (unsigned) root->offset, (unsigned) root->size);
2545 fprintf (dump_file, " to an integer.\n");
2546 }
2547 }
2548
2549 root->grp_to_be_replaced = 1;
2550 root->replacement_decl = create_access_replacement (root);
2551 sth_created = true;
2552 hole = false;
2553 }
2554 else
2555 {
2556 if (allow_replacements
2557 && scalar && !root->first_child
2558 && !root->grp_total_scalarization
2559 && (root->grp_scalar_write || root->grp_assignment_write)
2560 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2561 DECL_UID (root->base)))
2562 {
2563 gcc_checking_assert (!root->grp_scalar_read
2564 && !root->grp_assignment_read);
2565 sth_created = true;
2566 if (MAY_HAVE_DEBUG_BIND_STMTS)
2567 {
2568 root->grp_to_be_debug_replaced = 1;
2569 root->replacement_decl = create_access_replacement (root);
2570 }
2571 }
2572
2573 if (covered_to < limit)
2574 hole = true;
2575 if (scalar || !allow_replacements)
2576 root->grp_total_scalarization = 0;
2577 }
2578
2579 if (!hole || totally)
2580 root->grp_covered = 1;
2581 else if (root->grp_write || comes_initialized_p (root->base))
2582 root->grp_unscalarized_data = 1; /* not covered and written to */
2583 return sth_created;
2584 }
2585
2586 /* Analyze all access trees linked by next_grp by the means of
2587 analyze_access_subtree. */
2588 static bool
2589 analyze_access_trees (struct access *access)
2590 {
2591 bool ret = false;
2592
2593 while (access)
2594 {
2595 if (analyze_access_subtree (access, NULL, true,
2596 access->grp_total_scalarization))
2597 ret = true;
2598 access = access->next_grp;
2599 }
2600
2601 return ret;
2602 }
2603
2604 /* Return true iff a potential new child of ACC at offset OFFSET and with size
2605 SIZE would conflict with an already existing one. If exactly such a child
2606 already exists in ACC, store a pointer to it in EXACT_MATCH. */
2607
2608 static bool
2609 child_would_conflict_in_acc (struct access *acc, HOST_WIDE_INT norm_offset,
2610 HOST_WIDE_INT size, struct access **exact_match)
2611 {
2612 struct access *child;
2613
2614 for (child = acc->first_child; child; child = child->next_sibling)
2615 {
2616 if (child->offset == norm_offset && child->size == size)
2617 {
2618 *exact_match = child;
2619 return true;
2620 }
2621
2622 if (child->offset < norm_offset + size
2623 && child->offset + child->size > norm_offset)
2624 return true;
2625 }
2626
2627 return false;
2628 }
2629
2630 /* Create a new child access of PARENT, with all properties just like MODEL
2631 except for its offset. Return the new access or NULL if it cannot be
2632 created. Note that this access is created long after all splicing and
2633 sorting, it's not located in any access vector and is automatically a
2634 representative of its group. Set the grp_read and grp_write flags of the
2635 new access according to SET_GRP_READ and SET_GRP_WRITE. */
2636
2637 static struct access *
2638 create_artificial_child_access (struct access *parent, struct access *model,
2639 HOST_WIDE_INT new_offset,
2640 bool set_grp_read, bool set_grp_write)
2641 {
2642 struct access **child;
2643 tree expr = parent->base;
2644
2645 gcc_assert (!model->grp_unscalarizable_region);
2646
2647 struct access *access = access_pool.allocate ();
2648 memset (access, 0, sizeof (struct access));
2649 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2650 model->type))
2651 {
2652 access->grp_no_warning = true;
2653 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2654 new_offset, model, NULL, false);
2655 }
2656
2657 access->base = parent->base;
2658 access->expr = expr;
2659 access->offset = new_offset;
2660 access->size = model->size;
2661 access->type = model->type;
2662 access->parent = parent;
2663 access->grp_read = set_grp_read;
2664 access->grp_write = set_grp_write;
2665 access->reverse = model->reverse;
2666
2667 child = &parent->first_child;
2668 while (*child && (*child)->offset < new_offset)
2669 child = &(*child)->next_sibling;
2670
2671 access->next_sibling = *child;
2672 *child = access;
2673
2674 return access;
2675 }
2676
2677
2678 /* Beginning with ACCESS, traverse its whole access subtree and mark all
2679 sub-trees as written to. If any of them has not been marked so previously
2680 and has assignment links leading from it, re-enqueue it. */
2681
2682 static void
2683 subtree_mark_written_and_rhs_enqueue (struct access *access)
2684 {
2685 if (access->grp_write)
2686 return;
2687 access->grp_write = true;
2688 add_access_to_rhs_work_queue (access);
2689
2690 struct access *child;
2691 for (child = access->first_child; child; child = child->next_sibling)
2692 subtree_mark_written_and_rhs_enqueue (child);
2693 }
2694
2695 /* If there is still budget to create a propagation access for DECL, return
2696 true and decrement the budget. Otherwise return false. */
2697
2698 static bool
2699 budget_for_propagation_access (tree decl)
2700 {
2701 unsigned b, *p = propagation_budget->get (decl);
2702 if (p)
2703 b = *p;
2704 else
2705 b = param_sra_max_propagations;
2706
2707 if (b == 0)
2708 return false;
2709 b--;
2710
2711 if (b == 0 && dump_file && (dump_flags & TDF_DETAILS))
2712 {
2713 fprintf (dump_file, "The propagation budget of ");
2714 print_generic_expr (dump_file, decl);
2715 fprintf (dump_file, " (UID: %u) has been exhausted.\n", DECL_UID (decl));
2716 }
2717 propagation_budget->put (decl, b);
2718 return true;
2719 }
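
/* A usage sketch of the budget above, under the assumption that
   param_sra_max_propagations corresponds to the command-line option
   --param sra-max-propagations:

     gcc -O2 --param sra-max-propagations=8 test.c

   would limit the number of propagation accesses created for any single
   base declaration to eight.  */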
2720
2721 /* Propagate subaccesses and grp_write flags of RACC across an assignment link
2722 to LACC. Enqueue sub-accesses as necessary so that the write flag is
2723 propagated transitively. Return true if anything changed. Additionally, if
2724 RACC is a scalar access but LACC is not, change the type of the latter, if
2725 possible. */
2726
2727 static bool
2728 propagate_subaccesses_from_rhs (struct access *lacc, struct access *racc)
2729 {
2730 struct access *rchild;
2731 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2732 bool ret = false;
2733
2734 /* If the LHS is still not marked as being written to, we only need to do so
2735 if the RHS at this level actually was. */
2736 if (!lacc->grp_write)
2737 {
2738 gcc_checking_assert (!comes_initialized_p (racc->base));
2739 if (racc->grp_write)
2740 {
2741 subtree_mark_written_and_rhs_enqueue (lacc);
2742 ret = true;
2743 }
2744 }
2745
2746 if (is_gimple_reg_type (lacc->type)
2747 || lacc->grp_unscalarizable_region
2748 || racc->grp_unscalarizable_region)
2749 {
2750 if (!lacc->grp_write)
2751 {
2752 ret = true;
2753 subtree_mark_written_and_rhs_enqueue (lacc);
2754 }
2755 return ret;
2756 }
2757
2758 if (is_gimple_reg_type (racc->type))
2759 {
2760 if (!lacc->grp_write)
2761 {
2762 ret = true;
2763 subtree_mark_written_and_rhs_enqueue (lacc);
2764 }
2765 if (!lacc->first_child && !racc->first_child)
2766 {
2767 /* We are about to change the access type from aggregate to scalar,
2768 so we need to put the reverse flag onto the access, if any. */
2769 const bool reverse = TYPE_REVERSE_STORAGE_ORDER (lacc->type);
2770 tree t = lacc->base;
2771
2772 lacc->type = racc->type;
2773 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2774 lacc->offset, racc->type))
2775 {
2776 lacc->expr = t;
2777 lacc->grp_same_access_path = true;
2778 }
2779 else
2780 {
2781 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2782 lacc->base, lacc->offset,
2783 racc, NULL, false);
2784 if (TREE_CODE (lacc->expr) == MEM_REF)
2785 REF_REVERSE_STORAGE_ORDER (lacc->expr) = reverse;
2786 lacc->grp_no_warning = true;
2787 lacc->grp_same_access_path = false;
2788 }
2789 lacc->reverse = reverse;
2790 }
2791 return ret;
2792 }
2793
2794 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2795 {
2796 struct access *new_acc = NULL;
2797 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2798
2799 if (child_would_conflict_in_acc (lacc, norm_offset, rchild->size,
2800 &new_acc))
2801 {
2802 if (new_acc)
2803 {
2804 if (!new_acc->grp_write && rchild->grp_write)
2805 {
2806 gcc_assert (!lacc->grp_write);
2807 subtree_mark_written_and_rhs_enqueue (new_acc);
2808 ret = true;
2809 }
2810
2811 rchild->grp_hint = 1;
2812 new_acc->grp_hint |= new_acc->grp_read;
2813 if (rchild->first_child
2814 && propagate_subaccesses_from_rhs (new_acc, rchild))
2815 {
2816 ret = 1;
2817 add_access_to_rhs_work_queue (new_acc);
2818 }
2819 }
2820 else
2821 {
2822 if (!lacc->grp_write)
2823 {
2824 ret = true;
2825 subtree_mark_written_and_rhs_enqueue (lacc);
2826 }
2827 }
2828 continue;
2829 }
2830
2831 if (rchild->grp_unscalarizable_region
2832 || !budget_for_propagation_access (lacc->base))
2833 {
2834 if (rchild->grp_write && !lacc->grp_write)
2835 {
2836 ret = true;
2837 subtree_mark_written_and_rhs_enqueue (lacc);
2838 }
2839 continue;
2840 }
2841
2842 rchild->grp_hint = 1;
2843 /* Because get_ref_base_and_extent always includes padding in size for
2844 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
2845 type, we might actually be attempting here to create a child of the
2846 same type as the parent. */
2847 if (!types_compatible_p (lacc->type, rchild->type))
2848 new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
2849 false,
2850 (lacc->grp_write
2851 || rchild->grp_write));
2852 else
2853 new_acc = lacc;
2854 gcc_checking_assert (new_acc);
2855 if (racc->first_child)
2856 propagate_subaccesses_from_rhs (new_acc, rchild);
2857
2858 add_access_to_rhs_work_queue (lacc);
2859 ret = true;
2860 }
2861
2862 return ret;
2863 }
2864
2865 /* Propagate subaccesses of LACC across an assignment link to RACC if they
2866 should inhibit total scalarization of the corresponding area. No flags are
2867 being propagated in the process. Return true if anything changed. */
2868
2869 static bool
2870 propagate_subaccesses_from_lhs (struct access *lacc, struct access *racc)
2871 {
2872 if (is_gimple_reg_type (racc->type)
2873 || lacc->grp_unscalarizable_region
2874 || racc->grp_unscalarizable_region)
2875 return false;
2876
2877 /* TODO: Do we want to set some new racc flag to stop potential total
2878 scalarization if lacc is a scalar access (and neither of the two has
2879 children)? */
2880
2881 bool ret = false;
2882 HOST_WIDE_INT norm_delta = racc->offset - lacc->offset;
2883 for (struct access *lchild = lacc->first_child;
2884 lchild;
2885 lchild = lchild->next_sibling)
2886 {
2887 struct access *matching_acc = NULL;
2888 HOST_WIDE_INT norm_offset = lchild->offset + norm_delta;
2889
2890 if (lchild->grp_unscalarizable_region
2891 || child_would_conflict_in_acc (racc, norm_offset, lchild->size,
2892 &matching_acc)
2893 || !budget_for_propagation_access (racc->base))
2894 {
2895 if (matching_acc
2896 && propagate_subaccesses_from_lhs (lchild, matching_acc))
2897 add_access_to_lhs_work_queue (matching_acc);
2898 continue;
2899 }
2900
2901 /* Because get_ref_base_and_extent always includes padding in size for
2902 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
2903 type, we might actually be attempting here to create a child of the
2904 same type as the parent. */
2905 if (!types_compatible_p (racc->type, lchild->type))
2906 {
2907 struct access *new_acc
2908 = create_artificial_child_access (racc, lchild, norm_offset,
2909 true, false);
2910 propagate_subaccesses_from_lhs (lchild, new_acc);
2911 }
2912 else
2913 propagate_subaccesses_from_lhs (lchild, racc);
2914 ret = true;
2915 }
2916 return ret;
2917 }
2918
2919 /* Propagate all subaccesses across assignment links. */
2920
2921 static void
2922 propagate_all_subaccesses (void)
2923 {
2924 propagation_budget = new hash_map<tree, unsigned>;
2925 while (rhs_work_queue_head)
2926 {
2927 struct access *racc = pop_access_from_rhs_work_queue ();
2928 struct assign_link *link;
2929
2930 if (racc->group_representative)
2931 racc = racc->group_representative;
2932 gcc_assert (racc->first_rhs_link);
2933
2934 for (link = racc->first_rhs_link; link; link = link->next_rhs)
2935 {
2936 struct access *lacc = link->lacc;
2937
2938 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2939 continue;
2940 lacc = lacc->group_representative;
2941
2942 bool reque_parents = false;
2943 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2944 {
2945 if (!lacc->grp_write)
2946 {
2947 subtree_mark_written_and_rhs_enqueue (lacc);
2948 reque_parents = true;
2949 }
2950 }
2951 else if (propagate_subaccesses_from_rhs (lacc, racc))
2952 reque_parents = true;
2953
2954 if (reque_parents)
2955 do
2956 {
2957 add_access_to_rhs_work_queue (lacc);
2958 lacc = lacc->parent;
2959 }
2960 while (lacc);
2961 }
2962 }
2963
2964 while (lhs_work_queue_head)
2965 {
2966 struct access *lacc = pop_access_from_lhs_work_queue ();
2967 struct assign_link *link;
2968
2969 if (lacc->group_representative)
2970 lacc = lacc->group_representative;
2971 gcc_assert (lacc->first_lhs_link);
2972
2973 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2974 continue;
2975
2976 for (link = lacc->first_lhs_link; link; link = link->next_lhs)
2977 {
2978 struct access *racc = link->racc;
2979
2980 if (racc->group_representative)
2981 racc = racc->group_representative;
2982 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2983 continue;
2984 if (propagate_subaccesses_from_lhs (lacc, racc))
2985 add_access_to_lhs_work_queue (racc);
2986 }
2987 }
2988 delete propagation_budget;
2989 }
2990
2991 /* Return true if the forest beginning with ROOT does not contain
2992 unscalarizable regions or non-byte aligned accesses. */
2993
2994 static bool
2995 can_totally_scalarize_forest_p (struct access *root)
2996 {
2997 struct access *access = root;
2998 do
2999 {
3000 if (access->grp_unscalarizable_region
3001 || (access->offset % BITS_PER_UNIT) != 0
3002 || (access->size % BITS_PER_UNIT) != 0
3003 || (is_gimple_reg_type (access->type)
3004 && access->first_child))
3005 return false;
3006
3007 if (access->first_child)
3008 access = access->first_child;
3009 else if (access->next_sibling)
3010 access = access->next_sibling;
3011 else
3012 {
3013 while (access->parent && !access->next_sibling)
3014 access = access->parent;
3015 if (access->next_sibling)
3016 access = access->next_sibling;
3017 else
3018 {
3019 gcc_assert (access == root);
3020 root = root->next_grp;
3021 access = root;
3022 }
3023 }
3024 }
3025 while (access);
3026 return true;
3027 }
3028
3029 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3030 reference EXPR for total scalarization purposes and mark it as such. Within
3031 the children of PARENT, link it in between PTR and NEXT_SIBLING. */
3032
3033 static struct access *
3034 create_total_scalarization_access (struct access *parent, HOST_WIDE_INT pos,
3035 HOST_WIDE_INT size, tree type, tree expr,
3036 struct access **ptr,
3037 struct access *next_sibling)
3038 {
3039 struct access *access = access_pool.allocate ();
3040 memset (access, 0, sizeof (struct access));
3041 access->base = parent->base;
3042 access->offset = pos;
3043 access->size = size;
3044 access->expr = expr;
3045 access->type = type;
3046 access->parent = parent;
3047 access->grp_write = parent->grp_write;
3048 access->grp_total_scalarization = 1;
3049 access->grp_hint = 1;
3050 access->grp_same_access_path = path_comparable_for_same_access (expr);
3051 access->reverse = reverse_storage_order_for_component_p (expr);
3052
3053 access->next_sibling = next_sibling;
3054 *ptr = access;
3055 return access;
3056 }
3057
3058 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3059 reference EXPR for total scalarization purposes and mark it as such, link it
3060 at *PTR and reshape the tree so that those elements at *PTR and their
3061 siblings which fall within the part described by POS and SIZE are moved to
3062 be children of the new access. If a partial overlap is detected, return
3063 NULL. */
3064
3065 static struct access *
3066 create_total_access_and_reshape (struct access *parent, HOST_WIDE_INT pos,
3067 HOST_WIDE_INT size, tree type, tree expr,
3068 struct access **ptr)
3069 {
3070 struct access **p = ptr;
3071
3072 while (*p && (*p)->offset < pos + size)
3073 {
3074 if ((*p)->offset + (*p)->size > pos + size)
3075 return NULL;
3076 p = &(*p)->next_sibling;
3077 }
3078
3079 struct access *next_child = *ptr;
3080 struct access *new_acc
3081 = create_total_scalarization_access (parent, pos, size, type, expr,
3082 ptr, *p);
3083 if (p != ptr)
3084 {
3085 new_acc->first_child = next_child;
3086 *p = NULL;
3087 for (struct access *a = next_child; a; a = a->next_sibling)
3088 a->parent = new_acc;
3089 }
3090 return new_acc;
3091 }
3092
3093 static bool totally_scalarize_subtree (struct access *root);
3094
3095 /* Return true if INNER is either the same type as OUTER or if it is the type
3096 of a record field in OUTER at offset zero, possibly in nested
3097 sub-records. */
3098
3099 static bool
3100 access_and_field_type_match_p (tree outer, tree inner)
3101 {
3102 if (TYPE_MAIN_VARIANT (outer) == TYPE_MAIN_VARIANT (inner))
3103 return true;
3104 if (TREE_CODE (outer) != RECORD_TYPE)
3105 return false;
3106 tree fld = TYPE_FIELDS (outer);
3107 while (fld)
3108 {
3109 if (TREE_CODE (fld) == FIELD_DECL)
3110 {
3111 if (!zerop (DECL_FIELD_OFFSET (fld)))
3112 return false;
3113 if (TYPE_MAIN_VARIANT (TREE_TYPE (fld)) == inner)
3114 return true;
3115 if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE)
3116 fld = TYPE_FIELDS (TREE_TYPE (fld));
3117 else
3118 return false;
3119 }
3120 else
3121 fld = DECL_CHAIN (fld);
3122 }
3123 return false;
3124 }
3125
3126 /* Return type of total_should_skip_creating_access indicating whether a total
3127 scalarization access for a field/element should be created, whether it
3128 already exists or whether the entire total scalarization has to fail. */
3129
3130 enum total_sra_field_state {TOTAL_FLD_CREATE, TOTAL_FLD_DONE, TOTAL_FLD_FAILED};
3131
3132 /* Do all the necessary steps in total scalarization when a part of the given
3133 aggregate with the given TYPE at POS and with the given SIZE should be put
3134 into PARENT and when we have processed all its siblings with smaller offsets
3135 up until and including LAST_SEEN_SIBLING (which can be NULL).
3136
3137 If some further siblings are to be skipped, set *LAST_SEEN_SIBLING as
3138 appropriate. Return TOTAL_FLD_CREATE if the caller should carry on with
3139 creating a new access, TOTAL_FLD_DONE if access or accesses capable of
3140 representing the described part of the aggregate for the purposes of total
3141 scalarization already exist or TOTAL_FLD_FAILED if there is a problem which
3142 prevents total scalarization from happening at all. */
3143
3144 static enum total_sra_field_state
3145 total_should_skip_creating_access (struct access *parent,
3146 struct access **last_seen_sibling,
3147 tree type, HOST_WIDE_INT pos,
3148 HOST_WIDE_INT size)
3149 {
3150 struct access *next_child;
3151 if (!*last_seen_sibling)
3152 next_child = parent->first_child;
3153 else
3154 next_child = (*last_seen_sibling)->next_sibling;
3155
3156 /* First, traverse the chain of siblings until it points to an access with
3157 offset at least equal to POS. Check all skipped accesses whether they
3158 span the POS boundary and if so, return with a failure. */
3159 while (next_child && next_child->offset < pos)
3160 {
3161 if (next_child->offset + next_child->size > pos)
3162 return TOTAL_FLD_FAILED;
3163 *last_seen_sibling = next_child;
3164 next_child = next_child->next_sibling;
3165 }
3166
3167 /* Now check whether next_child has exactly the right POS and SIZE and if so,
3168 whether it can represent what we need and can be totally scalarized
3169 itself. */
3170 if (next_child && next_child->offset == pos
3171 && next_child->size == size)
3172 {
3173 if (!is_gimple_reg_type (next_child->type)
3174 && (!access_and_field_type_match_p (type, next_child->type)
3175 || !totally_scalarize_subtree (next_child)))
3176 return TOTAL_FLD_FAILED;
3177
3178 *last_seen_sibling = next_child;
3179 return TOTAL_FLD_DONE;
3180 }
3181
3182 /* If the child we're looking at would partially overlap, we just cannot
3183 totally scalarize. */
3184 if (next_child
3185 && next_child->offset < pos + size
3186 && next_child->offset + next_child->size > pos + size)
3187 return TOTAL_FLD_FAILED;
3188
3189 if (is_gimple_reg_type (type))
3190 {
3191 /* We don't scalarize accesses that are children of other scalar type
3192 accesses, so if we go on and create an access for a register type,
3193 there should not be any pre-existing children. There are rare cases
3194 where the requested type is a vector but we already have register
3195 accesses for all its elements which is equally good. Detect that
3196 situation or whether we need to bail out. */
3197
3198 HOST_WIDE_INT covered = pos;
3199 bool skipping = false;
3200 while (next_child
3201 && next_child->offset + next_child->size <= pos + size)
3202 {
3203 if (next_child->offset != covered
3204 || !is_gimple_reg_type (next_child->type))
3205 return TOTAL_FLD_FAILED;
3206
3207 covered += next_child->size;
3208 *last_seen_sibling = next_child;
3209 next_child = next_child->next_sibling;
3210 skipping = true;
3211 }
3212
3213 if (skipping)
3214 {
3215 if (covered != pos + size)
3216 return TOTAL_FLD_FAILED;
3217 else
3218 return TOTAL_FLD_DONE;
3219 }
3220 }
3221
3222 return TOTAL_FLD_CREATE;
3223 }
3224
3225 /* Go over sub-tree rooted in ROOT and attempt to create scalar accesses
3226 spanning all uncovered areas covered by ROOT, return false if the attempt
3227 failed. All created accesses will have grp_total_scalarization set (and
3228 should be ignored if the function returns false). */
3229
3230 static bool
3231 totally_scalarize_subtree (struct access *root)
3232 {
3233 gcc_checking_assert (!root->grp_unscalarizable_region);
3234 gcc_checking_assert (!is_gimple_reg_type (root->type));
3235
3236 struct access *last_seen_sibling = NULL;
3237
3238 switch (TREE_CODE (root->type))
3239 {
3240 case RECORD_TYPE:
3241 for (tree fld = TYPE_FIELDS (root->type); fld; fld = DECL_CHAIN (fld))
3242 if (TREE_CODE (fld) == FIELD_DECL)
3243 {
3244 tree ft = TREE_TYPE (fld);
3245 HOST_WIDE_INT fsize = tree_to_uhwi (DECL_SIZE (fld));
3246 if (!fsize)
3247 continue;
3248
3249 HOST_WIDE_INT pos = root->offset + int_bit_position (fld);
3250 enum total_sra_field_state
3251 state = total_should_skip_creating_access (root,
3252 &last_seen_sibling,
3253 ft, pos, fsize);
3254 switch (state)
3255 {
3256 case TOTAL_FLD_FAILED:
3257 return false;
3258 case TOTAL_FLD_DONE:
3259 continue;
3260 case TOTAL_FLD_CREATE:
3261 break;
3262 default:
3263 gcc_unreachable ();
3264 }
3265
3266 struct access **p = (last_seen_sibling
3267 ? &last_seen_sibling->next_sibling
3268 : &root->first_child);
3269 tree nref = build3 (COMPONENT_REF, ft, root->expr, fld, NULL_TREE);
3270 struct access *new_child
3271 = create_total_access_and_reshape (root, pos, fsize, ft, nref, p);
3272 if (!new_child)
3273 return false;
3274
3275 if (!is_gimple_reg_type (ft)
3276 && !totally_scalarize_subtree (new_child))
3277 return false;
3278 last_seen_sibling = new_child;
3279 }
3280 break;
3281 case ARRAY_TYPE:
3282 {
3283 tree elemtype = TREE_TYPE (root->type);
3284 tree elem_size = TYPE_SIZE (elemtype);
3285 gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
3286 HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
3287 gcc_assert (el_size > 0);
3288
3289 tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (root->type));
3290 gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
3291 tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (root->type));
3292 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
3293 if (!maxidx)
3294 goto out;
3295 gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
3296 tree domain = TYPE_DOMAIN (root->type);
3297 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
3298 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
3299 offset_int idx = wi::to_offset (minidx);
3300 offset_int max = wi::to_offset (maxidx);
3301 if (!TYPE_UNSIGNED (domain))
3302 {
3303 idx = wi::sext (idx, TYPE_PRECISION (domain));
3304 max = wi::sext (max, TYPE_PRECISION (domain));
3305 }
3306 for (HOST_WIDE_INT pos = root->offset;
3307 idx <= max;
3308 pos += el_size, ++idx)
3309 {
3310 enum total_sra_field_state
3311 state = total_should_skip_creating_access (root,
3312 &last_seen_sibling,
3313 elemtype, pos,
3314 el_size);
3315 switch (state)
3316 {
3317 case TOTAL_FLD_FAILED:
3318 return false;
3319 case TOTAL_FLD_DONE:
3320 continue;
3321 case TOTAL_FLD_CREATE:
3322 break;
3323 default:
3324 gcc_unreachable ();
3325 }
3326
3327 struct access **p = (last_seen_sibling
3328 ? &last_seen_sibling->next_sibling
3329 : &root->first_child);
3330 tree nref = build4 (ARRAY_REF, elemtype, root->expr,
3331 wide_int_to_tree (domain, idx),
3332 NULL_TREE, NULL_TREE);
3333 struct access *new_child
3334 = create_total_access_and_reshape (root, pos, el_size, elemtype,
3335 nref, p);
3336 if (!new_child)
3337 return false;
3338
3339 if (!is_gimple_reg_type (elemtype)
3340 && !totally_scalarize_subtree (new_child))
3341 return false;
3342 last_seen_sibling = new_child;
3343 }
3344 }
3345 break;
3346 default:
3347 gcc_unreachable ();
3348 }
3349
3350 out:
3351 return true;
3352 }
3353
3354 /* Go through all accesses collected throughout the (intraprocedural) analysis
3355 stage, exclude overlapping ones, identify representatives and build trees
3356 out of them, making decisions about scalarization on the way. Return true
3357 iff there are any to-be-scalarized variables after this stage. */
3358
3359 static bool
3360 analyze_all_variable_accesses (void)
3361 {
3362 int res = 0;
3363 bitmap tmp = BITMAP_ALLOC (NULL);
3364 bitmap_iterator bi;
3365 unsigned i;
3366
3367 bitmap_copy (tmp, candidate_bitmap);
3368 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
3369 {
3370 tree var = candidate (i);
3371 struct access *access;
3372
3373 access = sort_and_splice_var_accesses (var);
3374 if (!access || !build_access_trees (access))
3375 disqualify_candidate (var,
3376 "No or inhibitingly overlapping accesses.");
3377 }
3378
3379 propagate_all_subaccesses ();
3380
3381 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
3382 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
3383 fall back to a target default. */
3384 unsigned HOST_WIDE_INT max_scalarization_size
3385 = get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
3386
3387 if (optimize_speed_p)
3388 {
3389 if (global_options_set.x_param_sra_max_scalarization_size_speed)
3390 max_scalarization_size = param_sra_max_scalarization_size_speed;
3391 }
3392 else
3393 {
3394 if (global_options_set.x_param_sra_max_scalarization_size_size)
3395 max_scalarization_size = param_sra_max_scalarization_size_size;
3396 }
3397 max_scalarization_size *= BITS_PER_UNIT;
3398
3399 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3400 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
3401 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
3402 {
3403 tree var = candidate (i);
3404 if (!VAR_P (var))
3405 continue;
3406
3407 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var))) > max_scalarization_size)
3408 {
3409 if (dump_file && (dump_flags & TDF_DETAILS))
3410 {
3411 fprintf (dump_file, "Too big to totally scalarize: ");
3412 print_generic_expr (dump_file, var);
3413 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
3414 }
3415 continue;
3416 }
3417
3418 bool all_types_ok = true;
3419 for (struct access *access = get_first_repr_for_decl (var);
3420 access;
3421 access = access->next_grp)
3422 if (!can_totally_scalarize_forest_p (access)
3423 || !scalarizable_type_p (access->type, constant_decl_p (var)))
3424 {
3425 all_types_ok = false;
3426 break;
3427 }
3428 if (!all_types_ok)
3429 continue;
3430
3431 if (dump_file && (dump_flags & TDF_DETAILS))
3432 {
3433 fprintf (dump_file, "Will attempt to totally scalarize ");
3434 print_generic_expr (dump_file, var);
3435 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
3436 }
3437 bool scalarized = true;
3438 for (struct access *access = get_first_repr_for_decl (var);
3439 access;
3440 access = access->next_grp)
3441 if (!is_gimple_reg_type (access->type)
3442 && !totally_scalarize_subtree (access))
3443 {
3444 scalarized = false;
3445 break;
3446 }
3447
3448 if (scalarized)
3449 for (struct access *access = get_first_repr_for_decl (var);
3450 access;
3451 access = access->next_grp)
3452 access->grp_total_scalarization = true;
3453 }
3454
3455 if (flag_checking)
3456 verify_all_sra_access_forests ();
3457
3458 bitmap_copy (tmp, candidate_bitmap);
3459 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
3460 {
3461 tree var = candidate (i);
3462 struct access *access = get_first_repr_for_decl (var);
3463
3464 if (analyze_access_trees (access))
3465 {
3466 res++;
3467 if (dump_file && (dump_flags & TDF_DETAILS))
3468 {
3469 fprintf (dump_file, "\nAccess trees for ");
3470 print_generic_expr (dump_file, var);
3471 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
3472 dump_access_tree (dump_file, access);
3473 fprintf (dump_file, "\n");
3474 }
3475 }
3476 else
3477 disqualify_candidate (var, "No scalar replacements to be created.");
3478 }
3479
3480 BITMAP_FREE (tmp);
3481
3482 if (res)
3483 {
3484 statistics_counter_event (cfun, "Scalarized aggregates", res);
3485 return true;
3486 }
3487 else
3488 return false;
3489 }
3490
3491 /* Generate statements copying scalar replacements of accesses within a subtree
3492 into or out of AGG. ACCESS, all its children, siblings and their children
3493 are to be processed. AGG is an aggregate type expression (can be a
3494 declaration but does not have to be, it can for example also be a mem_ref or
3495 a series of handled components). TOP_OFFSET is the offset of the processed
3496 subtree which has to be subtracted from offsets of individual accesses to
3497 get corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
3498 replacements in the interval <start_offset, start_offset + chunk_size>,
3499 otherwise copy all. GSI is a statement iterator used to place the new
3500 statements. WRITE should be true when the statements should write from AGG
3501 to the replacement and false if vice versa. If INSERT_AFTER is true, new
3502 statements will be added after the current statement in GSI; otherwise they
3503 will be added before the statement. */
3504
3505 static void
3506 generate_subtree_copies (struct access *access, tree agg,
3507 HOST_WIDE_INT top_offset,
3508 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
3509 gimple_stmt_iterator *gsi, bool write,
3510 bool insert_after, location_t loc)
3511 {
3512 /* Never write anything into constant pool decls. See PR70602. */
3513 if (!write && constant_decl_p (agg))
3514 return;
3515 do
3516 {
3517 if (chunk_size && access->offset >= start_offset + chunk_size)
3518 return;
3519
3520 if (access->grp_to_be_replaced
3521 && (chunk_size == 0
3522 || access->offset + access->size > start_offset))
3523 {
3524 tree expr, repl = get_access_replacement (access);
3525 gassign *stmt;
3526
3527 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
3528 access, gsi, insert_after);
3529
3530 if (write)
3531 {
3532 if (access->grp_partial_lhs)
3533 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
3534 !insert_after,
3535 insert_after ? GSI_NEW_STMT
3536 : GSI_SAME_STMT);
3537 stmt = gimple_build_assign (repl, expr);
3538 }
3539 else
3540 {
3541 TREE_NO_WARNING (repl) = 1;
3542 if (access->grp_partial_lhs)
3543 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3544 !insert_after,
3545 insert_after ? GSI_NEW_STMT
3546 : GSI_SAME_STMT);
3547 stmt = gimple_build_assign (expr, repl);
3548 }
3549 gimple_set_location (stmt, loc);
3550
3551 if (insert_after)
3552 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3553 else
3554 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3555 update_stmt (stmt);
3556 sra_stats.subtree_copies++;
3557 }
3558 else if (write
3559 && access->grp_to_be_debug_replaced
3560 && (chunk_size == 0
3561 || access->offset + access->size > start_offset))
3562 {
3563 gdebug *ds;
3564 tree drhs = build_debug_ref_for_model (loc, agg,
3565 access->offset - top_offset,
3566 access);
3567 ds = gimple_build_debug_bind (get_access_replacement (access),
3568 drhs, gsi_stmt (*gsi));
3569 if (insert_after)
3570 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3571 else
3572 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3573 }
3574
3575 if (access->first_child)
3576 generate_subtree_copies (access->first_child, agg, top_offset,
3577 start_offset, chunk_size, gsi,
3578 write, insert_after, loc);
3579
3580 access = access->next_sibling;
3581 }
3582 while (access);
3583 }
3584
3585 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
3586 root of the subtree to be processed. GSI is the statement iterator used
3587 for inserting statements which are added after the current statement if
3588 INSERT_AFTER is true or before it otherwise. */
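/* Illustrative sketch, not part of the pass (hypothetical names): for a
   subtree whose components have scalar replacements s$a (int) and s$b
   (float), this would insert

       s$a = 0;
       s$b = 0.0;

   and, for components that only have debug replacements, the corresponding
   debug bind statements instead.  */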
3589
3590 static void
3591 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
3592 bool insert_after, location_t loc)
3593
3594 {
3595 struct access *child;
3596
3597 if (access->grp_to_be_replaced)
3598 {
3599 gassign *stmt;
3600
3601 stmt = gimple_build_assign (get_access_replacement (access),
3602 build_zero_cst (access->type));
3603 if (insert_after)
3604 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3605 else
3606 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3607 update_stmt (stmt);
3608 gimple_set_location (stmt, loc);
3609 }
3610 else if (access->grp_to_be_debug_replaced)
3611 {
3612 gdebug *ds
3613 = gimple_build_debug_bind (get_access_replacement (access),
3614 build_zero_cst (access->type),
3615 gsi_stmt (*gsi));
3616 if (insert_after)
3617 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3618 else
3619 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3620 }
3621
3622 for (child = access->first_child; child; child = child->next_sibling)
3623 init_subtree_with_zero (child, gsi, insert_after, loc);
3624 }
3625
3626 /* Clobber all scalar replacements in an access subtree. ACCESS is the
3627 root of the subtree to be processed. GSI is the statement iterator used
3628 for inserting statements which are added after the current statement if
3629 INSERT_AFTER is true or before it otherwise. */
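/* Illustrative sketch, not part of the pass (hypothetical names): for
   replacements s$a and s$b this would insert clobbers of the form

       s$a ={v} {CLOBBER};
       s$b ={v} {CLOBBER};

   telling later passes that the values of the replacements are dead.  */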
3630
3631 static void
3632 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
3633 bool insert_after, location_t loc)
3634
3635 {
3636 struct access *child;
3637
3638 if (access->grp_to_be_replaced)
3639 {
3640 tree rep = get_access_replacement (access);
3641 tree clobber = build_clobber (access->type);
3642 gimple *stmt = gimple_build_assign (rep, clobber);
3643
3644 if (insert_after)
3645 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3646 else
3647 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3648 update_stmt (stmt);
3649 gimple_set_location (stmt, loc);
3650 }
3651
3652 for (child = access->first_child; child; child = child->next_sibling)
3653 clobber_subtree (child, gsi, insert_after, loc);
3654 }
3655
3656 /* Search for an access representative for the given expression EXPR and
3657 return it or NULL if it cannot be found. */
3658
3659 static struct access *
3660 get_access_for_expr (tree expr)
3661 {
3662 poly_int64 poffset, psize, pmax_size;
3663 HOST_WIDE_INT offset, max_size;
3664 tree base;
3665 bool reverse;
3666
3667 /* FIXME: This should not be necessary, but Ada produces V_C_Es with a type
3668 of a different size than that of their argument, and we need the size of the
3669 latter. */
3670 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3671 expr = TREE_OPERAND (expr, 0);
3672
3673 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
3674 &reverse);
3675 if (!known_size_p (pmax_size)
3676 || !pmax_size.is_constant (&max_size)
3677 || !poffset.is_constant (&offset)
3678 || !DECL_P (base))
3679 return NULL;
3680
3681 if (tree basesize = DECL_SIZE (base))
3682 {
3683 poly_int64 sz;
3684 if (offset < 0
3685 || !poly_int_tree_p (basesize, &sz)
3686 || known_le (sz, offset))
3687 return NULL;
3688 }
3689
3690 if (max_size == 0
3691 || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3692 return NULL;
3693
3694 return get_var_base_offset_size_access (base, offset, max_size);
3695 }
3696
3697 /* Replace the expression EXPR with a scalar replacement if there is one and
3698 generate other statements to do type conversion or subtree copying if
3699 necessary. GSI is used to place newly created statements; WRITE is true if
3700 the expression is being written to (it is on the LHS of a statement or an
3701 output in an assembly statement). */
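/* Illustrative sketch, not part of the pass (hypothetical names): if s.a
   has a scalar replacement s$a of a compatible type, a statement such as

       x_1 = s.a;

   is simply rewritten to

       x_1 = s$a;

   If the types differ (e.g. the access goes through a union), a reference
   with the type of the replacement is built by build_ref_for_model and a
   separate load or store between it and s$a is inserted instead.  */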
3702
3703 static bool
3704 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
3705 {
3706 location_t loc;
3707 struct access *access;
3708 tree type, bfr, orig_expr;
3709 bool partial_cplx_access = false;
3710
3711 if (TREE_CODE (*expr) == BIT_FIELD_REF)
3712 {
3713 bfr = *expr;
3714 expr = &TREE_OPERAND (*expr, 0);
3715 }
3716 else
3717 bfr = NULL_TREE;
3718
3719 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3720 {
3721 expr = &TREE_OPERAND (*expr, 0);
3722 partial_cplx_access = true;
3723 }
3724 access = get_access_for_expr (*expr);
3725 if (!access)
3726 return false;
3727 type = TREE_TYPE (*expr);
3728 orig_expr = *expr;
3729
3730 loc = gimple_location (gsi_stmt (*gsi));
3731 gimple_stmt_iterator alt_gsi = gsi_none ();
3732 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
3733 {
3734 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3735 gsi = &alt_gsi;
3736 }
3737
3738 if (access->grp_to_be_replaced)
3739 {
3740 tree repl = get_access_replacement (access);
3741 /* If we replace a non-register typed access, simply use the original
3742 access expression to extract the scalar component afterwards.
3743 This happens if scalarizing a function return value or parameter
3744 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3745 gcc.c-torture/compile/20011217-1.c.
3746
3747 We also want to use this when accessing a complex or vector which can
3748 be accessed as a different type too, potentially creating a need for
3749 type conversion (see PR42196) and when scalarized unions are involved
3750 in assembler statements (see PR42398). */
3751 if (!bfr && !useless_type_conversion_p (type, access->type))
3752 {
3753 tree ref;
3754
3755 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
3756
3757 if (partial_cplx_access)
3758 {
3759 /* VIEW_CONVERT_EXPRs in partial complex access are always fine in
3760 the case of a write because in that case the replacement cannot
3761 be a gimple register. In the case of a load, we have to
3762 differentiate between a register and a non-register
3763 replacement. */
3764 tree t = build1 (VIEW_CONVERT_EXPR, type, repl);
3765 gcc_checking_assert (!write || access->grp_partial_lhs);
3766 if (!access->grp_partial_lhs)
3767 {
3768 tree tmp = make_ssa_name (type);
3769 gassign *stmt = gimple_build_assign (tmp, t);
3770 /* This is always a read. */
3771 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3772 t = tmp;
3773 }
3774 *expr = t;
3775 }
3776 else if (write)
3777 {
3778 gassign *stmt;
3779
3780 if (access->grp_partial_lhs)
3781 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
3782 false, GSI_NEW_STMT);
3783 stmt = gimple_build_assign (repl, ref);
3784 gimple_set_location (stmt, loc);
3785 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3786 }
3787 else
3788 {
3789 gassign *stmt;
3790
3791 if (access->grp_partial_lhs)
3792 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3793 true, GSI_SAME_STMT);
3794 stmt = gimple_build_assign (ref, repl);
3795 gimple_set_location (stmt, loc);
3796 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3797 }
3798 }
3799 else
3800 *expr = repl;
3801 sra_stats.exprs++;
3802 }
3803 else if (write && access->grp_to_be_debug_replaced)
3804 {
3805 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3806 NULL_TREE,
3807 gsi_stmt (*gsi));
3808 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3809 }
3810
3811 if (access->first_child)
3812 {
3813 HOST_WIDE_INT start_offset, chunk_size;
3814 if (bfr
3815 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3816 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3817 {
3818 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3819 start_offset = access->offset
3820 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3821 }
3822 else
3823 start_offset = chunk_size = 0;
3824
3825 generate_subtree_copies (access->first_child, orig_expr, access->offset,
3826 start_offset, chunk_size, gsi, write, write,
3827 loc);
3828 }
3829 return true;
3830 }
3831
3832 /* Where scalar replacements of the RHS have been written to when a replacement
3833 of the LHS of an assignment cannot be directly loaded from a replacement of
3834 the RHS. */
3835 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3836 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3837 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3838
3839 struct subreplacement_assignment_data
3840 {
3841 /* Offset of the access representing the lhs of the assignment. */
3842 HOST_WIDE_INT left_offset;
3843
3844 /* LHS and RHS of the original assignment. */
3845 tree assignment_lhs, assignment_rhs;
3846
3847 /* Access representing the rhs of the whole assignment. */
3848 struct access *top_racc;
3849
3850 /* Stmt iterator used for statement insertions after the original assignment.
3851 It points to the main GSI used to traverse a BB during function body
3852 modification. */
3853 gimple_stmt_iterator *new_gsi;
3854
3855 /* Stmt iterator used for statement insertions before the original
3856 assignment. Keeps on pointing to the original statement. */
3857 gimple_stmt_iterator old_gsi;
3858
3859 /* Location of the assignment. */
3860 location_t loc;
3861
3862 /* Keeps the information whether we have needed to refresh replacements of
3863 the LHS and from which side of the assignments this takes place. */
3864 enum unscalarized_data_handling refreshed;
3865 };
3866
3867 /* Store all replacements in the access tree rooted in SAD->top_racc either to
3868 their base aggregate if there are unscalarized data or directly to the LHS
3869 of the assignment described by SAD otherwise. */
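/* Illustrative sketch, not part of the pass (hypothetical names): for an
   assignment l = r where r still contains unscalarized data, this flushes
   the replacements back into the RHS aggregate before the original
   statement:

       r.a = r$a;
       r.b = r$b;

   whereas for a fully scalarized r the replacements are stored straight
   into the LHS aggregate instead:

       l.a = r$a;
       l.b = r$b;  */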
3870
3871 static void
3872 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3873 {
3874 tree src;
3875 if (sad->top_racc->grp_unscalarized_data)
3876 {
3877 src = sad->assignment_rhs;
3878 sad->refreshed = SRA_UDH_RIGHT;
3879 }
3880 else
3881 {
3882 src = sad->assignment_lhs;
3883 sad->refreshed = SRA_UDH_LEFT;
3884 }
3885 generate_subtree_copies (sad->top_racc->first_child, src,
3886 sad->top_racc->offset, 0, 0,
3887 &sad->old_gsi, false, false, sad->loc);
3888 }
3889
3890 /* Try to generate statements to load all sub-replacements in an access subtree
3891 formed by children of LACC from scalar replacements in the SAD->top_racc
3892 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3893 and load the accesses from it. */
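/* Illustrative sketch, not part of the pass (hypothetical names): for an
   aggregate assignment

       l = r;

   where both sides are scalarized into l$a, l$b and r$a, r$b, this would
   insert after the original statement

       l$a = r$a;
       l$b = r$b;

   falling back to loading from the (possibly refreshed) aggregate whenever
   no suitable replacement is found on the right hand side.  */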
3894
3895 static void
3896 load_assign_lhs_subreplacements (struct access *lacc,
3897 struct subreplacement_assignment_data *sad)
3898 {
3899 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3900 {
3901 HOST_WIDE_INT offset;
3902 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3903
3904 if (lacc->grp_to_be_replaced)
3905 {
3906 struct access *racc;
3907 gassign *stmt;
3908 tree rhs;
3909
3910 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3911 if (racc && racc->grp_to_be_replaced)
3912 {
3913 rhs = get_access_replacement (racc);
3914 if (!useless_type_conversion_p (lacc->type, racc->type))
3915 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3916 lacc->type, rhs);
3917
3918 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3919 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3920 NULL_TREE, true, GSI_SAME_STMT);
3921 }
3922 else
3923 {
3924 /* No suitable access on the right hand side, need to load from
3925 the aggregate. See if we have to update it first... */
3926 if (sad->refreshed == SRA_UDH_NONE)
3927 handle_unscalarized_data_in_subtree (sad);
3928
3929 if (sad->refreshed == SRA_UDH_LEFT)
3930 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3931 lacc->offset - sad->left_offset,
3932 lacc, sad->new_gsi, true);
3933 else
3934 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3935 lacc->offset - sad->left_offset,
3936 lacc, sad->new_gsi, true);
3937 if (lacc->grp_partial_lhs)
3938 rhs = force_gimple_operand_gsi (sad->new_gsi,
3939 rhs, true, NULL_TREE,
3940 false, GSI_NEW_STMT);
3941 }
3942
3943 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3944 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3945 gimple_set_location (stmt, sad->loc);
3946 update_stmt (stmt);
3947 sra_stats.subreplacements++;
3948 }
3949 else
3950 {
3951 if (sad->refreshed == SRA_UDH_NONE
3952 && lacc->grp_read && !lacc->grp_covered)
3953 handle_unscalarized_data_in_subtree (sad);
3954
3955 if (lacc && lacc->grp_to_be_debug_replaced)
3956 {
3957 gdebug *ds;
3958 tree drhs;
3959 struct access *racc = find_access_in_subtree (sad->top_racc,
3960 offset,
3961 lacc->size);
3962
3963 if (racc && racc->grp_to_be_replaced)
3964 {
3965 if (racc->grp_write || constant_decl_p (racc->base))
3966 drhs = get_access_replacement (racc);
3967 else
3968 drhs = NULL;
3969 }
3970 else if (sad->refreshed == SRA_UDH_LEFT)
3971 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3972 lacc->offset, lacc);
3973 else if (sad->refreshed == SRA_UDH_RIGHT)
3974 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3975 offset, lacc);
3976 else
3977 drhs = NULL_TREE;
3978 if (drhs
3979 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3980 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3981 lacc->type, drhs);
3982 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3983 drhs, gsi_stmt (sad->old_gsi));
3984 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3985 }
3986 }
3987
3988 if (lacc->first_child)
3989 load_assign_lhs_subreplacements (lacc, sad);
3990 }
3991 }
3992
3993 /* Result code for SRA assignment modification. */
3994 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3995 SRA_AM_MODIFIED, /* stmt changed but not
3996 removed */
3997 SRA_AM_REMOVED }; /* stmt eliminated */
3998
3999 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
4000 to the assignment and GSI is the statement iterator pointing at it. Returns
4001 the same values as sra_modify_assign. */
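/* Illustrative sketch, not part of the pass (hypothetical names): an
   assignment from an empty CONSTRUCTOR such as

       s = {};

   to a fully covered aggregate with replacements s$a and s$b is turned into

       s$a = 0;
       s$b = 0.0;

   and the original statement is removed; if the aggregate is not fully
   covered, the zero-initializations are emitted after it instead.  */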
4002
4003 static enum assignment_mod_result
4004 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
4005 {
4006 tree lhs = gimple_assign_lhs (stmt);
4007 struct access *acc = get_access_for_expr (lhs);
4008 if (!acc)
4009 return SRA_AM_NONE;
4010 location_t loc = gimple_location (stmt);
4011
4012 if (gimple_clobber_p (stmt))
4013 {
4014 /* Clobber the replacement variable. */
4015 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
4016 /* Remove clobbers of fully scalarized variables, they are dead. */
4017 if (acc->grp_covered)
4018 {
4019 unlink_stmt_vdef (stmt);
4020 gsi_remove (gsi, true);
4021 release_defs (stmt);
4022 return SRA_AM_REMOVED;
4023 }
4024 else
4025 return SRA_AM_MODIFIED;
4026 }
4027
4028 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
4029 {
4030 /* I have never seen this code path trigger, but if it can happen, the
4031 following should handle it gracefully. */
4032 if (access_has_children_p (acc))
4033 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
4034 true, true, loc);
4035 return SRA_AM_MODIFIED;
4036 }
4037
4038 if (acc->grp_covered)
4039 {
4040 init_subtree_with_zero (acc, gsi, false, loc);
4041 unlink_stmt_vdef (stmt);
4042 gsi_remove (gsi, true);
4043 release_defs (stmt);
4044 return SRA_AM_REMOVED;
4045 }
4046 else
4047 {
4048 init_subtree_with_zero (acc, gsi, true, loc);
4049 return SRA_AM_MODIFIED;
4050 }
4051 }
4052
4053 /* Create and return a new suitable default definition SSA_NAME for RACC which
4054 is an access describing an uninitialized part of an aggregate that is being
4055 loaded. REG_TYPE is used instead of the actual RACC type if that is not of
4056 a gimple register type. */
4057
4058 static tree
4059 get_repl_default_def_ssa_name (struct access *racc, tree reg_type)
4060 {
4061 gcc_checking_assert (!racc->grp_to_be_replaced
4062 && !racc->grp_to_be_debug_replaced);
4063 if (!racc->replacement_decl)
4064 racc->replacement_decl = create_access_replacement (racc, reg_type);
4065 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
4066 }
4067
4068 /* Examine both sides of the assignment statement pointed to by STMT, replace
4069 them with a scalar replacement if there is one and generate copying of
4070 replacements if scalarized aggregates have been used in the assignment. GSI
4071 is used to hold generated statements for type conversions and subtree
4072 copying. */
4073
4074 static enum assignment_mod_result
4075 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
4076 {
4077 struct access *lacc, *racc;
4078 tree lhs, rhs;
4079 bool modify_this_stmt = false;
4080 bool force_gimple_rhs = false;
4081 location_t loc;
4082 gimple_stmt_iterator orig_gsi = *gsi;
4083
4084 if (!gimple_assign_single_p (stmt))
4085 return SRA_AM_NONE;
4086 lhs = gimple_assign_lhs (stmt);
4087 rhs = gimple_assign_rhs1 (stmt);
4088
4089 if (TREE_CODE (rhs) == CONSTRUCTOR)
4090 return sra_modify_constructor_assign (stmt, gsi);
4091
4092 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
4093 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
4094 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
4095 {
4096 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
4097 gsi, false);
4098 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
4099 gsi, true);
4100 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
4101 }
4102
4103 lacc = get_access_for_expr (lhs);
4104 racc = get_access_for_expr (rhs);
4105 if (!lacc && !racc)
4106 return SRA_AM_NONE;
4107 /* Avoid modifying initializations of constant-pool replacements. */
4108 if (racc && (racc->replacement_decl == lhs))
4109 return SRA_AM_NONE;
4110
4111 loc = gimple_location (stmt);
4112 if (lacc && lacc->grp_to_be_replaced)
4113 {
4114 lhs = get_access_replacement (lacc);
4115 gimple_assign_set_lhs (stmt, lhs);
4116 modify_this_stmt = true;
4117 if (lacc->grp_partial_lhs)
4118 force_gimple_rhs = true;
4119 sra_stats.exprs++;
4120 }
4121
4122 if (racc && racc->grp_to_be_replaced)
4123 {
4124 rhs = get_access_replacement (racc);
4125 modify_this_stmt = true;
4126 if (racc->grp_partial_lhs)
4127 force_gimple_rhs = true;
4128 sra_stats.exprs++;
4129 }
4130 else if (racc
4131 && !racc->grp_unscalarized_data
4132 && !racc->grp_unscalarizable_region
4133 && TREE_CODE (lhs) == SSA_NAME
4134 && !access_has_replacements_p (racc))
4135 {
4136 rhs = get_repl_default_def_ssa_name (racc, TREE_TYPE (lhs));
4137 modify_this_stmt = true;
4138 sra_stats.exprs++;
4139 }
4140
4141 if (modify_this_stmt)
4142 {
4143 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
4144 {
4145 /* If we can avoid creating a VIEW_CONVERT_EXPR, do so.
4146 ??? This should move to fold_stmt which we simply should
4147 call after building a VIEW_CONVERT_EXPR here. */
4148 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
4149 && !contains_bitfld_component_ref_p (lhs))
4150 {
4151 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
4152 gimple_assign_set_lhs (stmt, lhs);
4153 }
4154 else if (lacc
4155 && AGGREGATE_TYPE_P (TREE_TYPE (rhs))
4156 && !contains_vce_or_bfcref_p (rhs))
4157 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
4158
4159 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
4160 {
4161 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
4162 rhs);
4163 if (is_gimple_reg_type (TREE_TYPE (lhs))
4164 && TREE_CODE (lhs) != SSA_NAME)
4165 force_gimple_rhs = true;
4166 }
4167 }
4168 }
4169
4170 if (lacc && lacc->grp_to_be_debug_replaced)
4171 {
4172 tree dlhs = get_access_replacement (lacc);
4173 tree drhs = unshare_expr (rhs);
4174 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
4175 {
4176 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
4177 && !contains_vce_or_bfcref_p (drhs))
4178 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
4179 if (drhs
4180 && !useless_type_conversion_p (TREE_TYPE (dlhs),
4181 TREE_TYPE (drhs)))
4182 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
4183 TREE_TYPE (dlhs), drhs);
4184 }
4185 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
4186 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
4187 }
4188
4189 /* From this point on, the function deals with assignments between aggregates
4190 when at least one has scalar reductions of some of its components. There are
4191 three possible scenarios: 1) both the LHS and RHS have to-be-scalarized
4192 components, 2) only the RHS has, or 3) only the LHS has.
4193
4194 In the first case, we would like to load the LHS components from RHS
4195 components whenever possible. If that is not possible, we would like to
4196 read it directly from the RHS (after updating it by storing in it its own
4197 components). If there are some necessary unscalarized data in the LHS,
4198 those will be loaded by the original assignment too. If neither of these
4199 cases happen, the original statement can be removed. Most of this is done
4200 by load_assign_lhs_subreplacements.
4201
4202 In the second case, we would like to store all RHS scalarized components
4203 directly into LHS and if they cover the aggregate completely, remove the
4204 statement too. In the third case, we want the LHS components to be loaded
4205 directly from the RHS (DSE will remove the original statement if it
4206 becomes redundant).
4207
4208 This is a bit complex but manageable when types match and when unions do
4209 not cause confusion in a way that we cannot really load a component of LHS
4210 from the RHS or vice versa (the access representing this level can have
4211 subaccesses that are accessible only through a different union field at a
4212 higher level - different from the one used in the examined expression).
4213 Unions are fun.
4214
4215 Therefore, I specially handle a fourth case, happening when there is a
4216 specific type cast or it is impossible to locate a scalarized subaccess on
4217 the other side of the expression. If that happens, I simply "refresh" the
4218 RHS by storing in it its scalarized components, leave the original statement
4219 there to do the copying and then load the scalar replacements of the LHS.
4220 This is what the first branch does. */
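/* Illustrative sketch, not part of the pass (hypothetical names): for an
   aggregate copy

       d = s;

   scenario 1) with both sides scalarized typically becomes

       d$a = s$a;
       d$b = s$b;

   with the original copy removed, scenario 2) stores s$a and s$b into the
   components of d directly and may also remove the copy, while scenario 3)
   keeps the copy and re-loads d$a and d$b from s afterwards.  */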
4221
4222 if (modify_this_stmt
4223 || gimple_has_volatile_ops (stmt)
4224 || contains_vce_or_bfcref_p (rhs)
4225 || contains_vce_or_bfcref_p (lhs)
4226 || stmt_ends_bb_p (stmt))
4227 {
4228 /* No need to copy into a constant-pool decl; it comes pre-initialized. */
4229 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
4230 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
4231 gsi, false, false, loc);
4232 if (access_has_children_p (lacc))
4233 {
4234 gimple_stmt_iterator alt_gsi = gsi_none ();
4235 if (stmt_ends_bb_p (stmt))
4236 {
4237 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
4238 gsi = &alt_gsi;
4239 }
4240 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
4241 gsi, true, true, loc);
4242 }
4243 sra_stats.separate_lhs_rhs_handling++;
4244
4245 /* This gimplification must be done after generate_subtree_copies,
4246 lest we insert the subtree copies in the middle of the gimplified
4247 sequence. */
4248 if (force_gimple_rhs)
4249 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
4250 true, GSI_SAME_STMT);
4251 if (gimple_assign_rhs1 (stmt) != rhs)
4252 {
4253 modify_this_stmt = true;
4254 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
4255 gcc_assert (stmt == gsi_stmt (orig_gsi));
4256 }
4257
4258 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
4259 }
4260 else
4261 {
4262 if (access_has_children_p (lacc)
4263 && access_has_children_p (racc)
4264 /* When an access represents an unscalarizable region, it usually
4265 represents accesses with variable offset and thus must not be used
4266 to generate new memory accesses. */
4267 && !lacc->grp_unscalarizable_region
4268 && !racc->grp_unscalarizable_region)
4269 {
4270 struct subreplacement_assignment_data sad;
4271
4272 sad.left_offset = lacc->offset;
4273 sad.assignment_lhs = lhs;
4274 sad.assignment_rhs = rhs;
4275 sad.top_racc = racc;
4276 sad.old_gsi = *gsi;
4277 sad.new_gsi = gsi;
4278 sad.loc = gimple_location (stmt);
4279 sad.refreshed = SRA_UDH_NONE;
4280
4281 if (lacc->grp_read && !lacc->grp_covered)
4282 handle_unscalarized_data_in_subtree (&sad);
4283
4284 load_assign_lhs_subreplacements (lacc, &sad);
4285 if (sad.refreshed != SRA_UDH_RIGHT)
4286 {
4287 gsi_next (gsi);
4288 unlink_stmt_vdef (stmt);
4289 gsi_remove (&sad.old_gsi, true);
4290 release_defs (stmt);
4291 sra_stats.deleted++;
4292 return SRA_AM_REMOVED;
4293 }
4294 }
4295 else
4296 {
4297 if (access_has_children_p (racc)
4298 && !racc->grp_unscalarized_data
4299 && TREE_CODE (lhs) != SSA_NAME)
4300 {
4301 if (dump_file)
4302 {
4303 fprintf (dump_file, "Removing load: ");
4304 print_gimple_stmt (dump_file, stmt, 0);
4305 }
4306 generate_subtree_copies (racc->first_child, lhs,
4307 racc->offset, 0, 0, gsi,
4308 false, false, loc);
4309 gcc_assert (stmt == gsi_stmt (*gsi));
4310 unlink_stmt_vdef (stmt);
4311 gsi_remove (gsi, true);
4312 release_defs (stmt);
4313 sra_stats.deleted++;
4314 return SRA_AM_REMOVED;
4315 }
4316 /* Restore the aggregate RHS from its components so the
4317 prevailing aggregate copy does the right thing. */
4318 if (access_has_children_p (racc))
4319 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
4320 gsi, false, false, loc);
4321 /* Re-load the components of the aggregate copy destination.
4322 But use the RHS aggregate to load from to expose more
4323 optimization opportunities. */
4324 if (access_has_children_p (lacc))
4325 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
4326 0, 0, gsi, true, true, loc);
4327 }
4328
4329 return SRA_AM_NONE;
4330 }
4331 }
4332
4333 /* Set any scalar replacements of values in the constant pool to the initial
4334 value of the constant. (Constant-pool decls like *.LC0 have effectively
4335 been initialized before the program starts, we must do the same for their
4336 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
4337 the function's entry block. */
4338
4339 static void
4340 initialize_constant_pool_replacements (void)
4341 {
4342 gimple_seq seq = NULL;
4343 gimple_stmt_iterator gsi = gsi_start (seq);
4344 bitmap_iterator bi;
4345 unsigned i;
4346
4347 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
4348 {
4349 tree var = candidate (i);
4350 if (!constant_decl_p (var))
4351 continue;
4352
4353 struct access *access = get_first_repr_for_decl (var);
4354
4355 while (access)
4356 {
4357 if (access->replacement_decl)
4358 {
4359 gassign *stmt
4360 = gimple_build_assign (get_access_replacement (access),
4361 unshare_expr (access->expr));
4362 if (dump_file && (dump_flags & TDF_DETAILS))
4363 {
4364 fprintf (dump_file, "Generating constant initializer: ");
4365 print_gimple_stmt (dump_file, stmt, 0);
4366 fprintf (dump_file, "\n");
4367 }
4368 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4369 update_stmt (stmt);
4370 }
4371
4372 if (access->first_child)
4373 access = access->first_child;
4374 else if (access->next_sibling)
4375 access = access->next_sibling;
4376 else
4377 {
4378 while (access->parent && !access->next_sibling)
4379 access = access->parent;
4380 if (access->next_sibling)
4381 access = access->next_sibling;
4382 else
4383 access = access->next_grp;
4384 }
4385 }
4386 }
4387
4388 seq = gsi_seq (gsi);
4389 if (seq)
4390 gsi_insert_seq_on_edge_immediate (
4391 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
4392 }
4393
4394 /* Traverse the function body and carry out all modifications as decided in
4395 analyze_all_variable_accesses. Return true iff the CFG has been
4396 changed. */
4397
4398 static bool
4399 sra_modify_function_body (void)
4400 {
4401 bool cfg_changed = false;
4402 basic_block bb;
4403
4404 initialize_constant_pool_replacements ();
4405
4406 FOR_EACH_BB_FN (bb, cfun)
4407 {
4408 gimple_stmt_iterator gsi = gsi_start_bb (bb);
4409 while (!gsi_end_p (gsi))
4410 {
4411 gimple *stmt = gsi_stmt (gsi);
4412 enum assignment_mod_result assign_result;
4413 bool modified = false, deleted = false;
4414 tree *t;
4415 unsigned i;
4416
4417 switch (gimple_code (stmt))
4418 {
4419 case GIMPLE_RETURN:
4420 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4421 if (*t != NULL_TREE)
4422 modified |= sra_modify_expr (t, &gsi, false);
4423 break;
4424
4425 case GIMPLE_ASSIGN:
4426 assign_result = sra_modify_assign (stmt, &gsi);
4427 modified |= assign_result == SRA_AM_MODIFIED;
4428 deleted = assign_result == SRA_AM_REMOVED;
4429 break;
4430
4431 case GIMPLE_CALL:
4432 /* Operands must be processed before the lhs. */
4433 for (i = 0; i < gimple_call_num_args (stmt); i++)
4434 {
4435 t = gimple_call_arg_ptr (stmt, i);
4436 modified |= sra_modify_expr (t, &gsi, false);
4437 }
4438
4439 if (gimple_call_lhs (stmt))
4440 {
4441 t = gimple_call_lhs_ptr (stmt);
4442 modified |= sra_modify_expr (t, &gsi, true);
4443 }
4444 break;
4445
4446 case GIMPLE_ASM:
4447 {
4448 gasm *asm_stmt = as_a <gasm *> (stmt);
4449 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4450 {
4451 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4452 modified |= sra_modify_expr (t, &gsi, false);
4453 }
4454 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4455 {
4456 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4457 modified |= sra_modify_expr (t, &gsi, true);
4458 }
4459 }
4460 break;
4461
4462 default:
4463 break;
4464 }
4465
4466 if (modified)
4467 {
4468 update_stmt (stmt);
4469 if (maybe_clean_eh_stmt (stmt)
4470 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4471 cfg_changed = true;
4472 }
4473 if (!deleted)
4474 gsi_next (&gsi);
4475 }
4476 }
4477
4478 gsi_commit_edge_inserts ();
4479 return cfg_changed;
4480 }
4481
4482 /* Generate statements initializing scalar replacements of parts of function
4483 parameters. */
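/* Illustrative sketch, not part of the pass (hypothetical names): for

       void foo (struct S s) { ... }

   whose parameter has replacements s$a and s$b, the statements

       s$a = s.a;
       s$b = s.b;

   are emitted on the single edge leaving the entry block.  */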
4484
4485 static void
4486 initialize_parameter_reductions (void)
4487 {
4488 gimple_stmt_iterator gsi;
4489 gimple_seq seq = NULL;
4490 tree parm;
4491
4492 gsi = gsi_start (seq);
4493 for (parm = DECL_ARGUMENTS (current_function_decl);
4494 parm;
4495 parm = DECL_CHAIN (parm))
4496 {
4497 vec<access_p> *access_vec;
4498 struct access *access;
4499
4500 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4501 continue;
4502 access_vec = get_base_access_vector (parm);
4503 if (!access_vec)
4504 continue;
4505
4506 for (access = (*access_vec)[0];
4507 access;
4508 access = access->next_grp)
4509 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
4510 EXPR_LOCATION (parm));
4511 }
4512
4513 seq = gsi_seq (gsi);
4514 if (seq)
4515 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
4516 }
4517
4518 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
4519 it reveals there are components of some aggregates to be scalarized, it runs
4520 the required transformations. */
4521 static unsigned int
4522 perform_intra_sra (void)
4523 {
4524 int ret = 0;
4525 sra_initialize ();
4526
4527 if (!find_var_candidates ())
4528 goto out;
4529
4530 if (!scan_function ())
4531 goto out;
4532
4533 if (!analyze_all_variable_accesses ())
4534 goto out;
4535
4536 if (sra_modify_function_body ())
4537 ret = TODO_update_ssa | TODO_cleanup_cfg;
4538 else
4539 ret = TODO_update_ssa;
4540 initialize_parameter_reductions ();
4541
4542 statistics_counter_event (cfun, "Scalar replacements created",
4543 sra_stats.replacements);
4544 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
4545 statistics_counter_event (cfun, "Subtree copy stmts",
4546 sra_stats.subtree_copies);
4547 statistics_counter_event (cfun, "Subreplacement stmts",
4548 sra_stats.subreplacements);
4549 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
4550 statistics_counter_event (cfun, "Separate LHS and RHS handling",
4551 sra_stats.separate_lhs_rhs_handling);
4552
4553 out:
4554 sra_deinitialize ();
4555 return ret;
4556 }
4557
4558 /* Perform early intraprocedural SRA. */
4559 static unsigned int
4560 early_intra_sra (void)
4561 {
4562 sra_mode = SRA_MODE_EARLY_INTRA;
4563 return perform_intra_sra ();
4564 }
4565
4566 /* Perform "late" intraprocedural SRA. */
4567 static unsigned int
4568 late_intra_sra (void)
4569 {
4570 sra_mode = SRA_MODE_INTRA;
4571 return perform_intra_sra ();
4572 }
4573
4574
4575 static bool
4576 gate_intra_sra (void)
4577 {
4578 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
4579 }
4580
4581
4582 namespace {
4583
4584 const pass_data pass_data_sra_early =
4585 {
4586 GIMPLE_PASS, /* type */
4587 "esra", /* name */
4588 OPTGROUP_NONE, /* optinfo_flags */
4589 TV_TREE_SRA, /* tv_id */
4590 ( PROP_cfg | PROP_ssa ), /* properties_required */
4591 0, /* properties_provided */
4592 0, /* properties_destroyed */
4593 0, /* todo_flags_start */
4594 TODO_update_ssa, /* todo_flags_finish */
4595 };
4596
4597 class pass_sra_early : public gimple_opt_pass
4598 {
4599 public:
4600 pass_sra_early (gcc::context *ctxt)
4601 : gimple_opt_pass (pass_data_sra_early, ctxt)
4602 {}
4603
4604 /* opt_pass methods: */
4605 virtual bool gate (function *) { return gate_intra_sra (); }
4606 virtual unsigned int execute (function *) { return early_intra_sra (); }
4607
4608 }; // class pass_sra_early
4609
4610 } // anon namespace
4611
4612 gimple_opt_pass *
4613 make_pass_sra_early (gcc::context *ctxt)
4614 {
4615 return new pass_sra_early (ctxt);
4616 }
4617
4618 namespace {
4619
4620 const pass_data pass_data_sra =
4621 {
4622 GIMPLE_PASS, /* type */
4623 "sra", /* name */
4624 OPTGROUP_NONE, /* optinfo_flags */
4625 TV_TREE_SRA, /* tv_id */
4626 ( PROP_cfg | PROP_ssa ), /* properties_required */
4627 0, /* properties_provided */
4628 0, /* properties_destroyed */
4629 TODO_update_address_taken, /* todo_flags_start */
4630 TODO_update_ssa, /* todo_flags_finish */
4631 };
4632
4633 class pass_sra : public gimple_opt_pass
4634 {
4635 public:
4636 pass_sra (gcc::context *ctxt)
4637 : gimple_opt_pass (pass_data_sra, ctxt)
4638 {}
4639
4640 /* opt_pass methods: */
4641 virtual bool gate (function *) { return gate_intra_sra (); }
4642 virtual unsigned int execute (function *) { return late_intra_sra (); }
4643
4644 }; // class pass_sra
4645
4646 } // anon namespace
4647
4648 gimple_opt_pass *
4649 make_pass_sra (gcc::context *ctxt)
4650 {
4651 return new pass_sra (ctxt);
4652 }