gcc/tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical, the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
73
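/* A hedged illustration of the intended transformation (all names below are
   made up for this comment only and do not appear elsewhere in the pass):

     struct point { int x; int y; };

     int
     foo (void)
     {
       struct point p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   may, after a successful run of the pass, effectively become

     int
     foo (void)
     {
       int p$x = 1;
       int p$y = 2;
       return p$x + p$y;
     }

   with p$x and p$y being independent scalar replacements (see
   make_fancy_name below for how such names are formed), provided the
   analysis in stages 1-3 finds nothing that disqualifies "p". */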
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "toplev.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "cgraph.h"
83 #include "tree-flow.h"
84 #include "ipa-prop.h"
85 #include "tree-pretty-print.h"
86 #include "statistics.h"
87 #include "tree-dump.h"
88 #include "timevar.h"
89 #include "params.h"
90 #include "target.h"
91 #include "flags.h"
92 #include "dbgcnt.h"
93 #include "tree-inline.h"
94 #include "gimple-pretty-print.h"
95
96 /* Enumeration of all aggregate reductions we can do. */
97 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
98 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
99 SRA_MODE_INTRA }; /* late intraprocedural SRA */
100
101 /* Global variable describing which aggregate reduction we are performing at
102 the moment. */
103 static enum sra_mode sra_mode;
104
105 struct assign_link;
106
107 /* ACCESS represents each access to an aggregate variable (as a whole or a
108 part). It can also represent a group of accesses that refer to exactly the
109 same fragment of an aggregate (i.e. those that have exactly the same offset
110 and size). Such representatives for a single aggregate, once determined,
111 are linked in a linked list and have the group fields set.
112
113 Moreover, when doing intraprocedural SRA, a tree is built from those
114 representatives (by the means of first_child and next_sibling pointers), in
115 which all items in a subtree are "within" the root, i.e. their offset is
116 greater or equal to offset of the root and offset+size is smaller or equal
117 to offset+size of the root. Children of an access are sorted by offset.
118
119 Note that accesses to parts of vector and complex number types are always
120 represented by an access to the whole complex number or vector. It is the
121 duty of the modifying functions to replace them appropriately. */
122
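/* A hypothetical illustration of the tree described above: for

     struct inner { int a; int b; };
     struct outer { struct inner in; int c; };

   and a variable of type struct outer, the representative covering the whole
   variable forms the root, the representatives of "in" and "c" are its
   children sorted by offset, and those of "in.a" and "in.b" are children of
   "in"; every node's [offset, offset + size) range lies within that of its
   parent. */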
123 struct access
124 {
125 /* Values returned by `get_ref_base_and_extent' for each component reference.
126 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
127 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
128 HOST_WIDE_INT offset;
129 HOST_WIDE_INT size;
130 tree base;
131
132 /* Expression. It is context dependent so do not use it to create new
133 expressions to access the original aggregate. See PR 42154 for a
134 testcase. */
135 tree expr;
136 /* Type. */
137 tree type;
138
139 /* The statement this access belongs to. */
140 gimple stmt;
141
142 /* Next group representative for this aggregate. */
143 struct access *next_grp;
144
145 /* Pointer to the group representative. Pointer to itself if the struct is
146 the representative. */
147 struct access *group_representative;
148
149 /* If this access has any children (in terms of the definition above), this
150 points to the first one. */
151 struct access *first_child;
152
153 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
154 described above. In IPA-SRA this is a pointer to the next access
155 belonging to the same group (having the same representative). */
156 struct access *next_sibling;
157
158 /* Pointers to the first and last element in the linked list of assign
159 links. */
160 struct assign_link *first_link, *last_link;
161
162 /* Pointer to the next access in the work queue. */
163 struct access *next_queued;
164
165 /* Replacement variable for this access "region." Never to be accessed
166 directly, always only by means of get_access_replacement() and only
167 when grp_to_be_replaced flag is set. */
168 tree replacement_decl;
169
170 /* Is this particular access a write access? */
171 unsigned write : 1;
172
173 /* Is this access an artificial one created to scalarize some record
174 entirely? */
175 unsigned total_scalarization : 1;
176
177 /* Is this access currently in the work queue? */
178 unsigned grp_queued : 1;
179
180 /* Does this group contain a write access? This flag is propagated down the
181 access tree. */
182 unsigned grp_write : 1;
183
184 /* Does this group contain a read access? This flag is propagated down the
185 access tree. */
186 unsigned grp_read : 1;
187
188 /* Does this group contain a read access that comes from an assignment
189 statement? This flag is propagated down the access tree. */
190 unsigned grp_assignment_read : 1;
191
192 /* Other passes of the analysis use this bit to make function
193 analyze_access_subtree create scalar replacements for this group if
194 possible. */
195 unsigned grp_hint : 1;
196
197 /* Is the subtree rooted in this access fully covered by scalar
198 replacements? */
199 unsigned grp_covered : 1;
200
201 /* If set to true, this access and all below it in an access tree must not be
202 scalarized. */
203 unsigned grp_unscalarizable_region : 1;
204
205 /* Whether data have been written to parts of the aggregate covered by this
206 access which are not to be scalarized. This flag is propagated up the
207 access tree. */
208 unsigned grp_unscalarized_data : 1;
209
210 /* Does this access and/or group contain a write access through a
211 BIT_FIELD_REF? */
212 unsigned grp_partial_lhs : 1;
213
214 /* Set when a scalar replacement should be created for this variable. The
215 decision and the creation happen at different places because create_tmp_var
216 cannot be called from within FOR_EACH_REFERENCED_VAR. */
217 unsigned grp_to_be_replaced : 1;
218
219 /* Is it possible that the group refers to data which might be (directly or
220 otherwise) modified? */
221 unsigned grp_maybe_modified : 1;
222
223 /* Set when this is a representative of a pointer to scalar (i.e. by
224 reference) parameter which we consider for turning into a plain scalar
225 (i.e. a by value parameter). */
226 unsigned grp_scalar_ptr : 1;
227
228 /* Set when we discover that this pointer is not safe to dereference in the
229 caller. */
230 unsigned grp_not_necessarilly_dereferenced : 1;
231 };
232
233 typedef struct access *access_p;
234
235 DEF_VEC_P (access_p);
236 DEF_VEC_ALLOC_P (access_p, heap);
237
238 /* Alloc pool for allocating access structures. */
239 static alloc_pool access_pool;
240
241 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
242 are used to propagate subaccesses from rhs to lhs as long as they don't
243 conflict with what is already there. */
244 struct assign_link
245 {
246 struct access *lacc, *racc;
247 struct assign_link *next;
248 };
249
250 /* Alloc pool for allocating assign link structures. */
251 static alloc_pool link_pool;
252
253 /* Base (tree) -> Vector (VEC(access_p,heap) *) map. */
254 static struct pointer_map_t *base_access_vec;
255
256 /* Bitmap of candidates. */
257 static bitmap candidate_bitmap;
258
259 /* Bitmap of candidates which we should try to entirely scalarize away and
260 those which cannot be (because they are, and need to be, used as a whole). */
261 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
262
263 /* Obstack for creation of fancy names. */
264 static struct obstack name_obstack;
265
266 /* Head of a linked list of accesses that need to have their subaccesses
267 propagated to their assignment counterparts. */
268 static struct access *work_queue_head;
269
270 /* Number of parameters of the analyzed function when doing early ipa SRA. */
271 static int func_param_count;
272
273 /* scan_function sets the following to true if it encounters a call to
274 __builtin_apply_args. */
275 static bool encountered_apply_args;
276
277 /* Set by scan_function when it finds a recursive call. */
278 static bool encountered_recursive_call;
279
280 /* Set by scan_function when it finds a recursive call with fewer actual
281 arguments than formal parameters. */
282 static bool encountered_unchangable_recursive_call;
283
284 /* This is a table in which for each basic block and parameter there is a
285 distance (offset + size) in that parameter which is dereferenced and
286 accessed in that BB. */
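/* The table is laid out as a flat array indexed by
   bb->index * func_param_count + parm_index (see mark_parm_dereference
   below); as a sketch, with two tracked parameters and three basic blocks
   it would, under that assumption, hold six HOST_WIDE_INT entries. */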
287 static HOST_WIDE_INT *bb_dereferences;
288 /* Bitmap of BBs that can cause the function to "stop" progressing by
289 returning, throwing externally, looping infinitely or calling a function
290 which might abort, etc. */
291 static bitmap final_bbs;
292
293 /* Representative of no accesses at all. */
294 static struct access no_accesses_representant;
295
296 /* Predicate to test the special value. */
297
298 static inline bool
299 no_accesses_p (struct access *access)
300 {
301 return access == &no_accesses_representant;
302 }
303
304 /* Counters of various events, gathered to report SRA statistics. */
307
308 static struct
309 {
310 /* Number of processed aggregates is readily available in
311 analyze_all_variable_accesses and so is not stored here. */
312
313 /* Number of created scalar replacements. */
314 int replacements;
315
316 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
317 expression. */
318 int exprs;
319
320 /* Number of statements created by generate_subtree_copies. */
321 int subtree_copies;
322
323 /* Number of statements created by load_assign_lhs_subreplacements. */
324 int subreplacements;
325
326 /* Number of times sra_modify_assign has deleted a statement. */
327 int deleted;
328
329 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
330 RHS separately due to type conversions or nonexistent matching
331 references. */
332 int separate_lhs_rhs_handling;
333
334 /* Number of parameters that were removed because they were unused. */
335 int deleted_unused_parameters;
336
337 /* Number of scalars passed as parameters by reference that have been
338 converted to be passed by value. */
339 int scalar_by_ref_to_by_val;
340
341 /* Number of aggregate parameters that were replaced by one or more of their
342 components. */
343 int aggregate_params_reduced;
344
345 /* Number of components created when splitting aggregate parameters. */
346 int param_reductions_created;
347 } sra_stats;
348
/* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are. */
349 static void
350 dump_access (FILE *f, struct access *access, bool grp)
351 {
352 fprintf (f, "access { ");
353 fprintf (f, "base = (%d)'", DECL_UID (access->base));
354 print_generic_expr (f, access->base, 0);
355 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
356 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
357 fprintf (f, ", expr = ");
358 print_generic_expr (f, access->expr, 0);
359 fprintf (f, ", type = ");
360 print_generic_expr (f, access->type, 0);
361 if (grp)
362 fprintf (f, ", grp_write = %d, total_scalarization = %d, "
363 "grp_read = %d, grp_hint = %d, grp_assignment_read = %d,"
364 "grp_covered = %d, grp_unscalarizable_region = %d, "
365 "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
366 "grp_to_be_replaced = %d, grp_maybe_modified = %d, "
367 "grp_not_necessarilly_dereferenced = %d\n",
368 access->grp_write, access->total_scalarization,
369 access->grp_read, access->grp_hint, access->grp_assignment_read,
370 access->grp_covered, access->grp_unscalarizable_region,
371 access->grp_unscalarized_data, access->grp_partial_lhs,
372 access->grp_to_be_replaced, access->grp_maybe_modified,
373 access->grp_not_necessarilly_dereferenced);
374 else
375 fprintf (f, ", write = %d, total_scalarization = %d, "
376 "grp_partial_lhs = %d\n",
377 access->write, access->total_scalarization,
378 access->grp_partial_lhs);
379 }
380
381 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
382
383 static void
384 dump_access_tree_1 (FILE *f, struct access *access, int level)
385 {
386 do
387 {
388 int i;
389
390 for (i = 0; i < level; i++)
391 fputs ("* ", f);
392
393 dump_access (f, access, true);
394
395 if (access->first_child)
396 dump_access_tree_1 (f, access->first_child, level + 1);
397
398 access = access->next_sibling;
399 }
400 while (access);
401 }
402
403 /* Dump all access trees for a variable, given the pointer to the first root in
404 ACCESS. */
405
406 static void
407 dump_access_tree (FILE *f, struct access *access)
408 {
409 for (; access; access = access->next_grp)
410 dump_access_tree_1 (f, access, 0);
411 }
412
413 /* Return true iff ACC is non-NULL and has subaccesses. */
414
415 static inline bool
416 access_has_children_p (struct access *acc)
417 {
418 return acc && acc->first_child;
419 }
420
421 /* Return a vector of pointers to accesses for the variable given in BASE or
422 NULL if there is none. */
423
424 static VEC (access_p, heap) *
425 get_base_access_vector (tree base)
426 {
427 void **slot;
428
429 slot = pointer_map_contains (base_access_vec, base);
430 if (!slot)
431 return NULL;
432 else
433 return *(VEC (access_p, heap) **) slot;
434 }
435
436 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
437 in ACCESS. Return NULL if it cannot be found. */
438
439 static struct access *
440 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
441 HOST_WIDE_INT size)
442 {
443 while (access && (access->offset != offset || access->size != size))
444 {
445 struct access *child = access->first_child;
446
447 while (child && (child->offset + child->size <= offset))
448 child = child->next_sibling;
449 access = child;
450 }
451
452 return access;
453 }
454
455 /* Return the first group representative for BASE or NULL if none exists. */
456
457 static struct access *
458 get_first_repr_for_decl (tree base)
459 {
460 VEC (access_p, heap) *access_vec;
461
462 access_vec = get_base_access_vector (base);
463 if (!access_vec)
464 return NULL;
465
466 return VEC_index (access_p, access_vec, 0);
467 }
468
469 /* Find an access representative for the variable BASE and given OFFSET and
470 SIZE. Requires that access trees have already been built. Return NULL if
471 it cannot be found. */
472
473 static struct access *
474 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
475 HOST_WIDE_INT size)
476 {
477 struct access *access;
478
479 access = get_first_repr_for_decl (base);
480 while (access && (access->offset + access->size <= offset))
481 access = access->next_grp;
482 if (!access)
483 return NULL;
484
485 return find_access_in_subtree (access, offset, size);
486 }
487
488 /* Add LINK to the linked list of assign links of RACC. */
489 static void
490 add_link_to_rhs (struct access *racc, struct assign_link *link)
491 {
492 gcc_assert (link->racc == racc);
493
494 if (!racc->first_link)
495 {
496 gcc_assert (!racc->last_link);
497 racc->first_link = link;
498 }
499 else
500 racc->last_link->next = link;
501
502 racc->last_link = link;
503 link->next = NULL;
504 }
505
506 /* Move all link structures from the linked list in OLD_RACC to the linked list
507 in NEW_RACC. */
508 static void
509 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
510 {
511 if (!old_racc->first_link)
512 {
513 gcc_assert (!old_racc->last_link);
514 return;
515 }
516
517 if (new_racc->first_link)
518 {
519 gcc_assert (!new_racc->last_link->next);
520 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
521
522 new_racc->last_link->next = old_racc->first_link;
523 new_racc->last_link = old_racc->last_link;
524 }
525 else
526 {
527 gcc_assert (!new_racc->last_link);
528
529 new_racc->first_link = old_racc->first_link;
530 new_racc->last_link = old_racc->last_link;
531 }
532 old_racc->first_link = old_racc->last_link = NULL;
533 }
534
535 /* Add ACCESS to the work queue (which is actually a stack). */
536
537 static void
538 add_access_to_work_queue (struct access *access)
539 {
540 if (!access->grp_queued)
541 {
542 gcc_assert (!access->next_queued);
543 access->next_queued = work_queue_head;
544 access->grp_queued = 1;
545 work_queue_head = access;
546 }
547 }
548
549 /* Pop an access from the work queue, and return it, assuming there is one. */
550
551 static struct access *
552 pop_access_from_work_queue (void)
553 {
554 struct access *access = work_queue_head;
555
556 work_queue_head = access->next_queued;
557 access->next_queued = NULL;
558 access->grp_queued = 0;
559 return access;
560 }
561
562
563 /* Allocate necessary structures. */
564
565 static void
566 sra_initialize (void)
567 {
568 candidate_bitmap = BITMAP_ALLOC (NULL);
569 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
570 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
571 gcc_obstack_init (&name_obstack);
572 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
573 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
574 base_access_vec = pointer_map_create ();
575 memset (&sra_stats, 0, sizeof (sra_stats));
576 encountered_apply_args = false;
577 encountered_recursive_call = false;
578 encountered_unchangable_recursive_call = false;
579 }
580
581 /* Hook fed to pointer_map_traverse, deallocate stored vectors. */
582
583 static bool
584 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
585 void *data ATTRIBUTE_UNUSED)
586 {
587 VEC (access_p, heap) *access_vec;
588 access_vec = (VEC (access_p, heap) *) *value;
589 VEC_free (access_p, heap, access_vec);
590
591 return true;
592 }
593
594 /* Deallocate all general structures. */
595
596 static void
597 sra_deinitialize (void)
598 {
599 BITMAP_FREE (candidate_bitmap);
600 BITMAP_FREE (should_scalarize_away_bitmap);
601 BITMAP_FREE (cannot_scalarize_away_bitmap);
602 free_alloc_pool (access_pool);
603 free_alloc_pool (link_pool);
604 obstack_free (&name_obstack, NULL);
605
606 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
607 pointer_map_destroy (base_access_vec);
608 }
609
610 /* Remove DECL from candidates for SRA and write REASON to the dump file if
611 there is one. */
612 static void
613 disqualify_candidate (tree decl, const char *reason)
614 {
615 bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
616
617 if (dump_file && (dump_flags & TDF_DETAILS))
618 {
619 fprintf (dump_file, "! Disqualifying ");
620 print_generic_expr (dump_file, decl, 0);
621 fprintf (dump_file, " - %s\n", reason);
622 }
623 }
624
625 /* Return true iff the type contains a field or an element which does not allow
626 scalarization. */
627
628 static bool
629 type_internals_preclude_sra_p (tree type)
630 {
631 tree fld;
632 tree et;
633
634 switch (TREE_CODE (type))
635 {
636 case RECORD_TYPE:
637 case UNION_TYPE:
638 case QUAL_UNION_TYPE:
639 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
640 if (TREE_CODE (fld) == FIELD_DECL)
641 {
642 tree ft = TREE_TYPE (fld);
643
644 if (TREE_THIS_VOLATILE (fld)
645 || !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
646 || !host_integerp (DECL_FIELD_OFFSET (fld), 1)
647 || !host_integerp (DECL_SIZE (fld), 1))
648 return true;
649
650 if (AGGREGATE_TYPE_P (ft)
651 && type_internals_preclude_sra_p (ft))
652 return true;
653 }
654
655 return false;
656
657 case ARRAY_TYPE:
658 et = TREE_TYPE (type);
659
660 if (AGGREGATE_TYPE_P (et))
661 return type_internals_preclude_sra_p (et);
662 else
663 return false;
664
665 default:
666 return false;
667 }
668 }
669
670 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
671 base variable if it is. Return T if it is not an SSA_NAME. */
672
673 static tree
674 get_ssa_base_param (tree t)
675 {
676 if (TREE_CODE (t) == SSA_NAME)
677 {
678 if (SSA_NAME_IS_DEFAULT_DEF (t))
679 return SSA_NAME_VAR (t);
680 else
681 return NULL_TREE;
682 }
683 return t;
684 }
685
686 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
687 belongs to, unless the BB has already been marked as potentially
688 final. */
689
690 static void
691 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
692 {
693 basic_block bb = gimple_bb (stmt);
694 int idx, parm_index = 0;
695 tree parm;
696
697 if (bitmap_bit_p (final_bbs, bb->index))
698 return;
699
700 for (parm = DECL_ARGUMENTS (current_function_decl);
701 parm && parm != base;
702 parm = DECL_CHAIN (parm))
703 parm_index++;
704
705 gcc_assert (parm_index < func_param_count);
706
707 idx = bb->index * func_param_count + parm_index;
708 if (bb_dereferences[idx] < dist)
709 bb_dereferences[idx] = dist;
710 }
711
712 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
713 the three fields. Also add it to the vector of accesses corresponding to
714 the base. Finally, return the new access. */
715
716 static struct access *
717 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
718 {
719 VEC (access_p, heap) *vec;
720 struct access *access;
721 void **slot;
722
723 access = (struct access *) pool_alloc (access_pool);
724 memset (access, 0, sizeof (struct access));
725 access->base = base;
726 access->offset = offset;
727 access->size = size;
728
729 slot = pointer_map_contains (base_access_vec, base);
730 if (slot)
731 vec = (VEC (access_p, heap) *) *slot;
732 else
733 vec = VEC_alloc (access_p, heap, 32);
734
735 VEC_safe_push (access_p, heap, vec, access);
736
737 *((struct VEC (access_p,heap) **)
738 pointer_map_insert (base_access_vec, base)) = vec;
739
740 return access;
741 }
742
743 /* Create and insert access for EXPR. Return created access, or NULL if it is
744 not possible. */
745
746 static struct access *
747 create_access (tree expr, gimple stmt, bool write)
748 {
749 struct access *access;
750 HOST_WIDE_INT offset, size, max_size;
751 tree base = expr;
752 bool ptr, unscalarizable_region = false;
753
754 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
755
756 if (sra_mode == SRA_MODE_EARLY_IPA
757 && TREE_CODE (base) == MEM_REF)
758 {
759 base = get_ssa_base_param (TREE_OPERAND (base, 0));
760 if (!base)
761 return NULL;
762 ptr = true;
763 }
764 else
765 ptr = false;
766
767 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
768 return NULL;
769
770 if (sra_mode == SRA_MODE_EARLY_IPA)
771 {
772 if (size < 0 || size != max_size)
773 {
774 disqualify_candidate (base, "Encountered a variable sized access.");
775 return NULL;
776 }
777 if (TREE_CODE (expr) == COMPONENT_REF
778 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
779 {
780 disqualify_candidate (base, "Encountered a bit-field access.");
781 return NULL;
782 }
783 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
784
785 if (ptr)
786 mark_parm_dereference (base, offset + size, stmt);
787 }
788 else
789 {
790 if (size != max_size)
791 {
792 size = max_size;
793 unscalarizable_region = true;
794 }
795 if (size < 0)
796 {
797 disqualify_candidate (base, "Encountered an unconstrained access.");
798 return NULL;
799 }
800 }
801
802 access = create_access_1 (base, offset, size);
803 access->expr = expr;
804 access->type = TREE_TYPE (expr);
805 access->write = write;
806 access->grp_unscalarizable_region = unscalarizable_region;
807 access->stmt = stmt;
808
809 return access;
810 }
811
812
813 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
814 register types or (recursively) records with only these two kinds of fields.
815 It also returns false if any of these records has a zero-size field as its
816 last field or has a bit-field. */
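/* For instance, a type such as

     struct ok { int i; struct { double d; } nested; };

   satisfies the predicate below, whereas a record containing a bit-field
   (e.g. "int flag : 1;") or one whose last field has zero size (e.g. a
   trailing "int tail[0];") does not. */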
817
818 static bool
819 type_consists_of_records_p (tree type)
820 {
821 tree fld;
822 bool last_fld_has_zero_size = false;
823
824 if (TREE_CODE (type) != RECORD_TYPE)
825 return false;
826
827 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
828 if (TREE_CODE (fld) == FIELD_DECL)
829 {
830 tree ft = TREE_TYPE (fld);
831
832 if (DECL_BIT_FIELD (fld))
833 return false;
834
835 if (!is_gimple_reg_type (ft)
836 && !type_consists_of_records_p (ft))
837 return false;
838
839 last_fld_has_zero_size = tree_low_cst (DECL_SIZE (fld), 1) == 0;
840 }
841
842 if (last_fld_has_zero_size)
843 return false;
844
845 return true;
846 }
847
848 /* Create total_scalarization accesses for all scalar type fields in DECL,
849 which must be of a RECORD_TYPE conforming to type_consists_of_records_p. BASE
850 must be the top-most VAR_DECL representing the variable, OFFSET must be the
851 offset of DECL within BASE. REF must be the memory reference expression for
852 the given decl. */
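/* As a sketch, assuming a 32-bit int: completely scalarizing a variable of
   type

     struct { struct { int a; int b; } in; int c; }

   creates total_scalarization accesses for "in.a" at bit offset 0, "in.b"
   at 32 and "c" at 64, each 32 bits wide; no access is created for the
   aggregate field "in" itself, only for its scalar leaves. */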
853
854 static void
855 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
856 tree ref)
857 {
858 tree fld, decl_type = TREE_TYPE (decl);
859
860 for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
861 if (TREE_CODE (fld) == FIELD_DECL)
862 {
863 HOST_WIDE_INT pos = offset + int_bit_position (fld);
864 tree ft = TREE_TYPE (fld);
865 tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
866 NULL_TREE);
867
868 if (is_gimple_reg_type (ft))
869 {
870 struct access *access;
871 HOST_WIDE_INT size;
872
873 size = tree_low_cst (DECL_SIZE (fld), 1);
874 access = create_access_1 (base, pos, size);
875 access->expr = nref;
876 access->type = ft;
877 access->total_scalarization = 1;
878 /* Accesses for intraprocedural SRA can have their stmt NULL. */
879 }
880 else
881 completely_scalarize_record (base, fld, pos, nref);
882 }
883 }
884
885
886 /* Search the given tree for a declaration by skipping handled components and
887 exclude it from the candidates. */
888
889 static void
890 disqualify_base_of_expr (tree t, const char *reason)
891 {
892 t = get_base_address (t);
893 if (sra_mode == SRA_MODE_EARLY_IPA
894 && TREE_CODE (t) == MEM_REF)
895 t = get_ssa_base_param (TREE_OPERAND (t, 0));
896
897 if (t && DECL_P (t))
898 disqualify_candidate (t, reason);
899 }
900
901 /* Scan expression EXPR and create access structures for all accesses to
902 candidates for scalarization. Return the created access or NULL if none is
903 created. */
904
905 static struct access *
906 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
907 {
908 struct access *ret = NULL;
909 bool partial_ref;
910
911 if (TREE_CODE (expr) == BIT_FIELD_REF
912 || TREE_CODE (expr) == IMAGPART_EXPR
913 || TREE_CODE (expr) == REALPART_EXPR)
914 {
915 expr = TREE_OPERAND (expr, 0);
916 partial_ref = true;
917 }
918 else
919 partial_ref = false;
920
921 /* We need to dive through V_C_Es in order to get the size of their operand
922 and not of the result type. Ada produces such statements. We are also
923 capable of handling the topmost V_C_E but not any of those buried in other
924 handled components. */
925 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
926 expr = TREE_OPERAND (expr, 0);
927
928 if (contains_view_convert_expr_p (expr))
929 {
930 disqualify_base_of_expr (expr, "V_C_E under a different handled "
931 "component.");
932 return NULL;
933 }
934
935 switch (TREE_CODE (expr))
936 {
937 case MEM_REF:
938 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
939 && sra_mode != SRA_MODE_EARLY_IPA)
940 return NULL;
941 /* fall through */
942 case VAR_DECL:
943 case PARM_DECL:
944 case RESULT_DECL:
945 case COMPONENT_REF:
946 case ARRAY_REF:
947 case ARRAY_RANGE_REF:
948 ret = create_access (expr, stmt, write);
949 break;
950
951 default:
952 break;
953 }
954
955 if (write && partial_ref && ret)
956 ret->grp_partial_lhs = 1;
957
958 return ret;
959 }
960
961 /* Scan expression EXPR and create access structures for all accesses to
962 candidates for scalarization. Return true if any access has been inserted.
963 STMT must be the statement from which the expression is taken, WRITE must be
964 true if the expression is a store and false otherwise. */
965
966 static bool
967 build_access_from_expr (tree expr, gimple stmt, bool write)
968 {
969 struct access *access;
970
971 access = build_access_from_expr_1 (expr, stmt, write);
972 if (access)
973 {
974 /* This means the aggregate is accessed as a whole in a way other than an
975 assign statement and thus cannot be removed even if we had a scalar
976 replacement for everything. */
977 if (cannot_scalarize_away_bitmap)
978 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
979 return true;
980 }
981 return false;
982 }
983
984 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
985 modes in which it matters, and return true iff they have been disqualified. RHS
986 may be NULL, in that case ignore it. If we scalarize an aggregate in
987 intra-SRA we may need to add statements after each statement. This is not
988 possible if a statement unconditionally has to end the basic block. */
989 static bool
990 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
991 {
992 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
993 && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
994 {
995 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
996 if (rhs)
997 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
998 return true;
999 }
1000 return false;
1001 }
1002
1003 /* Scan expressions occurring in STMT, create access structures for all accesses
1004 to candidates for scalarization and remove those candidates which occur in
1005 statements or expressions that prevent them from being split apart. Return
1006 true if any access has been inserted. */
1007
1008 static bool
1009 build_accesses_from_assign (gimple stmt)
1010 {
1011 tree lhs, rhs;
1012 struct access *lacc, *racc;
1013
1014 if (!gimple_assign_single_p (stmt))
1015 return false;
1016
1017 lhs = gimple_assign_lhs (stmt);
1018 rhs = gimple_assign_rhs1 (stmt);
1019
1020 if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1021 return false;
1022
1023 racc = build_access_from_expr_1 (rhs, stmt, false);
1024 lacc = build_access_from_expr_1 (lhs, stmt, true);
1025
1026 if (racc)
1027 {
1028 racc->grp_assignment_read = 1;
1029 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1030 && !is_gimple_reg_type (racc->type))
1031 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1032 }
1033
1034 if (lacc && racc
1035 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1036 && !lacc->grp_unscalarizable_region
1037 && !racc->grp_unscalarizable_region
1038 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1039 /* FIXME: Turn the following line into an assert after PR 40058 is
1040 fixed. */
1041 && lacc->size == racc->size
1042 && useless_type_conversion_p (lacc->type, racc->type))
1043 {
1044 struct assign_link *link;
1045
1046 link = (struct assign_link *) pool_alloc (link_pool);
1047 memset (link, 0, sizeof (struct assign_link));
1048
1049 link->lacc = lacc;
1050 link->racc = racc;
1051
1052 add_link_to_rhs (racc, link);
1053 }
1054
1055 return lacc || racc;
1056 }
1057
1058 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1059 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1060
1061 static bool
1062 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1063 void *data ATTRIBUTE_UNUSED)
1064 {
1065 op = get_base_address (op);
1066 if (op
1067 && DECL_P (op))
1068 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1069
1070 return false;
1071 }
1072
1073 /* Return true iff callsite CALL has at least as many actual arguments as there
1074 are formal parameters of the function currently processed by IPA-SRA. */
1075
1076 static inline bool
1077 callsite_has_enough_arguments_p (gimple call)
1078 {
1079 return gimple_call_num_args (call) >= (unsigned) func_param_count;
1080 }
1081
1082 /* Scan function and look for interesting expressions and create access
1083 structures for them. Return true iff any access is created. */
1084
1085 static bool
1086 scan_function (void)
1087 {
1088 basic_block bb;
1089 bool ret = false;
1090
1091 FOR_EACH_BB (bb)
1092 {
1093 gimple_stmt_iterator gsi;
1094 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1095 {
1096 gimple stmt = gsi_stmt (gsi);
1097 tree t;
1098 unsigned i;
1099
1100 if (final_bbs && stmt_can_throw_external (stmt))
1101 bitmap_set_bit (final_bbs, bb->index);
1102 switch (gimple_code (stmt))
1103 {
1104 case GIMPLE_RETURN:
1105 t = gimple_return_retval (stmt);
1106 if (t != NULL_TREE)
1107 ret |= build_access_from_expr (t, stmt, false);
1108 if (final_bbs)
1109 bitmap_set_bit (final_bbs, bb->index);
1110 break;
1111
1112 case GIMPLE_ASSIGN:
1113 ret |= build_accesses_from_assign (stmt);
1114 break;
1115
1116 case GIMPLE_CALL:
1117 for (i = 0; i < gimple_call_num_args (stmt); i++)
1118 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1119 stmt, false);
1120
1121 if (sra_mode == SRA_MODE_EARLY_IPA)
1122 {
1123 tree dest = gimple_call_fndecl (stmt);
1124 int flags = gimple_call_flags (stmt);
1125
1126 if (dest)
1127 {
1128 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1129 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1130 encountered_apply_args = true;
1131 if (cgraph_get_node (dest)
1132 == cgraph_get_node (current_function_decl))
1133 {
1134 encountered_recursive_call = true;
1135 if (!callsite_has_enough_arguments_p (stmt))
1136 encountered_unchangable_recursive_call = true;
1137 }
1138 }
1139
1140 if (final_bbs
1141 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1142 bitmap_set_bit (final_bbs, bb->index);
1143 }
1144
1145 t = gimple_call_lhs (stmt);
1146 if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1147 ret |= build_access_from_expr (t, stmt, true);
1148 break;
1149
1150 case GIMPLE_ASM:
1151 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1152 asm_visit_addr);
1153 if (final_bbs)
1154 bitmap_set_bit (final_bbs, bb->index);
1155
1156 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1157 {
1158 t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1159 ret |= build_access_from_expr (t, stmt, false);
1160 }
1161 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1162 {
1163 t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1164 ret |= build_access_from_expr (t, stmt, true);
1165 }
1166 break;
1167
1168 default:
1169 break;
1170 }
1171 }
1172 }
1173
1174 return ret;
1175 }
1176
1177 /* Helper of QSORT function. There are pointers to accesses in the array. An
1178 access is considered smaller than another if it has a smaller offset or if the
1179 offsets are the same but its size is bigger. */
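/* For example, hypothetical accesses with (offset, size) pairs
   (0, 64), (32, 32) and (0, 32) sort as (0, 64), (0, 32), (32, 32):
   equal offsets are ordered by decreasing size so that enclosing
   accesses come before the accesses they contain. */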
1180
1181 static int
1182 compare_access_positions (const void *a, const void *b)
1183 {
1184 const access_p *fp1 = (const access_p *) a;
1185 const access_p *fp2 = (const access_p *) b;
1186 const access_p f1 = *fp1;
1187 const access_p f2 = *fp2;
1188
1189 if (f1->offset != f2->offset)
1190 return f1->offset < f2->offset ? -1 : 1;
1191
1192 if (f1->size == f2->size)
1193 {
1194 if (f1->type == f2->type)
1195 return 0;
1196 /* Put any non-aggregate type before any aggregate type. */
1197 else if (!is_gimple_reg_type (f1->type)
1198 && is_gimple_reg_type (f2->type))
1199 return 1;
1200 else if (is_gimple_reg_type (f1->type)
1201 && !is_gimple_reg_type (f2->type))
1202 return -1;
1203 /* Put any complex or vector type before any other scalar type. */
1204 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1205 && TREE_CODE (f1->type) != VECTOR_TYPE
1206 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1207 || TREE_CODE (f2->type) == VECTOR_TYPE))
1208 return 1;
1209 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1210 || TREE_CODE (f1->type) == VECTOR_TYPE)
1211 && TREE_CODE (f2->type) != COMPLEX_TYPE
1212 && TREE_CODE (f2->type) != VECTOR_TYPE)
1213 return -1;
1214 /* Put the integral type with the bigger precision first. */
1215 else if (INTEGRAL_TYPE_P (f1->type)
1216 && INTEGRAL_TYPE_P (f2->type))
1217 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1218 /* Put any integral type with non-full precision last. */
1219 else if (INTEGRAL_TYPE_P (f1->type)
1220 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1221 != TYPE_PRECISION (f1->type)))
1222 return 1;
1223 else if (INTEGRAL_TYPE_P (f2->type)
1224 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1225 != TYPE_PRECISION (f2->type)))
1226 return -1;
1227 /* Stabilize the sort. */
1228 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1229 }
1230
1231 /* We want the bigger accesses first, thus the opposite operator in the next
1232 line: */
1233 return f1->size > f2->size ? -1 : 1;
1234 }
1235
1236
1237 /* Append a name of the declaration to the name obstack. A helper function for
1238 make_fancy_name. */
1239
1240 static void
1241 make_fancy_decl_name (tree decl)
1242 {
1243 char buffer[32];
1244
1245 tree name = DECL_NAME (decl);
1246 if (name)
1247 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1248 IDENTIFIER_LENGTH (name));
1249 else
1250 {
1251 sprintf (buffer, "D%u", DECL_UID (decl));
1252 obstack_grow (&name_obstack, buffer, strlen (buffer));
1253 }
1254 }
1255
1256 /* Helper for make_fancy_name. */
1257
1258 static void
1259 make_fancy_name_1 (tree expr)
1260 {
1261 char buffer[32];
1262 tree index;
1263
1264 if (DECL_P (expr))
1265 {
1266 make_fancy_decl_name (expr);
1267 return;
1268 }
1269
1270 switch (TREE_CODE (expr))
1271 {
1272 case COMPONENT_REF:
1273 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1274 obstack_1grow (&name_obstack, '$');
1275 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1276 break;
1277
1278 case ARRAY_REF:
1279 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1280 obstack_1grow (&name_obstack, '$');
1281 /* Arrays with only one element may not have a constant as their
1282 index. */
1283 index = TREE_OPERAND (expr, 1);
1284 if (TREE_CODE (index) != INTEGER_CST)
1285 break;
1286 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1287 obstack_grow (&name_obstack, buffer, strlen (buffer));
1288 break;
1289
1290 case ADDR_EXPR:
1291 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1292 break;
1293
1294 case MEM_REF:
1295 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1296 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1297 {
1298 obstack_1grow (&name_obstack, '$');
1299 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1300 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1301 obstack_grow (&name_obstack, buffer, strlen (buffer));
1302 }
1303 break;
1304
1305 case BIT_FIELD_REF:
1306 case REALPART_EXPR:
1307 case IMAGPART_EXPR:
1308 gcc_unreachable (); /* we treat these as scalars. */
1309 break;
1310 default:
1311 break;
1312 }
1313 }
1314
1315 /* Create a human readable name for a replacement variable based on EXPR. */
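/* For instance, an access expression "rec.fld" yields the name "rec$fld"
   and "arr[3].fld" yields "arr$3$fld"; a MEM_REF with a non-zero constant
   offset contributes a "$<offset>" component of its own. */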
1316
1317 static char *
1318 make_fancy_name (tree expr)
1319 {
1320 make_fancy_name_1 (expr);
1321 obstack_1grow (&name_obstack, '\0');
1322 return XOBFINISH (&name_obstack, char *);
1323 }
1324
1325 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1326 EXP_TYPE at the given OFFSET. If BASE is something for which
1327 get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
1328 to insert new statements either before or below the current one as specified
1329 by INSERT_AFTER. This function is not capable of handling bitfields. */
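/* As a hedged illustration: for BASE being a declared variable "s", OFFSET
   of 32 bits and EXP_TYPE of int, the result corresponds to the gimple
   memory reference MEM[(int *)&s + 4B], i.e. the four bytes at byte offset
   4 of "s" reinterpreted as an int; no new statements need to be inserted
   in that case because get_addr_base_and_unit_offset succeeds on a plain
   decl. */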
1330
1331 tree
1332 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1333 tree exp_type, gimple_stmt_iterator *gsi,
1334 bool insert_after)
1335 {
1336 tree prev_base = base;
1337 tree off;
1338 HOST_WIDE_INT base_offset;
1339
1340 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1341
1342 base = get_addr_base_and_unit_offset (base, &base_offset);
1343
1344 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1345 offset such as array[var_index]. */
1346 if (!base)
1347 {
1348 gimple stmt;
1349 tree tmp, addr;
1350
1351 gcc_checking_assert (gsi);
1352 tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1353 add_referenced_var (tmp);
1354 tmp = make_ssa_name (tmp, NULL);
1355 addr = build_fold_addr_expr (unshare_expr (prev_base));
1356 stmt = gimple_build_assign (tmp, addr);
1357 gimple_set_location (stmt, loc);
1358 SSA_NAME_DEF_STMT (tmp) = stmt;
1359 if (insert_after)
1360 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1361 else
1362 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1363 update_stmt (stmt);
1364
1365 off = build_int_cst (reference_alias_ptr_type (prev_base),
1366 offset / BITS_PER_UNIT);
1367 base = tmp;
1368 }
1369 else if (TREE_CODE (base) == MEM_REF)
1370 {
1371 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1372 base_offset + offset / BITS_PER_UNIT);
1373 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off, 0);
1374 base = unshare_expr (TREE_OPERAND (base, 0));
1375 }
1376 else
1377 {
1378 off = build_int_cst (reference_alias_ptr_type (base),
1379 base_offset + offset / BITS_PER_UNIT);
1380 base = build_fold_addr_expr (unshare_expr (base));
1381 }
1382
1383 return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1384 }
1385
1386 /* Construct a memory reference to a part of an aggregate BASE at the given
1387 OFFSET and of the same type as MODEL. In case this is a reference to a
1388 bit-field, the function will replicate the last component_ref of model's
1389 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1390 build_ref_for_offset. */
1391
1392 static tree
1393 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1394 struct access *model, gimple_stmt_iterator *gsi,
1395 bool insert_after)
1396 {
1397 if (TREE_CODE (model->expr) == COMPONENT_REF
1398 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1399 {
1400 /* This access represents a bit-field. */
1401 tree t, exp_type;
1402
1403 offset -= int_bit_position (TREE_OPERAND (model->expr, 1));
1404 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1405 t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
1406 return fold_build3_loc (loc, COMPONENT_REF, model->type, t,
1407 TREE_OPERAND (model->expr, 1), NULL_TREE);
1408 }
1409 else
1410 return build_ref_for_offset (loc, base, offset, model->type,
1411 gsi, insert_after);
1412 }
1413
1414 /* Construct a memory reference consisting of component_refs and array_refs to
1415 a part of an aggregate *RES (which is of type TYPE). The requested part
1416 should have type EXP_TYPE and be at the given OFFSET. This function might not
1417 succeed; it returns true when it does and only then *RES points to something
1418 meaningful. This function should be used only to build expressions that we
1419 might need to present to the user (e.g. in warnings). In all other situations,
1420 build_ref_for_model or build_ref_for_offset should be used instead. */
1421
1422 static bool
1423 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1424 tree exp_type)
1425 {
1426 while (1)
1427 {
1428 tree fld;
1429 tree tr_size, index, minidx;
1430 HOST_WIDE_INT el_size;
1431
1432 if (offset == 0 && exp_type
1433 && types_compatible_p (exp_type, type))
1434 return true;
1435
1436 switch (TREE_CODE (type))
1437 {
1438 case UNION_TYPE:
1439 case QUAL_UNION_TYPE:
1440 case RECORD_TYPE:
1441 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1442 {
1443 HOST_WIDE_INT pos, size;
1444 tree expr, *expr_ptr;
1445
1446 if (TREE_CODE (fld) != FIELD_DECL)
1447 continue;
1448
1449 pos = int_bit_position (fld);
1450 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1451 tr_size = DECL_SIZE (fld);
1452 if (!tr_size || !host_integerp (tr_size, 1))
1453 continue;
1454 size = tree_low_cst (tr_size, 1);
1455 if (size == 0)
1456 {
1457 if (pos != offset)
1458 continue;
1459 }
1460 else if (pos > offset || (pos + size) <= offset)
1461 continue;
1462
1463 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1464 NULL_TREE);
1465 expr_ptr = &expr;
1466 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1467 offset - pos, exp_type))
1468 {
1469 *res = expr;
1470 return true;
1471 }
1472 }
1473 return false;
1474
1475 case ARRAY_TYPE:
1476 tr_size = TYPE_SIZE (TREE_TYPE (type));
1477 if (!tr_size || !host_integerp (tr_size, 1))
1478 return false;
1479 el_size = tree_low_cst (tr_size, 1);
1480
1481 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1482 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1483 return false;
1484 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1485 if (!integer_zerop (minidx))
1486 index = int_const_binop (PLUS_EXPR, index, minidx, 0);
1487 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1488 NULL_TREE, NULL_TREE);
1489 offset = offset % el_size;
1490 type = TREE_TYPE (type);
1491 break;
1492
1493 default:
1494 if (offset != 0)
1495 return false;
1496
1497 if (exp_type)
1498 return false;
1499 else
1500 return true;
1501 }
1502 }
1503 }
1504
1505 /* Return true iff TYPE is the stdarg va_list type. */
1506
1507 static inline bool
1508 is_va_list_type (tree type)
1509 {
1510 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1511 }
1512
1513 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1514 those variables whose type is suitable for scalarization. */
1515
1516 static bool
1517 find_var_candidates (void)
1518 {
1519 tree var, type;
1520 referenced_var_iterator rvi;
1521 bool ret = false;
1522
1523 FOR_EACH_REFERENCED_VAR (var, rvi)
1524 {
1525 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1526 continue;
1527 type = TREE_TYPE (var);
1528
1529 if (!AGGREGATE_TYPE_P (type)
1530 || needs_to_live_in_memory (var)
1531 || TREE_THIS_VOLATILE (var)
1532 || !COMPLETE_TYPE_P (type)
1533 || !host_integerp (TYPE_SIZE (type), 1)
1534 || tree_low_cst (TYPE_SIZE (type), 1) == 0
1535 || type_internals_preclude_sra_p (type)
1536 /* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1537 we also want to schedule it rather late. Thus we ignore it in
1538 the early pass. */
1539 || (sra_mode == SRA_MODE_EARLY_INTRA
1540 && is_va_list_type (type)))
1541 continue;
1542
1543 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1544
1545 if (dump_file && (dump_flags & TDF_DETAILS))
1546 {
1547 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1548 print_generic_expr (dump_file, var, 0);
1549 fprintf (dump_file, "\n");
1550 }
1551 ret = true;
1552 }
1553
1554 return ret;
1555 }
1556
1557 /* Sort all accesses for the given variable, check for partial overlaps and
1558 return NULL if there are any. If there are none, pick a representative for
1559 each combination of offset and size and create a linked list out of them.
1560 Return the pointer to the first representative and make sure it is the first
1561 one in the vector of accesses. */
1562
1563 static struct access *
1564 sort_and_splice_var_accesses (tree var)
1565 {
1566 int i, j, access_count;
1567 struct access *res, **prev_acc_ptr = &res;
1568 VEC (access_p, heap) *access_vec;
1569 bool first = true;
1570 HOST_WIDE_INT low = -1, high = 0;
1571
1572 access_vec = get_base_access_vector (var);
1573 if (!access_vec)
1574 return NULL;
1575 access_count = VEC_length (access_p, access_vec);
1576
1577 /* Sort by <OFFSET, SIZE>. */
1578 qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
1579 compare_access_positions);
1580
1581 i = 0;
1582 while (i < access_count)
1583 {
1584 struct access *access = VEC_index (access_p, access_vec, i);
1585 bool grp_write = access->write;
1586 bool grp_read = !access->write;
1587 bool grp_assignment_read = access->grp_assignment_read;
1588 bool multiple_reads = false;
1589 bool total_scalarization = access->total_scalarization;
1590 bool grp_partial_lhs = access->grp_partial_lhs;
1591 bool first_scalar = is_gimple_reg_type (access->type);
1592 bool unscalarizable_region = access->grp_unscalarizable_region;
1593
1594 if (first || access->offset >= high)
1595 {
1596 first = false;
1597 low = access->offset;
1598 high = access->offset + access->size;
1599 }
1600 else if (access->offset > low && access->offset + access->size > high)
1601 return NULL;
1602 else
1603 gcc_assert (access->offset >= low
1604 && access->offset + access->size <= high);
1605
1606 j = i + 1;
1607 while (j < access_count)
1608 {
1609 struct access *ac2 = VEC_index (access_p, access_vec, j);
1610 if (ac2->offset != access->offset || ac2->size != access->size)
1611 break;
1612 if (ac2->write)
1613 grp_write = true;
1614 else
1615 {
1616 if (grp_read)
1617 multiple_reads = true;
1618 else
1619 grp_read = true;
1620 }
1621 grp_assignment_read |= ac2->grp_assignment_read;
1622 grp_partial_lhs |= ac2->grp_partial_lhs;
1623 unscalarizable_region |= ac2->grp_unscalarizable_region;
1624 total_scalarization |= ac2->total_scalarization;
1625 relink_to_new_repr (access, ac2);
1626
1627 /* If there are both aggregate-type and scalar-type accesses with
1628 this combination of size and offset, the comparison function
1629 should have put the scalars first. */
1630 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1631 ac2->group_representative = access;
1632 j++;
1633 }
1634
1635 i = j;
1636
1637 access->group_representative = access;
1638 access->grp_write = grp_write;
1639 access->grp_read = grp_read;
1640 access->grp_assignment_read = grp_assignment_read;
1641 access->grp_hint = multiple_reads || total_scalarization;
1642 access->grp_partial_lhs = grp_partial_lhs;
1643 access->grp_unscalarizable_region = unscalarizable_region;
1644 if (access->first_link)
1645 add_access_to_work_queue (access);
1646
1647 *prev_acc_ptr = access;
1648 prev_acc_ptr = &access->next_grp;
1649 }
1650
1651 gcc_assert (res == VEC_index (access_p, access_vec, 0));
1652 return res;
1653 }
1654
1655 /* Create a variable for the given ACCESS which determines the type, name and a
1656 few other properties. Return the variable declaration; callers store it in
1657 ACCESS->replacement_decl. */
1658
1659 static tree
1660 create_access_replacement (struct access *access, bool rename)
1661 {
1662 tree repl;
1663
1664 repl = create_tmp_var (access->type, "SR");
1665 get_var_ann (repl);
1666 add_referenced_var (repl);
1667 if (rename)
1668 mark_sym_for_renaming (repl);
1669
1670 if (!access->grp_partial_lhs
1671 && (TREE_CODE (access->type) == COMPLEX_TYPE
1672 || TREE_CODE (access->type) == VECTOR_TYPE))
1673 DECL_GIMPLE_REG_P (repl) = 1;
1674
1675 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1676 DECL_ARTIFICIAL (repl) = 1;
1677 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1678
1679 if (DECL_NAME (access->base)
1680 && !DECL_IGNORED_P (access->base)
1681 && !DECL_ARTIFICIAL (access->base))
1682 {
1683 char *pretty_name = make_fancy_name (access->expr);
1684 tree debug_expr = unshare_expr (access->expr), d;
1685
1686 DECL_NAME (repl) = get_identifier (pretty_name);
1687 obstack_free (&name_obstack, pretty_name);
1688
1689 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1690 as DECL_DEBUG_EXPR isn't considered when looking for still
1691 used SSA_NAMEs and thus they could be freed. All debug info
1692 generation cares about is whether something is constant or variable
1693 and that get_ref_base_and_extent works properly on the
1694 expression. */
1695 for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
1696 switch (TREE_CODE (d))
1697 {
1698 case ARRAY_REF:
1699 case ARRAY_RANGE_REF:
1700 if (TREE_OPERAND (d, 1)
1701 && TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
1702 TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
1703 if (TREE_OPERAND (d, 3)
1704 && TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
1705 TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
1706 /* FALLTHRU */
1707 case COMPONENT_REF:
1708 if (TREE_OPERAND (d, 2)
1709 && TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
1710 TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
1711 break;
1712 default:
1713 break;
1714 }
1715 SET_DECL_DEBUG_EXPR (repl, debug_expr);
1716 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1717 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1718 }
1719 else
1720 TREE_NO_WARNING (repl) = 1;
1721
1722 if (dump_file)
1723 {
1724 fprintf (dump_file, "Created a replacement for ");
1725 print_generic_expr (dump_file, access->base, 0);
1726 fprintf (dump_file, " offset: %u, size: %u: ",
1727 (unsigned) access->offset, (unsigned) access->size);
1728 print_generic_expr (dump_file, repl, 0);
1729 fprintf (dump_file, "\n");
1730 }
1731 sra_stats.replacements++;
1732
1733 return repl;
1734 }
1735
1736 /* Return ACCESS scalar replacement, create it if it does not exist yet. */
1737
1738 static inline tree
1739 get_access_replacement (struct access *access)
1740 {
1741 gcc_assert (access->grp_to_be_replaced);
1742
1743 if (!access->replacement_decl)
1744 access->replacement_decl = create_access_replacement (access, true);
1745 return access->replacement_decl;
1746 }
1747
1748 /* Return ACCESS scalar replacement, create it if it does not exist yet but do
1749 not mark it for renaming. */
1750
1751 static inline tree
1752 get_unrenamed_access_replacement (struct access *access)
1753 {
1754 gcc_assert (!access->grp_to_be_replaced);
1755
1756 if (!access->replacement_decl)
1757 access->replacement_decl = create_access_replacement (access, false);
1758 return access->replacement_decl;
1759 }
1760
1761
1762 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
1763 linked list along the way. Stop when *ACCESS is NULL or the access pointed
1764 to by it is not "within" the root. Return false iff some accesses partially
1765 overlap. */
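/* Continuing the hypothetical (offset, size) example used above for
   compare_access_positions: representatives ordered as (0, 64), (0, 32),
   (32, 32) produce a tree whose root is the (0, 64) access with the two
   smaller accesses as its children, because both of their ranges fall
   within the root's [0, 64) range. */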
1766
1767 static bool
1768 build_access_subtree (struct access **access)
1769 {
1770 struct access *root = *access, *last_child = NULL;
1771 HOST_WIDE_INT limit = root->offset + root->size;
1772
1773 *access = (*access)->next_grp;
1774 while (*access && (*access)->offset + (*access)->size <= limit)
1775 {
1776 if (!last_child)
1777 root->first_child = *access;
1778 else
1779 last_child->next_sibling = *access;
1780 last_child = *access;
1781
1782 if (!build_access_subtree (access))
1783 return false;
1784 }
1785
1786 if (*access && (*access)->offset < limit)
1787 return false;
1788
1789 return true;
1790 }
1791
1792 /* Build a tree of access representatives. ACCESS is the pointer to the first
1793 one, the others are linked in a list by the next_grp field. Return false iff
1794 some accesses partially overlap. */
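
/* As an illustrative sketch (assuming 32-bit int and float), for

     struct S { int i; float f; } s;

   accessed as "s", "s.i" and "s.f", the representatives might form the tree

     s        offset: 0,  size: 64
       s.i    offset: 0,  size: 32
       s.f    offset: 32, size: 32

   where both children lie entirely within their parent.  */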
1795
1796 static bool
1797 build_access_trees (struct access *access)
1798 {
1799 while (access)
1800 {
1801 struct access *root = access;
1802
1803 if (!build_access_subtree (&access))
1804 return false;
1805 root->next_grp = access;
1806 }
1807 return true;
1808 }
1809
1810 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
1811 array. */
1812
1813 static bool
1814 expr_with_var_bounded_array_refs_p (tree expr)
1815 {
1816 while (handled_component_p (expr))
1817 {
1818 if (TREE_CODE (expr) == ARRAY_REF
1819 && !host_integerp (array_ref_low_bound (expr), 0))
1820 return true;
1821 expr = TREE_OPERAND (expr, 0);
1822 }
1823 return false;
1824 }
1825
1826 enum mark_read_status { SRA_MR_NOT_READ, SRA_MR_READ, SRA_MR_ASSIGN_READ};
1827
1828 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
1829 that seems beneficial and ALLOW_REPLACEMENTS allows it. Also set all sorts
1830 of access flags appropriately along the way; notably, always set grp_read and
1831 grp_assignment_read according to MARK_READ and grp_write when MARK_WRITE is
1832 true. */
1833
1834 static bool
1835 analyze_access_subtree (struct access *root, bool allow_replacements,
1836 enum mark_read_status mark_read, bool mark_write)
1837 {
1838 struct access *child;
1839 HOST_WIDE_INT limit = root->offset + root->size;
1840 HOST_WIDE_INT covered_to = root->offset;
1841 bool scalar = is_gimple_reg_type (root->type);
1842 bool hole = false, sth_created = false;
1843 bool direct_read = root->grp_read;
1844
1845 if (mark_read == SRA_MR_ASSIGN_READ)
1846 {
1847 root->grp_read = 1;
1848 root->grp_assignment_read = 1;
1849 }
1850 if (mark_read == SRA_MR_READ)
1851 root->grp_read = 1;
1852 else if (root->grp_assignment_read)
1853 mark_read = SRA_MR_ASSIGN_READ;
1854 else if (root->grp_read)
1855 mark_read = SRA_MR_READ;
1856
1857 if (mark_write)
1858 root->grp_write = true;
1859 else if (root->grp_write)
1860 mark_write = true;
1861
1862 if (root->grp_unscalarizable_region)
1863 allow_replacements = false;
1864
1865 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
1866 allow_replacements = false;
1867
1868 for (child = root->first_child; child; child = child->next_sibling)
1869 {
1870 if (!hole && child->offset < covered_to)
1871 hole = true;
1872 else
1873 covered_to += child->size;
1874
1875 sth_created |= analyze_access_subtree (child,
1876 allow_replacements && !scalar,
1877 mark_read, mark_write);
1878
1879 root->grp_unscalarized_data |= child->grp_unscalarized_data;
1880 hole |= !child->grp_covered;
1881 }
1882
1883 if (allow_replacements && scalar && !root->first_child
1884 && (root->grp_hint
1885 || (root->grp_write && (direct_read || root->grp_assignment_read))))
1886 {
1887 if (dump_file && (dump_flags & TDF_DETAILS))
1888 {
1889 fprintf (dump_file, "Marking ");
1890 print_generic_expr (dump_file, root->base, 0);
1891 fprintf (dump_file, " offset: %u, size: %u: ",
1892 (unsigned) root->offset, (unsigned) root->size);
1893 fprintf (dump_file, " to be replaced.\n");
1894 }
1895
1896 root->grp_to_be_replaced = 1;
1897 sth_created = true;
1898 hole = false;
1899 }
1900 else if (covered_to < limit)
1901 hole = true;
1902
1903 if (sth_created && !hole)
1904 {
1905 root->grp_covered = 1;
1906 return true;
1907 }
1908 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
1909 root->grp_unscalarized_data = 1; /* not covered and written to */
1910 if (sth_created)
1911 return true;
1912 return false;
1913 }
1914
1915 /* Analyze all access trees linked by next_grp by means of
1916 analyze_access_subtree. */
1917 static bool
1918 analyze_access_trees (struct access *access)
1919 {
1920 bool ret = false;
1921
1922 while (access)
1923 {
1924 if (analyze_access_subtree (access, true, SRA_MR_NOT_READ, false))
1925 ret = true;
1926 access = access->next_grp;
1927 }
1928
1929 return ret;
1930 }
1931
1932 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
1933 size SIZE would conflict with an already existing one. If exactly such a
1934 child already exists in LACC, store a pointer to it in EXACT_MATCH. */
1935
1936 static bool
1937 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
1938 HOST_WIDE_INT size, struct access **exact_match)
1939 {
1940 struct access *child;
1941
1942 for (child = lacc->first_child; child; child = child->next_sibling)
1943 {
1944 if (child->offset == norm_offset && child->size == size)
1945 {
1946 *exact_match = child;
1947 return true;
1948 }
1949
1950 if (child->offset < norm_offset + size
1951 && child->offset + child->size > norm_offset)
1952 return true;
1953 }
1954
1955 return false;
1956 }
1957
1958 /* Create a new child access of PARENT, with all properties just like MODEL
1959 except for its offset and with its grp_write true and grp_read false.
1960 Return the new access or NULL if it cannot be created. Note that this access
1961 is created long after all splicing and sorting; it is not located in any
1962 access vector and is automatically a representative of its group. */
1963
1964 static struct access *
1965 create_artificial_child_access (struct access *parent, struct access *model,
1966 HOST_WIDE_INT new_offset)
1967 {
1968 struct access *access;
1969 struct access **child;
1970 tree expr = parent->base;
1971
1972 gcc_assert (!model->grp_unscalarizable_region);
1973 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
1974 model->type))
1975 return NULL;
1976
1977 access = (struct access *) pool_alloc (access_pool);
1978 memset (access, 0, sizeof (struct access));
1979 access->base = parent->base;
1980 access->expr = expr;
1981 access->offset = new_offset;
1982 access->size = model->size;
1983 access->type = model->type;
1984 access->grp_write = true;
1985 access->grp_read = false;
1986
1987 child = &parent->first_child;
1988 while (*child && (*child)->offset < new_offset)
1989 child = &(*child)->next_sibling;
1990
1991 access->next_sibling = *child;
1992 *child = access;
1993
1994 return access;
1995 }
1996
1997
1998 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
1999 true if any new subaccess was created. Additionally, if RACC is a scalar
2000 access but LACC is not, change the type of the latter, if possible. */
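
/* For instance (a sketch, names purely illustrative), given

     dst = src;

   where a subaccess for src.f (say at offset 32, size 32) has been recorded
   but none for dst, this propagation creates a corresponding artificial child
   access of dst at offset 32, so that the field can later be copied between
   scalar replacements rather than through the aggregates.  */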
2001
2002 static bool
2003 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2004 {
2005 struct access *rchild;
2006 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2007 bool ret = false;
2008
2009 if (is_gimple_reg_type (lacc->type)
2010 || lacc->grp_unscalarizable_region
2011 || racc->grp_unscalarizable_region)
2012 return false;
2013
2014 if (!lacc->first_child && !racc->first_child
2015 && is_gimple_reg_type (racc->type))
2016 {
2017 tree t = lacc->base;
2018
2019 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t), lacc->offset,
2020 racc->type))
2021 {
2022 lacc->expr = t;
2023 lacc->type = racc->type;
2024 }
2025 return false;
2026 }
2027
2028 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2029 {
2030 struct access *new_acc = NULL;
2031 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2032
2033 if (rchild->grp_unscalarizable_region)
2034 continue;
2035
2036 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2037 &new_acc))
2038 {
2039 if (new_acc)
2040 {
2041 rchild->grp_hint = 1;
2042 new_acc->grp_hint |= new_acc->grp_read;
2043 if (rchild->first_child)
2044 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2045 }
2046 continue;
2047 }
2048
2049 rchild->grp_hint = 1;
2050 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2051 if (new_acc)
2052 {
2053 ret = true;
2054 if (racc->first_child)
2055 propagate_subaccesses_across_link (new_acc, rchild);
2056 }
2057 }
2058
2059 return ret;
2060 }
2061
2062 /* Propagate all subaccesses across assignment links. */
2063
2064 static void
2065 propagate_all_subaccesses (void)
2066 {
2067 while (work_queue_head)
2068 {
2069 struct access *racc = pop_access_from_work_queue ();
2070 struct assign_link *link;
2071
2072 gcc_assert (racc->first_link);
2073
2074 for (link = racc->first_link; link; link = link->next)
2075 {
2076 struct access *lacc = link->lacc;
2077
2078 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2079 continue;
2080 lacc = lacc->group_representative;
2081 if (propagate_subaccesses_across_link (lacc, racc)
2082 && lacc->first_link)
2083 add_access_to_work_queue (lacc);
2084 }
2085 }
2086 }
2087
2088 /* Go through all accesses collected throughout the (intraprocedural) analysis
2089 stage, exclude overlapping ones, identify representatives and build trees
2090 out of them, making decisions about scalarization on the way. Return true
2091 iff there are any to-be-scalarized variables after this stage. */
2092
2093 static bool
2094 analyze_all_variable_accesses (void)
2095 {
2096 int res = 0;
2097 bitmap tmp = BITMAP_ALLOC (NULL);
2098 bitmap_iterator bi;
2099 unsigned i, max_total_scalarization_size;
2100
2101 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2102 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2103
2104 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2105 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2106 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2107 {
2108 tree var = referenced_var (i);
2109
2110 if (TREE_CODE (var) == VAR_DECL
2111 && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2112 <= max_total_scalarization_size)
2113 && type_consists_of_records_p (TREE_TYPE (var)))
2114 {
2115 completely_scalarize_record (var, var, 0, var);
2116 if (dump_file && (dump_flags & TDF_DETAILS))
2117 {
2118 fprintf (dump_file, "Will attempt to totally scalarize ");
2119 print_generic_expr (dump_file, var, 0);
2120 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2121 }
2122 }
2123 }
2124
2125 bitmap_copy (tmp, candidate_bitmap);
2126 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2127 {
2128 tree var = referenced_var (i);
2129 struct access *access;
2130
2131 access = sort_and_splice_var_accesses (var);
2132 if (!access || !build_access_trees (access))
2133 disqualify_candidate (var,
2134 "No or inhibitingly overlapping accesses.");
2135 }
2136
2137 propagate_all_subaccesses ();
2138
2139 bitmap_copy (tmp, candidate_bitmap);
2140 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2141 {
2142 tree var = referenced_var (i);
2143 struct access *access = get_first_repr_for_decl (var);
2144
2145 if (analyze_access_trees (access))
2146 {
2147 res++;
2148 if (dump_file && (dump_flags & TDF_DETAILS))
2149 {
2150 fprintf (dump_file, "\nAccess trees for ");
2151 print_generic_expr (dump_file, var, 0);
2152 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2153 dump_access_tree (dump_file, access);
2154 fprintf (dump_file, "\n");
2155 }
2156 }
2157 else
2158 disqualify_candidate (var, "No scalar replacements to be created.");
2159 }
2160
2161 BITMAP_FREE (tmp);
2162
2163 if (res)
2164 {
2165 statistics_counter_event (cfun, "Scalarized aggregates", res);
2166 return true;
2167 }
2168 else
2169 return false;
2170 }
2171
2172 /* Generate statements copying scalar replacements of accesses within a subtree
2173 into or out of AGG. ACCESS, all its children, siblings and their children
2174 are to be processed. AGG is an aggregate type expression (it can be a
2175 declaration but does not have to be; it can for example also be a MEM_REF or
2176 a series of handled components). TOP_OFFSET is the offset of the processed
2177 subtree which has to be subtracted from offsets of individual accesses to
2178 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2179 replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
2180 otherwise copy all. GSI is a statement iterator used to place the new
2181 statements. WRITE should be true when the statements should write from AGG
2182 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2183 statements will be added after the current statement in GSI, otherwise they
2184 will be added before it. LOC is the location used for the new statements. */
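
/* As a sketch (replacement names are purely illustrative): for an aggregate
   "s" with scalarized components s.i and s.f, a call with WRITE being true
   emits roughly

     SR_i = s.i;
     SR_f = s.f;

   whereas WRITE being false emits the copies in the opposite direction,

     s.i = SR_i;
     s.f = SR_f;

   placed after or before the statement at GSI according to INSERT_AFTER.  */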
2185
2186 static void
2187 generate_subtree_copies (struct access *access, tree agg,
2188 HOST_WIDE_INT top_offset,
2189 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2190 gimple_stmt_iterator *gsi, bool write,
2191 bool insert_after, location_t loc)
2192 {
2193 do
2194 {
2195 if (chunk_size && access->offset >= start_offset + chunk_size)
2196 return;
2197
2198 if (access->grp_to_be_replaced
2199 && (chunk_size == 0
2200 || access->offset + access->size > start_offset))
2201 {
2202 tree expr, repl = get_access_replacement (access);
2203 gimple stmt;
2204
2205 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2206 access, gsi, insert_after);
2207
2208 if (write)
2209 {
2210 if (access->grp_partial_lhs)
2211 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2212 !insert_after,
2213 insert_after ? GSI_NEW_STMT
2214 : GSI_SAME_STMT);
2215 stmt = gimple_build_assign (repl, expr);
2216 }
2217 else
2218 {
2219 TREE_NO_WARNING (repl) = 1;
2220 if (access->grp_partial_lhs)
2221 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2222 !insert_after,
2223 insert_after ? GSI_NEW_STMT
2224 : GSI_SAME_STMT);
2225 stmt = gimple_build_assign (expr, repl);
2226 }
2227 gimple_set_location (stmt, loc);
2228
2229 if (insert_after)
2230 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2231 else
2232 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2233 update_stmt (stmt);
2234 sra_stats.subtree_copies++;
2235 }
2236
2237 if (access->first_child)
2238 generate_subtree_copies (access->first_child, agg, top_offset,
2239 start_offset, chunk_size, gsi,
2240 write, insert_after, loc);
2241
2242 access = access->next_sibling;
2243 }
2244 while (access);
2245 }
2246
2247 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2248 the root of the subtree to be processed. GSI is the statement iterator used
2249 for inserting statements which are added after the current statement if
2250 INSERT_AFTER is true or before it otherwise. */
2251
2252 static void
2253 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2254 bool insert_after, location_t loc)
2255
2256 {
2257 struct access *child;
2258
2259 if (access->grp_to_be_replaced)
2260 {
2261 gimple stmt;
2262
2263 stmt = gimple_build_assign (get_access_replacement (access),
2264 fold_convert (access->type,
2265 integer_zero_node));
2266 if (insert_after)
2267 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2268 else
2269 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2270 update_stmt (stmt);
2271 gimple_set_location (stmt, loc);
2272 }
2273
2274 for (child = access->first_child; child; child = child->next_sibling)
2275 init_subtree_with_zero (child, gsi, insert_after, loc);
2276 }
2277
2278 /* Search for an access representative for the given expression EXPR and
2279 return it or NULL if it cannot be found. */
2280
2281 static struct access *
2282 get_access_for_expr (tree expr)
2283 {
2284 HOST_WIDE_INT offset, size, max_size;
2285 tree base;
2286
2287 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2288 a different size than the size of its argument and we need the latter
2289 one. */
2290 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2291 expr = TREE_OPERAND (expr, 0);
2292
2293 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2294 if (max_size == -1 || !DECL_P (base))
2295 return NULL;
2296
2297 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2298 return NULL;
2299
2300 return get_var_base_offset_size_access (base, offset, max_size);
2301 }
2302
2303 /* Replace the expression EXPR with a scalar replacement if there is one and
2304 generate other statements to do type conversion or subtree copying if
2305 necessary. GSI is used to place newly created statements, WRITE is true if
2306 the expression is being written to (it is on a LHS of a statement or output
2307 in an assembly statement). */
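
/* As a sketch (names illustrative): a use such as

     x_1 = s.i;

   where s.i has the scalar replacement SR_i simply becomes

     x_1 = SR_i;

   whereas if the types do not match (e.g. a complex or vector accessed as a
   different type), the original reference is left in the statement and an
   extra statement copying between the aggregate and the replacement is
   inserted, as described in the comment inside the function below.  */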
2308
2309 static bool
2310 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2311 {
2312 location_t loc;
2313 struct access *access;
2314 tree type, bfr;
2315
2316 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2317 {
2318 bfr = *expr;
2319 expr = &TREE_OPERAND (*expr, 0);
2320 }
2321 else
2322 bfr = NULL_TREE;
2323
2324 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2325 expr = &TREE_OPERAND (*expr, 0);
2326 access = get_access_for_expr (*expr);
2327 if (!access)
2328 return false;
2329 type = TREE_TYPE (*expr);
2330
2331 loc = gimple_location (gsi_stmt (*gsi));
2332 if (access->grp_to_be_replaced)
2333 {
2334 tree repl = get_access_replacement (access);
2335 /* If we replace a non-register typed access simply use the original
2336 access expression to extract the scalar component afterwards.
2337 This happens if scalarizing a function return value or parameter
2338 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2339 gcc.c-torture/compile/20011217-1.c.
2340
2341 We also want to use this when accessing a complex or vector which can
2342 be accessed as a different type too, potentially creating a need for
2343 type conversion (see PR42196) and when scalarized unions are involved
2344 in assembler statements (see PR42398). */
2345 if (!useless_type_conversion_p (type, access->type))
2346 {
2347 tree ref;
2348
2349 ref = build_ref_for_model (loc, access->base, access->offset, access,
2350 NULL, false);
2351
2352 if (write)
2353 {
2354 gimple stmt;
2355
2356 if (access->grp_partial_lhs)
2357 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2358 false, GSI_NEW_STMT);
2359 stmt = gimple_build_assign (repl, ref);
2360 gimple_set_location (stmt, loc);
2361 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2362 }
2363 else
2364 {
2365 gimple stmt;
2366
2367 if (access->grp_partial_lhs)
2368 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2369 true, GSI_SAME_STMT);
2370 stmt = gimple_build_assign (ref, repl);
2371 gimple_set_location (stmt, loc);
2372 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2373 }
2374 }
2375 else
2376 *expr = repl;
2377 sra_stats.exprs++;
2378 }
2379
2380 if (access->first_child)
2381 {
2382 HOST_WIDE_INT start_offset, chunk_size;
2383 if (bfr
2384 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2385 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2386 {
2387 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2388 start_offset = access->offset
2389 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2390 }
2391 else
2392 start_offset = chunk_size = 0;
2393
2394 generate_subtree_copies (access->first_child, access->base, 0,
2395 start_offset, chunk_size, gsi, write, write,
2396 loc);
2397 }
2398 return true;
2399 }
2400
2401 /* Where scalar replacements of the RHS have been written to when a replacement
2402 of the LHS of an assignment cannot be directly loaded from a replacement of
2403 the RHS. */
2404 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2405 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2406 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2407
2408 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2409 base aggregate if there are unscalarized data or directly to the LHS of the
2410 statement pointed to by GSI otherwise. */
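
/* Roughly (a sketch, names illustrative): for a statement "lhs = rhs;" where
   rhs is the base of TOP_RACC and still contains unscalarized data, this
   emits "rhs.x = SR_x; ..." before the statement so that the aggregate copy
   transfers up-to-date values and returns SRA_UDH_RIGHT; otherwise the
   replacements are stored directly into the corresponding parts of lhs and
   SRA_UDH_LEFT is returned.  */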
2411
2412 static enum unscalarized_data_handling
2413 handle_unscalarized_data_in_subtree (struct access *top_racc,
2414 gimple_stmt_iterator *gsi)
2415 {
2416 if (top_racc->grp_unscalarized_data)
2417 {
2418 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2419 gsi, false, false,
2420 gimple_location (gsi_stmt (*gsi)));
2421 return SRA_UDH_RIGHT;
2422 }
2423 else
2424 {
2425 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2426 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2427 0, 0, gsi, false, false,
2428 gimple_location (gsi_stmt (*gsi)));
2429 return SRA_UDH_LEFT;
2430 }
2431 }
2432
2433
2434 /* Try to generate statements to load all sub-replacements in an access subtree
2435 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2436 If that is not possible, refresh the TOP_RACC base aggregate and load the
2437 accesses from it. LEFT_OFFSET is the offset of the left whole subtree being
2438 copied. NEW_GSI is a statement iterator used for statement insertions after
2439 the original assignment, OLD_GSI is used to insert statements before the
2440 assignment. *REFRESHED keeps track of whether we have needed to refresh
2441 replacements of the LHS and, if so, from which side of the assignment this
2442 has taken place. */
2443
2444 static void
2445 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2446 HOST_WIDE_INT left_offset,
2447 gimple_stmt_iterator *old_gsi,
2448 gimple_stmt_iterator *new_gsi,
2449 enum unscalarized_data_handling *refreshed)
2450 {
2451 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2452 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2453 {
2454 if (lacc->grp_to_be_replaced)
2455 {
2456 struct access *racc;
2457 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2458 gimple stmt;
2459 tree rhs;
2460
2461 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2462 if (racc && racc->grp_to_be_replaced)
2463 {
2464 rhs = get_access_replacement (racc);
2465 if (!useless_type_conversion_p (lacc->type, racc->type))
2466 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2467 }
2468 else
2469 {
2470 /* No suitable access on the right hand side, need to load from
2471 the aggregate. See if we have to update it first... */
2472 if (*refreshed == SRA_UDH_NONE)
2473 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2474 old_gsi);
2475
2476 if (*refreshed == SRA_UDH_LEFT)
2477 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2478 new_gsi, true);
2479 else
2480 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2481 new_gsi, true);
2482 }
2483
2484 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2485 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2486 gimple_set_location (stmt, loc);
2487 update_stmt (stmt);
2488 sra_stats.subreplacements++;
2489 }
2490 else if (*refreshed == SRA_UDH_NONE
2491 && lacc->grp_read && !lacc->grp_covered)
2492 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2493 old_gsi);
2494
2495 if (lacc->first_child)
2496 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2497 old_gsi, new_gsi, refreshed);
2498 }
2499 }
2500
2501 /* Result code for SRA assignment modification. */
2502 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2503 SRA_AM_MODIFIED, /* stmt changed but not
2504 removed */
2505 SRA_AM_REMOVED }; /* stmt eliminated */
2506
2507 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2508 to the assignment and GSI is the statement iterator pointing at it. Returns
2509 the same values as sra_modify_assign. */
2510
2511 static enum assignment_mod_result
2512 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2513 {
2514 tree lhs = gimple_assign_lhs (*stmt);
2515 struct access *acc;
2516 location_t loc;
2517
2518 acc = get_access_for_expr (lhs);
2519 if (!acc)
2520 return SRA_AM_NONE;
2521
2522 loc = gimple_location (*stmt);
2523 if (VEC_length (constructor_elt,
2524 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2525 {
2526 /* I have never seen this code path trigger but if it can happen the
2527 following should handle it gracefully. */
2528 if (access_has_children_p (acc))
2529 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2530 true, true, loc);
2531 return SRA_AM_MODIFIED;
2532 }
2533
2534 if (acc->grp_covered)
2535 {
2536 init_subtree_with_zero (acc, gsi, false, loc);
2537 unlink_stmt_vdef (*stmt);
2538 gsi_remove (gsi, true);
2539 return SRA_AM_REMOVED;
2540 }
2541 else
2542 {
2543 init_subtree_with_zero (acc, gsi, true, loc);
2544 return SRA_AM_MODIFIED;
2545 }
2546 }
2547
2548 /* Create and return a new suitable default definition SSA_NAME for RACC which
2549 is an access describing an uninitialized part of an aggregate that is being
2550 loaded. */
2551
2552 static tree
2553 get_repl_default_def_ssa_name (struct access *racc)
2554 {
2555 tree repl, decl;
2556
2557 decl = get_unrenamed_access_replacement (racc);
2558
2559 repl = gimple_default_def (cfun, decl);
2560 if (!repl)
2561 {
2562 repl = make_ssa_name (decl, gimple_build_nop ());
2563 set_default_def (decl, repl);
2564 }
2565
2566 return repl;
2567 }
2568
2569 /* Examine both sides of the assignment statement pointed to by STMT, replace
2570 them with a scalar replacement if there is one and generate copying of
2571 replacements if scalarized aggregates have been used in the assignment. GSI
2572 is used to hold generated statements for type conversions and subtree
2573 copying. */
2574
2575 static enum assignment_mod_result
2576 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2577 {
2578 struct access *lacc, *racc;
2579 tree lhs, rhs;
2580 bool modify_this_stmt = false;
2581 bool force_gimple_rhs = false;
2582 location_t loc;
2583 gimple_stmt_iterator orig_gsi = *gsi;
2584
2585 if (!gimple_assign_single_p (*stmt))
2586 return SRA_AM_NONE;
2587 lhs = gimple_assign_lhs (*stmt);
2588 rhs = gimple_assign_rhs1 (*stmt);
2589
2590 if (TREE_CODE (rhs) == CONSTRUCTOR)
2591 return sra_modify_constructor_assign (stmt, gsi);
2592
2593 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2594 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2595 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2596 {
2597 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2598 gsi, false);
2599 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2600 gsi, true);
2601 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2602 }
2603
2604 lacc = get_access_for_expr (lhs);
2605 racc = get_access_for_expr (rhs);
2606 if (!lacc && !racc)
2607 return SRA_AM_NONE;
2608
2609 loc = gimple_location (*stmt);
2610 if (lacc && lacc->grp_to_be_replaced)
2611 {
2612 lhs = get_access_replacement (lacc);
2613 gimple_assign_set_lhs (*stmt, lhs);
2614 modify_this_stmt = true;
2615 if (lacc->grp_partial_lhs)
2616 force_gimple_rhs = true;
2617 sra_stats.exprs++;
2618 }
2619
2620 if (racc && racc->grp_to_be_replaced)
2621 {
2622 rhs = get_access_replacement (racc);
2623 modify_this_stmt = true;
2624 if (racc->grp_partial_lhs)
2625 force_gimple_rhs = true;
2626 sra_stats.exprs++;
2627 }
2628
2629 if (modify_this_stmt)
2630 {
2631 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2632 {
2633 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2634 ??? This should move to fold_stmt which we simply should
2635 call after building a VIEW_CONVERT_EXPR here. */
2636 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2637 && !access_has_children_p (lacc))
2638 {
2639 lhs = build_ref_for_offset (loc, lhs, 0, TREE_TYPE (rhs),
2640 gsi, false);
2641 gimple_assign_set_lhs (*stmt, lhs);
2642 }
2643 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2644 && !contains_view_convert_expr_p (rhs)
2645 && !access_has_children_p (racc))
2646 rhs = build_ref_for_offset (loc, rhs, 0, TREE_TYPE (lhs),
2647 gsi, false);
2648
2649 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2650 {
2651 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
2652 rhs);
2653 if (is_gimple_reg_type (TREE_TYPE (lhs))
2654 && TREE_CODE (lhs) != SSA_NAME)
2655 force_gimple_rhs = true;
2656 }
2657 }
2658 }
2659
2660 /* From this point on, the function deals with assignments in between
2661 aggregates when at least one has scalar reductions of some of its
2662 components. There are three possible scenarios: 1) both the LHS and the RHS
2663 have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
2664
2665 In the first case, we would like to load the LHS components from RHS
2666 components whenever possible. If that is not possible, we would like to
2667 read it directly from the RHS (after updating it by storing in it its own
2668 components). If there are some necessary unscalarized data in the LHS,
2669 those will be loaded by the original assignment too. If neither of these
2670 cases happen, the original statement can be removed. Most of this is done
2671 by load_assign_lhs_subreplacements.
2672
2673 In the second case, we would like to store all RHS scalarized components
2674 directly into LHS and if they cover the aggregate completely, remove the
2675 statement too. In the third case, we want the LHS components to be loaded
2676 directly from the RHS (DSE will remove the original statement if it
2677 becomes redundant).
2678
2679 This is a bit complex but manageable when types match and when unions do
2680 not cause confusion in a way that we cannot really load a component of LHS
2681 from the RHS or vice versa (the access representing this level can have
2682 subaccesses that are accessible only through a different union field at a
2683 higher level - different from the one used in the examined expression).
2684 Unions are fun.
2685
2686 Therefore, I specially handle a fourth case, happening when there is a
2687 specific type cast or it is impossible to locate a scalarized subaccess on
2688 the other side of the expression. If that happens, I simply "refresh" the
2689 RHS by storing its scalarized components in it, leave the original statement
2690 there to do the copying and then load the scalar replacements of the LHS.
2691 This is what the first branch does. */
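
  /* A purely illustrative sketch of the second scenario: for

       dst = src;

     where src.i and src.f are scalarized and together cover src completely,
     the statement can be rewritten into

       dst.i = SR_i;
       dst.f = SR_f;

     and the original assignment removed.  */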
2692
2693 if (gimple_has_volatile_ops (*stmt)
2694 || contains_view_convert_expr_p (rhs)
2695 || contains_view_convert_expr_p (lhs))
2696 {
2697 if (access_has_children_p (racc))
2698 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2699 gsi, false, false, loc);
2700 if (access_has_children_p (lacc))
2701 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2702 gsi, true, true, loc);
2703 sra_stats.separate_lhs_rhs_handling++;
2704 }
2705 else
2706 {
2707 if (access_has_children_p (lacc) && access_has_children_p (racc))
2708 {
2709 gimple_stmt_iterator orig_gsi = *gsi;
2710 enum unscalarized_data_handling refreshed;
2711
2712 if (lacc->grp_read && !lacc->grp_covered)
2713 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
2714 else
2715 refreshed = SRA_UDH_NONE;
2716
2717 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
2718 &orig_gsi, gsi, &refreshed);
2719 if (refreshed != SRA_UDH_RIGHT)
2720 {
2721 gsi_next (gsi);
2722 unlink_stmt_vdef (*stmt);
2723 gsi_remove (&orig_gsi, true);
2724 sra_stats.deleted++;
2725 return SRA_AM_REMOVED;
2726 }
2727 }
2728 else
2729 {
2730 if (racc)
2731 {
2732 if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
2733 {
2734 if (dump_file)
2735 {
2736 fprintf (dump_file, "Removing load: ");
2737 print_gimple_stmt (dump_file, *stmt, 0, 0);
2738 }
2739
2740 if (TREE_CODE (lhs) == SSA_NAME)
2741 {
2742 rhs = get_repl_default_def_ssa_name (racc);
2743 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2744 TREE_TYPE (rhs)))
2745 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
2746 TREE_TYPE (lhs), rhs);
2747 }
2748 else
2749 {
2750 if (racc->first_child)
2751 generate_subtree_copies (racc->first_child, lhs,
2752 racc->offset, 0, 0, gsi,
2753 false, false, loc);
2754
2755 gcc_assert (*stmt == gsi_stmt (*gsi));
2756 unlink_stmt_vdef (*stmt);
2757 gsi_remove (gsi, true);
2758 sra_stats.deleted++;
2759 return SRA_AM_REMOVED;
2760 }
2761 }
2762 else if (racc->first_child)
2763 generate_subtree_copies (racc->first_child, lhs, racc->offset,
2764 0, 0, gsi, false, true, loc);
2765 }
2766 if (access_has_children_p (lacc))
2767 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
2768 0, 0, gsi, true, true, loc);
2769 }
2770 }
2771
2772 /* This gimplification must be done after generate_subtree_copies, lest we
2773 insert the subtree copies in the middle of the gimplified sequence. */
2774 if (force_gimple_rhs)
2775 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
2776 true, GSI_SAME_STMT);
2777 if (gimple_assign_rhs1 (*stmt) != rhs)
2778 {
2779 modify_this_stmt = true;
2780 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
2781 gcc_assert (*stmt == gsi_stmt (orig_gsi));
2782 }
2783
2784 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2785 }
2786
2787 /* Traverse the function body and make all modifications decided on in
2788 analyze_all_variable_accesses. Return true iff the CFG has been
2789 changed. */
2790
2791 static bool
2792 sra_modify_function_body (void)
2793 {
2794 bool cfg_changed = false;
2795 basic_block bb;
2796
2797 FOR_EACH_BB (bb)
2798 {
2799 gimple_stmt_iterator gsi = gsi_start_bb (bb);
2800 while (!gsi_end_p (gsi))
2801 {
2802 gimple stmt = gsi_stmt (gsi);
2803 enum assignment_mod_result assign_result;
2804 bool modified = false, deleted = false;
2805 tree *t;
2806 unsigned i;
2807
2808 switch (gimple_code (stmt))
2809 {
2810 case GIMPLE_RETURN:
2811 t = gimple_return_retval_ptr (stmt);
2812 if (*t != NULL_TREE)
2813 modified |= sra_modify_expr (t, &gsi, false);
2814 break;
2815
2816 case GIMPLE_ASSIGN:
2817 assign_result = sra_modify_assign (&stmt, &gsi);
2818 modified |= assign_result == SRA_AM_MODIFIED;
2819 deleted = assign_result == SRA_AM_REMOVED;
2820 break;
2821
2822 case GIMPLE_CALL:
2823 /* Operands must be processed before the lhs. */
2824 for (i = 0; i < gimple_call_num_args (stmt); i++)
2825 {
2826 t = gimple_call_arg_ptr (stmt, i);
2827 modified |= sra_modify_expr (t, &gsi, false);
2828 }
2829
2830 if (gimple_call_lhs (stmt))
2831 {
2832 t = gimple_call_lhs_ptr (stmt);
2833 modified |= sra_modify_expr (t, &gsi, true);
2834 }
2835 break;
2836
2837 case GIMPLE_ASM:
2838 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
2839 {
2840 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
2841 modified |= sra_modify_expr (t, &gsi, false);
2842 }
2843 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
2844 {
2845 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
2846 modified |= sra_modify_expr (t, &gsi, true);
2847 }
2848 break;
2849
2850 default:
2851 break;
2852 }
2853
2854 if (modified)
2855 {
2856 update_stmt (stmt);
2857 if (maybe_clean_eh_stmt (stmt)
2858 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2859 cfg_changed = true;
2860 }
2861 if (!deleted)
2862 gsi_next (&gsi);
2863 }
2864 }
2865
2866 return cfg_changed;
2867 }
2868
2869 /* Generate statements initializing scalar replacements of parts of function
2870 parameters. */
2871
2872 static void
2873 initialize_parameter_reductions (void)
2874 {
2875 gimple_stmt_iterator gsi;
2876 gimple_seq seq = NULL;
2877 tree parm;
2878
2879 for (parm = DECL_ARGUMENTS (current_function_decl);
2880 parm;
2881 parm = DECL_CHAIN (parm))
2882 {
2883 VEC (access_p, heap) *access_vec;
2884 struct access *access;
2885
2886 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
2887 continue;
2888 access_vec = get_base_access_vector (parm);
2889 if (!access_vec)
2890 continue;
2891
2892 if (!seq)
2893 {
2894 seq = gimple_seq_alloc ();
2895 gsi = gsi_start (seq);
2896 }
2897
2898 for (access = VEC_index (access_p, access_vec, 0);
2899 access;
2900 access = access->next_grp)
2901 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
2902 EXPR_LOCATION (parm));
2903 }
2904
2905 if (seq)
2906 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
2907 }
2908
2909 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
2910 it reveals there are components of some aggregates to be scalarized, it runs
2911 the required transformations. */
2912 static unsigned int
2913 perform_intra_sra (void)
2914 {
2915 int ret = 0;
2916 sra_initialize ();
2917
2918 if (!find_var_candidates ())
2919 goto out;
2920
2921 if (!scan_function ())
2922 goto out;
2923
2924 if (!analyze_all_variable_accesses ())
2925 goto out;
2926
2927 if (sra_modify_function_body ())
2928 ret = TODO_update_ssa | TODO_cleanup_cfg;
2929 else
2930 ret = TODO_update_ssa;
2931 initialize_parameter_reductions ();
2932
2933 statistics_counter_event (cfun, "Scalar replacements created",
2934 sra_stats.replacements);
2935 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
2936 statistics_counter_event (cfun, "Subtree copy stmts",
2937 sra_stats.subtree_copies);
2938 statistics_counter_event (cfun, "Subreplacement stmts",
2939 sra_stats.subreplacements);
2940 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
2941 statistics_counter_event (cfun, "Separate LHS and RHS handling",
2942 sra_stats.separate_lhs_rhs_handling);
2943
2944 out:
2945 sra_deinitialize ();
2946 return ret;
2947 }
2948
2949 /* Perform early intraprocedural SRA. */
2950 static unsigned int
2951 early_intra_sra (void)
2952 {
2953 sra_mode = SRA_MODE_EARLY_INTRA;
2954 return perform_intra_sra ();
2955 }
2956
2957 /* Perform "late" intraprocedural SRA. */
2958 static unsigned int
2959 late_intra_sra (void)
2960 {
2961 sra_mode = SRA_MODE_INTRA;
2962 return perform_intra_sra ();
2963 }
2964
2965
2966 static bool
2967 gate_intra_sra (void)
2968 {
2969 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
2970 }
2971
2972
2973 struct gimple_opt_pass pass_sra_early =
2974 {
2975 {
2976 GIMPLE_PASS,
2977 "esra", /* name */
2978 gate_intra_sra, /* gate */
2979 early_intra_sra, /* execute */
2980 NULL, /* sub */
2981 NULL, /* next */
2982 0, /* static_pass_number */
2983 TV_TREE_SRA, /* tv_id */
2984 PROP_cfg | PROP_ssa, /* properties_required */
2985 0, /* properties_provided */
2986 0, /* properties_destroyed */
2987 0, /* todo_flags_start */
2988 TODO_dump_func
2989 | TODO_update_ssa
2990 | TODO_ggc_collect
2991 | TODO_verify_ssa /* todo_flags_finish */
2992 }
2993 };
2994
2995 struct gimple_opt_pass pass_sra =
2996 {
2997 {
2998 GIMPLE_PASS,
2999 "sra", /* name */
3000 gate_intra_sra, /* gate */
3001 late_intra_sra, /* execute */
3002 NULL, /* sub */
3003 NULL, /* next */
3004 0, /* static_pass_number */
3005 TV_TREE_SRA, /* tv_id */
3006 PROP_cfg | PROP_ssa, /* properties_required */
3007 0, /* properties_provided */
3008 0, /* properties_destroyed */
3009 TODO_update_address_taken, /* todo_flags_start */
3010 TODO_dump_func
3011 | TODO_update_ssa
3012 | TODO_ggc_collect
3013 | TODO_verify_ssa /* todo_flags_finish */
3014 }
3015 };
3016
3017
3018 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3019 parameter. */
3020
3021 static bool
3022 is_unused_scalar_param (tree parm)
3023 {
3024 tree name;
3025 return (is_gimple_reg (parm)
3026 && (!(name = gimple_default_def (cfun, parm))
3027 || has_zero_uses (name)));
3028 }
3029
3030 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3031 examine whether there are any direct or otherwise infeasible ones. If so,
3032 return true, otherwise return false. PARM must be a gimple register with a
3033 non-NULL default definition. */
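
/* For instance (a hedged sketch), with "void foo (int *p)":

     *p = 0;      is only a dereference and does not count as a direct use,
     use (*p);    likewise, whereas
     q = p;       uses the pointer value itself and makes this return true.

   Only dereferences of the whole pointed-to object (a MEM_REF of the SSA name
   with a zero offset and a compatible type) are considered valid uses here. */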
3034
3035 static bool
3036 ptr_parm_has_direct_uses (tree parm)
3037 {
3038 imm_use_iterator ui;
3039 gimple stmt;
3040 tree name = gimple_default_def (cfun, parm);
3041 bool ret = false;
3042
3043 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3044 {
3045 int uses_ok = 0;
3046 use_operand_p use_p;
3047
3048 if (is_gimple_debug (stmt))
3049 continue;
3050
3051 /* Valid uses include dereferences on the lhs and the rhs. */
3052 if (gimple_has_lhs (stmt))
3053 {
3054 tree lhs = gimple_get_lhs (stmt);
3055 while (handled_component_p (lhs))
3056 lhs = TREE_OPERAND (lhs, 0);
3057 if (TREE_CODE (lhs) == MEM_REF
3058 && TREE_OPERAND (lhs, 0) == name
3059 && integer_zerop (TREE_OPERAND (lhs, 1))
3060 && types_compatible_p (TREE_TYPE (lhs),
3061 TREE_TYPE (TREE_TYPE (name))))
3062 uses_ok++;
3063 }
3064 if (gimple_assign_single_p (stmt))
3065 {
3066 tree rhs = gimple_assign_rhs1 (stmt);
3067 while (handled_component_p (rhs))
3068 rhs = TREE_OPERAND (rhs, 0);
3069 if (TREE_CODE (rhs) == MEM_REF
3070 && TREE_OPERAND (rhs, 0) == name
3071 && integer_zerop (TREE_OPERAND (rhs, 1))
3072 && types_compatible_p (TREE_TYPE (rhs),
3073 TREE_TYPE (TREE_TYPE (name))))
3074 uses_ok++;
3075 }
3076 else if (is_gimple_call (stmt))
3077 {
3078 unsigned i;
3079 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3080 {
3081 tree arg = gimple_call_arg (stmt, i);
3082 while (handled_component_p (arg))
3083 arg = TREE_OPERAND (arg, 0);
3084 if (TREE_CODE (arg) == MEM_REF
3085 && TREE_OPERAND (arg, 0) == name
3086 && integer_zerop (TREE_OPERAND (arg, 1))
3087 && types_compatible_p (TREE_TYPE (arg),
3088 TREE_TYPE (TREE_TYPE (name))))
3089 uses_ok++;
3090 }
3091 }
3092
3093 /* If the number of valid uses does not match the number of
3094 uses in this stmt there is an unhandled use. */
3095 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3096 --uses_ok;
3097
3098 if (uses_ok != 0)
3099 ret = true;
3100
3101 if (ret)
3102 BREAK_FROM_IMM_USE_STMT (ui);
3103 }
3104
3105 return ret;
3106 }
3107
3108 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3109 them in candidate_bitmap. Note that these do not necessarily include
3110 parameters which are unused and thus can be removed. Return true iff any
3111 such candidate has been found. */
3112
3113 static bool
3114 find_param_candidates (void)
3115 {
3116 tree parm;
3117 int count = 0;
3118 bool ret = false;
3119
3120 for (parm = DECL_ARGUMENTS (current_function_decl);
3121 parm;
3122 parm = DECL_CHAIN (parm))
3123 {
3124 tree type = TREE_TYPE (parm);
3125
3126 count++;
3127
3128 if (TREE_THIS_VOLATILE (parm)
3129 || TREE_ADDRESSABLE (parm)
3130 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3131 continue;
3132
3133 if (is_unused_scalar_param (parm))
3134 {
3135 ret = true;
3136 continue;
3137 }
3138
3139 if (POINTER_TYPE_P (type))
3140 {
3141 type = TREE_TYPE (type);
3142
3143 if (TREE_CODE (type) == FUNCTION_TYPE
3144 || TYPE_VOLATILE (type)
3145 || (TREE_CODE (type) == ARRAY_TYPE
3146 && TYPE_NONALIASED_COMPONENT (type))
3147 || !is_gimple_reg (parm)
3148 || is_va_list_type (type)
3149 || ptr_parm_has_direct_uses (parm))
3150 continue;
3151 }
3152 else if (!AGGREGATE_TYPE_P (type))
3153 continue;
3154
3155 if (!COMPLETE_TYPE_P (type)
3156 || !host_integerp (TYPE_SIZE (type), 1)
3157 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3158 || (AGGREGATE_TYPE_P (type)
3159 && type_internals_preclude_sra_p (type)))
3160 continue;
3161
3162 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3163 ret = true;
3164 if (dump_file && (dump_flags & TDF_DETAILS))
3165 {
3166 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3167 print_generic_expr (dump_file, parm, 0);
3168 fprintf (dump_file, "\n");
3169 }
3170 }
3171
3172 func_param_count = count;
3173 return ret;
3174 }
3175
3176 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3177 maybe_modified. */
3178
3179 static bool
3180 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3181 void *data)
3182 {
3183 struct access *repr = (struct access *) data;
3184
3185 repr->grp_maybe_modified = 1;
3186 return true;
3187 }
3188
3189 /* Analyze what representatives (in linked lists accessible from
3190 REPRESENTATIVES) can be modified by side effects of statements in the
3191 current function. */
3192
3193 static void
3194 analyze_modified_params (VEC (access_p, heap) *representatives)
3195 {
3196 int i;
3197
3198 for (i = 0; i < func_param_count; i++)
3199 {
3200 struct access *repr;
3201
3202 for (repr = VEC_index (access_p, representatives, i);
3203 repr;
3204 repr = repr->next_grp)
3205 {
3206 struct access *access;
3207 bitmap visited;
3208 ao_ref ar;
3209
3210 if (no_accesses_p (repr))
3211 continue;
3212 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3213 || repr->grp_maybe_modified)
3214 continue;
3215
3216 ao_ref_init (&ar, repr->expr);
3217 visited = BITMAP_ALLOC (NULL);
3218 for (access = repr; access; access = access->next_sibling)
3219 {
3220 /* All accesses are read ones, otherwise grp_maybe_modified would
3221 be trivially set. */
3222 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3223 mark_maybe_modified, repr, &visited);
3224 if (repr->grp_maybe_modified)
3225 break;
3226 }
3227 BITMAP_FREE (visited);
3228 }
3229 }
3230 }
3231
3232 /* Propagate distances in bb_dereferences in the opposite direction to the
3233 control flow edges, in each step storing the maximum of the current value
3234 and the minimum of all successors. These steps are repeated until the table
3235 stabilizes. Note that BBs which might terminate the function (according to
3236 the final_bbs bitmap) are never updated in this way. */
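
/* A small numeric sketch: if for parameter P one successor of a BB guarantees
   a dereference distance (offset + size) of 64 bits but another successor
   guarantees only 32 bits, the BB inherits the maximum of its current value
   and 32; only distances guaranteed on every outgoing path are propagated
   upwards.  */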
3237
3238 static void
3239 propagate_dereference_distances (void)
3240 {
3241 VEC (basic_block, heap) *queue;
3242 basic_block bb;
3243
3244 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3245 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3246 FOR_EACH_BB (bb)
3247 {
3248 VEC_quick_push (basic_block, queue, bb);
3249 bb->aux = bb;
3250 }
3251
3252 while (!VEC_empty (basic_block, queue))
3253 {
3254 edge_iterator ei;
3255 edge e;
3256 bool change = false;
3257 int i;
3258
3259 bb = VEC_pop (basic_block, queue);
3260 bb->aux = NULL;
3261
3262 if (bitmap_bit_p (final_bbs, bb->index))
3263 continue;
3264
3265 for (i = 0; i < func_param_count; i++)
3266 {
3267 int idx = bb->index * func_param_count + i;
3268 bool first = true;
3269 HOST_WIDE_INT inh = 0;
3270
3271 FOR_EACH_EDGE (e, ei, bb->succs)
3272 {
3273 int succ_idx = e->dest->index * func_param_count + i;
3274
3275 if (e->src == EXIT_BLOCK_PTR)
3276 continue;
3277
3278 if (first)
3279 {
3280 first = false;
3281 inh = bb_dereferences [succ_idx];
3282 }
3283 else if (bb_dereferences [succ_idx] < inh)
3284 inh = bb_dereferences [succ_idx];
3285 }
3286
3287 if (!first && bb_dereferences[idx] < inh)
3288 {
3289 bb_dereferences[idx] = inh;
3290 change = true;
3291 }
3292 }
3293
3294 if (change && !bitmap_bit_p (final_bbs, bb->index))
3295 FOR_EACH_EDGE (e, ei, bb->preds)
3296 {
3297 if (e->src->aux)
3298 continue;
3299
3300 e->src->aux = e->src;
3301 VEC_quick_push (basic_block, queue, e->src);
3302 }
3303 }
3304
3305 VEC_free (basic_block, heap, queue);
3306 }
3307
3308 /* Dump a dereferences TABLE with heading STR to file F. */
3309
3310 static void
3311 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3312 {
3313 basic_block bb;
3314
3315 fprintf (f, "%s", str);
3316 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3317 {
3318 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3319 if (bb != EXIT_BLOCK_PTR)
3320 {
3321 int i;
3322 for (i = 0; i < func_param_count; i++)
3323 {
3324 int idx = bb->index * func_param_count + i;
3325 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3326 }
3327 }
3328 fprintf (f, "\n");
3329 }
3330 fprintf (f, "\n");
3331 }
3332
3333 /* Determine which (parts of) parameters passed by reference and not assigned
3334 to are not certainly dereferenced in this function, and thus the
3335 dereferencing cannot be safely moved to the caller without potentially
3336 introducing a segfault. Mark such REPRESENTATIVES as
3337 grp_not_necessarilly_dereferenced.
3338
3339 The maximum dereferenced "distance," i.e. the offset plus the size of the
3340 accessed part, is calculated for each pointer parameter rather than a simple
3341 boolean, in order to handle cases when only a fraction of the whole
3342 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3343 an example).
3344
3345 The maximum dereference distances for each pointer parameter and BB are
3346 already stored in bb_dereferences. This routine simply propagates these
3347 values upwards by propagate_dereference_distances and then compares the
3348 distances of individual parameters in the ENTRY BB to the equivalent
3349 distances of each representative of a (fraction of a) parameter. */
3350
3351 static void
3352 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3353 {
3354 int i;
3355
3356 if (dump_file && (dump_flags & TDF_DETAILS))
3357 dump_dereferences_table (dump_file,
3358 "Dereference table before propagation:\n",
3359 bb_dereferences);
3360
3361 propagate_dereference_distances ();
3362
3363 if (dump_file && (dump_flags & TDF_DETAILS))
3364 dump_dereferences_table (dump_file,
3365 "Dereference table after propagation:\n",
3366 bb_dereferences);
3367
3368 for (i = 0; i < func_param_count; i++)
3369 {
3370 struct access *repr = VEC_index (access_p, representatives, i);
3371 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3372
3373 if (!repr || no_accesses_p (repr))
3374 continue;
3375
3376 do
3377 {
3378 if ((repr->offset + repr->size) > bb_dereferences[idx])
3379 repr->grp_not_necessarilly_dereferenced = 1;
3380 repr = repr->next_grp;
3381 }
3382 while (repr);
3383 }
3384 }
3385
3386 /* Return the representative access for the parameter declaration PARM if it is
3387 a scalar passed by reference which is not written to and the pointer value
3388 is not used directly. Thus, if it is legal to dereference it in the caller
3389 and we can rule out modifications through aliases, such a parameter should be
3390 turned into one passed by value. Return NULL otherwise. */
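
/* The typical case (an illustrative example, not taken from the testsuite) is

     int foo (const int *p) { return *p + 1; }

   where *p is only read and p itself never escapes; IPA-SRA may then turn the
   parameter into an int passed by value, provided the caller-side dereference
   is also deemed safe later on.  */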
3391
3392 static struct access *
3393 unmodified_by_ref_scalar_representative (tree parm)
3394 {
3395 int i, access_count;
3396 struct access *repr;
3397 VEC (access_p, heap) *access_vec;
3398
3399 access_vec = get_base_access_vector (parm);
3400 gcc_assert (access_vec);
3401 repr = VEC_index (access_p, access_vec, 0);
3402 if (repr->write)
3403 return NULL;
3404 repr->group_representative = repr;
3405
3406 access_count = VEC_length (access_p, access_vec);
3407 for (i = 1; i < access_count; i++)
3408 {
3409 struct access *access = VEC_index (access_p, access_vec, i);
3410 if (access->write)
3411 return NULL;
3412 access->group_representative = repr;
3413 access->next_sibling = repr->next_sibling;
3414 repr->next_sibling = access;
3415 }
3416
3417 repr->grp_read = 1;
3418 repr->grp_scalar_ptr = 1;
3419 return repr;
3420 }
3421
3422 /* Return true iff this access precludes IPA-SRA of the parameter it is
3423 associated with. */
3424
3425 static bool
3426 access_precludes_ipa_sra_p (struct access *access)
3427 {
3428 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3429 is an incompatible assignment in a call statement (and possibly even in asm
3430 statements). This can be relaxed by using a new temporary but only for
3431 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3432 intraprocedural SRA we deal with this by keeping the old aggregate around,
3433 something we cannot do in IPA-SRA.) */
3434 if (access->write
3435 && (is_gimple_call (access->stmt)
3436 || gimple_code (access->stmt) == GIMPLE_ASM))
3437 return true;
3438
3439 return false;
3440 }
3441
3442
3443 /* Sort collected accesses for parameter PARM, identify representatives for
3444 each accessed region and link them together. Return NULL if there are
3445 different but overlapping accesses, return the special pointer value meaning
3446 there are no accesses for this parameter if that is the case, and return the
3447 first representative otherwise. Set *RO_GRP if there is a group of accesses
3448 with only read (i.e. no write) accesses. */
3449
3450 static struct access *
3451 splice_param_accesses (tree parm, bool *ro_grp)
3452 {
3453 int i, j, access_count, group_count;
3454 int agg_size, total_size = 0;
3455 struct access *access, *res, **prev_acc_ptr = &res;
3456 VEC (access_p, heap) *access_vec;
3457
3458 access_vec = get_base_access_vector (parm);
3459 if (!access_vec)
3460 return &no_accesses_representant;
3461 access_count = VEC_length (access_p, access_vec);
3462
3463 qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
3464 compare_access_positions);
3465
3466 i = 0;
3467 total_size = 0;
3468 group_count = 0;
3469 while (i < access_count)
3470 {
3471 bool modification;
3472 access = VEC_index (access_p, access_vec, i);
3473 modification = access->write;
3474 if (access_precludes_ipa_sra_p (access))
3475 return NULL;
3476
3477 /* Access is about to become group representative unless we find some
3478 nasty overlap which would preclude us from breaking this parameter
3479 apart. */
3480
3481 j = i + 1;
3482 while (j < access_count)
3483 {
3484 struct access *ac2 = VEC_index (access_p, access_vec, j);
3485 if (ac2->offset != access->offset)
3486 {
3487 /* All or nothing law for parameters. */
3488 if (access->offset + access->size > ac2->offset)
3489 return NULL;
3490 else
3491 break;
3492 }
3493 else if (ac2->size != access->size)
3494 return NULL;
3495
3496 if (access_precludes_ipa_sra_p (ac2))
3497 return NULL;
3498
3499 modification |= ac2->write;
3500 ac2->group_representative = access;
3501 ac2->next_sibling = access->next_sibling;
3502 access->next_sibling = ac2;
3503 j++;
3504 }
3505
3506 group_count++;
3507 access->grp_maybe_modified = modification;
3508 if (!modification)
3509 *ro_grp = true;
3510 *prev_acc_ptr = access;
3511 prev_acc_ptr = &access->next_grp;
3512 total_size += access->size;
3513 i = j;
3514 }
3515
3516 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3517 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3518 else
3519 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3520 if (total_size >= agg_size)
3521 return NULL;
3522
3523 gcc_assert (group_count > 0);
3524 return res;
3525 }
3526
3527 /* Decide whether a parameter with representative accesses given by REPR should
3528 be reduced into components. */
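
/* For example (sizes purely illustrative): a 128-bit struct passed by value
   of which only two 32-bit fields are actually used gives a total_size of 64,
   smaller than both the aggregate size and the size limit, so the parameter
   would be split into two components.  */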
3529
3530 static int
3531 decide_one_param_reduction (struct access *repr)
3532 {
3533 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3534 bool by_ref;
3535 tree parm;
3536
3537 parm = repr->base;
3538 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3539 gcc_assert (cur_parm_size > 0);
3540
3541 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3542 {
3543 by_ref = true;
3544 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3545 }
3546 else
3547 {
3548 by_ref = false;
3549 agg_size = cur_parm_size;
3550 }
3551
3552 if (dump_file)
3553 {
3554 struct access *acc;
3555 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3556 print_generic_expr (dump_file, parm, 0);
3557 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3558 for (acc = repr; acc; acc = acc->next_grp)
3559 dump_access (dump_file, acc, true);
3560 }
3561
3562 total_size = 0;
3563 new_param_count = 0;
3564
3565 for (; repr; repr = repr->next_grp)
3566 {
3567 gcc_assert (parm == repr->base);
3568 new_param_count++;
3569
3570 if (!by_ref || (!repr->grp_maybe_modified
3571 && !repr->grp_not_necessarilly_dereferenced))
3572 total_size += repr->size;
3573 else
3574 total_size += cur_parm_size;
3575 }
3576
3577 gcc_assert (new_param_count > 0);
3578
3579 if (optimize_function_for_size_p (cfun))
3580 parm_size_limit = cur_parm_size;
3581 else
3582 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3583 * cur_parm_size);
3584
3585 if (total_size < agg_size
3586 && total_size <= parm_size_limit)
3587 {
3588 if (dump_file)
3589 fprintf (dump_file, " ....will be split into %i components\n",
3590 new_param_count);
3591 return new_param_count;
3592 }
3593 else
3594 return 0;
3595 }
3596
3597 /* The order of the following enums is important; we need to do extra work for
3598 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3599 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3600 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3601
3602 /* Identify representatives of all accesses to all candidate parameters for
3603 IPA-SRA. Return result based on what representatives have been found. */
3604
3605 static enum ipa_splicing_result
3606 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3607 {
3608 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3609 tree parm;
3610 struct access *repr;
3611
3612 *representatives = VEC_alloc (access_p, heap, func_param_count);
3613
3614 for (parm = DECL_ARGUMENTS (current_function_decl);
3615 parm;
3616 parm = DECL_CHAIN (parm))
3617 {
3618 if (is_unused_scalar_param (parm))
3619 {
3620 VEC_quick_push (access_p, *representatives,
3621 &no_accesses_representant);
3622 if (result == NO_GOOD_ACCESS)
3623 result = UNUSED_PARAMS;
3624 }
3625 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3626 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3627 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3628 {
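       /* A parameter that is a pointer to a scalar type is a candidate for
	  being converted into a scalar passed by value; find a representative
	  for all accesses made through it.  */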
3629 repr = unmodified_by_ref_scalar_representative (parm);
3630 VEC_quick_push (access_p, *representatives, repr);
3631 if (repr)
3632 result = UNMODIF_BY_REF_ACCESSES;
3633 }
3634 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3635 {
3636 bool ro_grp = false;
3637 repr = splice_param_accesses (parm, &ro_grp);
3638 VEC_quick_push (access_p, *representatives, repr);
3639
3640 if (repr && !no_accesses_p (repr))
3641 {
3642 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3643 {
3644 if (ro_grp)
3645 result = UNMODIF_BY_REF_ACCESSES;
3646 else if (result < MODIF_BY_REF_ACCESSES)
3647 result = MODIF_BY_REF_ACCESSES;
3648 }
3649 else if (result < BY_VAL_ACCESSES)
3650 result = BY_VAL_ACCESSES;
3651 }
3652 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3653 result = UNUSED_PARAMS;
3654 }
3655 else
3656 VEC_quick_push (access_p, *representatives, NULL);
3657 }
3658
3659 if (result == NO_GOOD_ACCESS)
3660 {
3661 VEC_free (access_p, heap, *representatives);
3662 *representatives = NULL;
3663 return NO_GOOD_ACCESS;
3664 }
3665
3666 return result;
3667 }
3668
3669 /* Return the index of BASE in PARMS. Abort if it is not found. */
3670
3671 static inline int
3672 get_param_index (tree base, VEC(tree, heap) *parms)
3673 {
3674 int i, len;
3675
3676 len = VEC_length (tree, parms);
3677 for (i = 0; i < len; i++)
3678 if (VEC_index (tree, parms, i) == base)
3679 return i;
3680 gcc_unreachable ();
3681 }
3682
3683 /* Convert the decisions made at the representative level into compact
3684    parameter adjustments.  REPRESENTATIVES is a vector of pointers to the
3685    first representative of each parameter's accesses, ADJUSTMENTS_COUNT is
3686    the expected final number of adjustments.  */
3687
3688 static ipa_parm_adjustment_vec
3689 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
3690 int adjustments_count)
3691 {
3692 VEC (tree, heap) *parms;
3693 ipa_parm_adjustment_vec adjustments;
3694 tree parm;
3695 int i;
3696
3697 gcc_assert (adjustments_count > 0);
3698 parms = ipa_get_vector_of_formal_parms (current_function_decl);
3699 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
3700 parm = DECL_ARGUMENTS (current_function_decl);
3701 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
3702 {
3703 struct access *repr = VEC_index (access_p, representatives, i);
3704
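       /* A NULL representative means the parameter is to be copied verbatim,
	  whereas the special no-accesses representative means it is unused
	  and can be removed altogether.  */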
3705 if (!repr || no_accesses_p (repr))
3706 {
3707 struct ipa_parm_adjustment *adj;
3708
3709 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3710 memset (adj, 0, sizeof (*adj));
3711 adj->base_index = get_param_index (parm, parms);
3712 adj->base = parm;
3713 if (!repr)
3714 adj->copy_param = 1;
3715 else
3716 adj->remove_param = 1;
3717 }
3718 else
3719 {
3720 struct ipa_parm_adjustment *adj;
3721 int index = get_param_index (parm, parms);
3722
3723 for (; repr; repr = repr->next_grp)
3724 {
3725 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3726 memset (adj, 0, sizeof (*adj));
3727 gcc_assert (repr->base == parm);
3728 adj->base_index = index;
3729 adj->base = repr->base;
3730 adj->type = repr->type;
3731 adj->offset = repr->offset;
3732 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
3733 && (repr->grp_maybe_modified
3734 || repr->grp_not_necessarilly_dereferenced));
3735
3736 }
3737 }
3738 }
3739 VEC_free (tree, heap, parms);
3740 return adjustments;
3741 }
3742
3743 /* Analyze the collected accesses and produce a plan for what to do with the
3744    parameters in the form of a vector of adjustments; NULL means do nothing.  */
3745
3746 static ipa_parm_adjustment_vec
3747 analyze_all_param_acesses (void)
3748 {
3749 enum ipa_splicing_result repr_state;
3750 bool proceed = false;
3751 int i, adjustments_count = 0;
3752 VEC (access_p, heap) *representatives;
3753 ipa_parm_adjustment_vec adjustments;
3754
3755 repr_state = splice_all_param_accesses (&representatives);
3756 if (repr_state == NO_GOOD_ACCESS)
3757 return NULL;
3758
3759 /* If there are any parameters passed by reference which are not modified
3760 directly, we need to check whether they can be modified indirectly. */
3761 if (repr_state == UNMODIF_BY_REF_ACCESSES)
3762 {
3763 analyze_caller_dereference_legality (representatives);
3764 analyze_modified_params (representatives);
3765 }
3766
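   /* Decide for each parameter whether it will be split, removed or left
      alone and count the expected number of resulting adjustments.  */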
3767 for (i = 0; i < func_param_count; i++)
3768 {
3769 struct access *repr = VEC_index (access_p, representatives, i);
3770
3771 if (repr && !no_accesses_p (repr))
3772 {
3773 if (repr->grp_scalar_ptr)
3774 {
3775 adjustments_count++;
3776 if (repr->grp_not_necessarilly_dereferenced
3777 || repr->grp_maybe_modified)
3778 VEC_replace (access_p, representatives, i, NULL);
3779 else
3780 {
3781 proceed = true;
3782 sra_stats.scalar_by_ref_to_by_val++;
3783 }
3784 }
3785 else
3786 {
3787 int new_components = decide_one_param_reduction (repr);
3788
3789 if (new_components == 0)
3790 {
3791 VEC_replace (access_p, representatives, i, NULL);
3792 adjustments_count++;
3793 }
3794 else
3795 {
3796 adjustments_count += new_components;
3797 sra_stats.aggregate_params_reduced++;
3798 sra_stats.param_reductions_created += new_components;
3799 proceed = true;
3800 }
3801 }
3802 }
3803 else
3804 {
3805 if (no_accesses_p (repr))
3806 {
3807 proceed = true;
3808 sra_stats.deleted_unused_parameters++;
3809 }
3810 adjustments_count++;
3811 }
3812 }
3813
3814 if (!proceed && dump_file)
3815 fprintf (dump_file, "NOT proceeding to change params.\n");
3816
3817 if (proceed)
3818 adjustments = turn_representatives_into_adjustments (representatives,
3819 adjustments_count);
3820 else
3821 adjustments = NULL;
3822
3823 VEC_free (access_p, heap, representatives);
3824 return adjustments;
3825 }
3826
3827 /* If a parameter replacement identified by ADJ does not yet exist in the form
3828    of a declaration, create it and record it, otherwise return the previously
3829    created one.  */
3830
3831 static tree
3832 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
3833 {
3834 tree repl;
3835 if (!adj->new_ssa_base)
3836 {
3837 char *pretty_name = make_fancy_name (adj->base);
3838
3839 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
3840 DECL_NAME (repl) = get_identifier (pretty_name);
3841 obstack_free (&name_obstack, pretty_name);
3842
3843 get_var_ann (repl);
3844 add_referenced_var (repl);
3845 adj->new_ssa_base = repl;
3846 }
3847 else
3848 repl = adj->new_ssa_base;
3849 return repl;
3850 }
3851
3852 /* Find the first adjustment for a particular parameter BASE in a vector of
3853 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
3854 adjustment. */
3855
3856 static struct ipa_parm_adjustment *
3857 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
3858 {
3859 int i, len;
3860
3861 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3862 for (i = 0; i < len; i++)
3863 {
3864 struct ipa_parm_adjustment *adj;
3865
3866 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3867 if (!adj->copy_param && adj->base == base)
3868 return adj;
3869 }
3870
3871 return NULL;
3872 }
3873
3874 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
3875    removed because its value is not used, replace the SSA_NAME, together with
3876    all of its uses, with one based on a newly created VAR_DECL and return true.
3877    ADJUSTMENTS is a pointer to a vector of adjustments.  */
3878
3879 static bool
3880 replace_removed_params_ssa_names (gimple stmt,
3881 ipa_parm_adjustment_vec adjustments)
3882 {
3883 struct ipa_parm_adjustment *adj;
3884 tree lhs, decl, repl, name;
3885
3886 if (gimple_code (stmt) == GIMPLE_PHI)
3887 lhs = gimple_phi_result (stmt);
3888 else if (is_gimple_assign (stmt))
3889 lhs = gimple_assign_lhs (stmt);
3890 else if (is_gimple_call (stmt))
3891 lhs = gimple_call_lhs (stmt);
3892 else
3893 gcc_unreachable ();
3894
3895 if (TREE_CODE (lhs) != SSA_NAME)
3896 return false;
3897 decl = SSA_NAME_VAR (lhs);
3898 if (TREE_CODE (decl) != PARM_DECL)
3899 return false;
3900
3901 adj = get_adjustment_for_base (adjustments, decl);
3902 if (!adj)
3903 return false;
3904
3905 repl = get_replaced_param_substitute (adj);
3906 name = make_ssa_name (repl, stmt);
3907
3908 if (dump_file)
3909 {
3910 fprintf (dump_file, "replacing an SSA name of a removed param ");
3911 print_generic_expr (dump_file, lhs, 0);
3912 fprintf (dump_file, " with ");
3913 print_generic_expr (dump_file, name, 0);
3914 fprintf (dump_file, "\n");
3915 }
3916
3917 if (is_gimple_assign (stmt))
3918 gimple_assign_set_lhs (stmt, name);
3919 else if (is_gimple_call (stmt))
3920 gimple_call_set_lhs (stmt, name);
3921 else
3922 gimple_phi_set_result (stmt, name);
3923
3924 replace_uses_by (lhs, name);
3925 release_ssa_name (lhs);
3926 return true;
3927 }
3928
3929 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3930    so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
3931    specifies whether the function should care about type incompatibility
3932    between the current and the new expressions.  If it is false, the function
3933    will leave incompatibility issues to the caller.  Return true iff the
3934    expression was modified.  */
3935
3936 static bool
3937 sra_ipa_modify_expr (tree *expr, bool convert,
3938 ipa_parm_adjustment_vec adjustments)
3939 {
3940 int i, len;
3941 struct ipa_parm_adjustment *adj, *cand = NULL;
3942 HOST_WIDE_INT offset, size, max_size;
3943 tree base, src;
3944
3945 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3946
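   /* Strip bit-field references and real/imaginary part accesses and work on
      the expression underneath; a replacement found there will need an
      explicit type conversion.  */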
3947 if (TREE_CODE (*expr) == BIT_FIELD_REF
3948 || TREE_CODE (*expr) == IMAGPART_EXPR
3949 || TREE_CODE (*expr) == REALPART_EXPR)
3950 {
3951 expr = &TREE_OPERAND (*expr, 0);
3952 convert = true;
3953 }
3954
3955 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
3956 if (!base || size == -1 || max_size == -1)
3957 return false;
3958
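   /* Fold the constant byte offset of a MEM_REF into the bit offset and
      continue with the dereferenced pointer as the base.  */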
3959 if (TREE_CODE (base) == MEM_REF)
3960 {
3961 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
3962 base = TREE_OPERAND (base, 0);
3963 }
3964
3965 base = get_ssa_base_param (base);
3966 if (!base || TREE_CODE (base) != PARM_DECL)
3967 return false;
3968
3969 for (i = 0; i < len; i++)
3970 {
3971 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3972
3973 if (adj->base == base
3974     && (adj->offset == offset || adj->remove_param))
3975 {
3976 cand = adj;
3977 break;
3978 }
3979 }
3980 if (!cand || cand->copy_param || cand->remove_param)
3981 return false;
3982
3983 if (cand->by_ref)
3984 src = build_simple_mem_ref (cand->reduction);
3985 else
3986 src = cand->reduction;
3987
3988 if (dump_file && (dump_flags & TDF_DETAILS))
3989 {
3990 fprintf (dump_file, "About to replace expr ");
3991 print_generic_expr (dump_file, *expr, 0);
3992 fprintf (dump_file, " with ");
3993 print_generic_expr (dump_file, src, 0);
3994 fprintf (dump_file, "\n");
3995 }
3996
3997 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3998 {
3999 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4000 *expr = vce;
4001 }
4002 else
4003 *expr = src;
4004 return true;
4005 }
4006
4007 /* If the statement pointed to by STMT_PTR contains any expressions that need
4008    to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle
4009    any potential type incompatibilities (GSI is used to accommodate conversion
4010    statements and must point to the statement).  Return true iff the statement
4011    was modified.  */
4012
4013 static bool
4014 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4015 ipa_parm_adjustment_vec adjustments)
4016 {
4017 gimple stmt = *stmt_ptr;
4018 tree *lhs_p, *rhs_p;
4019 bool any;
4020
4021 if (!gimple_assign_single_p (stmt))
4022 return false;
4023
4024 rhs_p = gimple_assign_rhs1_ptr (stmt);
4025 lhs_p = gimple_assign_lhs_ptr (stmt);
4026
4027 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4028 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4029 if (any)
4030 {
4031 tree new_rhs = NULL_TREE;
4032
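       /* When only one side of the assignment has been replaced, the types of
	  the two sides may no longer be compatible and we have to compensate
	  with an explicit conversion.  */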
4033 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4034 {
4035 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4036 {
4037 /* V_C_Es of constructors can cause trouble (PR 42714). */
4038 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4039 *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
4040 else
4041 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4042 }
4043 else
4044 new_rhs = fold_build1_loc (gimple_location (stmt),
4045 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4046 *rhs_p);
4047 }
4048 else if (REFERENCE_CLASS_P (*rhs_p)
4049 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4050 && !is_gimple_reg (*lhs_p))
4051 /* This can happen when an assignment in between two single field
4052 structures is turned into an assignment in between two pointers to
4053 scalars (PR 42237). */
4054 new_rhs = *rhs_p;
4055
4056 if (new_rhs)
4057 {
4058 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4059 true, GSI_SAME_STMT);
4060
4061 gimple_assign_set_rhs_from_tree (gsi, tmp);
4062 }
4063
4064 return true;
4065 }
4066
4067 return false;
4068 }
4069
4070 /* Traverse the function body and perform all modifications as described in
4071    ADJUSTMENTS.  Return true iff the CFG has been changed.  */
4072
4073 static bool
4074 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4075 {
4076 bool cfg_changed = false;
4077 basic_block bb;
4078
4079 FOR_EACH_BB (bb)
4080 {
4081 gimple_stmt_iterator gsi;
4082
4083 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4084 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4085
4086 gsi = gsi_start_bb (bb);
4087 while (!gsi_end_p (gsi))
4088 {
4089 gimple stmt = gsi_stmt (gsi);
4090 bool modified = false;
4091 tree *t;
4092 unsigned i;
4093
4094 switch (gimple_code (stmt))
4095 {
4096 case GIMPLE_RETURN:
4097 t = gimple_return_retval_ptr (stmt);
4098 if (*t != NULL_TREE)
4099 modified |= sra_ipa_modify_expr (t, true, adjustments);
4100 break;
4101
4102 case GIMPLE_ASSIGN:
4103 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4104 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4105 break;
4106
4107 case GIMPLE_CALL:
4108 /* Operands must be processed before the lhs. */
4109 for (i = 0; i < gimple_call_num_args (stmt); i++)
4110 {
4111 t = gimple_call_arg_ptr (stmt, i);
4112 modified |= sra_ipa_modify_expr (t, true, adjustments);
4113 }
4114
4115 if (gimple_call_lhs (stmt))
4116 {
4117 t = gimple_call_lhs_ptr (stmt);
4118 modified |= sra_ipa_modify_expr (t, false, adjustments);
4119 modified |= replace_removed_params_ssa_names (stmt,
4120 adjustments);
4121 }
4122 break;
4123
4124 case GIMPLE_ASM:
4125 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4126 {
4127 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4128 modified |= sra_ipa_modify_expr (t, true, adjustments);
4129 }
4130 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4131 {
4132 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4133 modified |= sra_ipa_modify_expr (t, false, adjustments);
4134 }
4135 break;
4136
4137 default:
4138 break;
4139 }
4140
4141 if (modified)
4142 {
4143 update_stmt (stmt);
4144 if (maybe_clean_eh_stmt (stmt)
4145 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4146 cfg_changed = true;
4147 }
4148 gsi_next (&gsi);
4149 }
4150 }
4151
4152 return cfg_changed;
4153 }
4154
4155 /* Call gimple_debug_bind_reset_value on all debug statements describing
4156 gimple register parameters that are being removed or replaced. */
4157
4158 static void
4159 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4160 {
4161 int i, len;
4162
4163 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4164 for (i = 0; i < len; i++)
4165 {
4166 struct ipa_parm_adjustment *adj;
4167 imm_use_iterator ui;
4168 gimple stmt;
4169 tree name;
4170
4171 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4172 if (adj->copy_param || !is_gimple_reg (adj->base))
4173 continue;
4174 name = gimple_default_def (cfun, adj->base);
4175 if (!name)
4176 continue;
4177 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4178 {
4179 /* All other users must have been removed by
4180 ipa_sra_modify_function_body. */
4181 gcc_assert (is_gimple_debug (stmt));
4182 gimple_debug_bind_reset_value (stmt);
4183 update_stmt (stmt);
4184 }
4185 }
4186 }
4187
4188 /* Return true iff all callers have at least as many actual arguments as there
4189 are formal parameters in the current function. */
4190
4191 static bool
4192 all_callers_have_enough_arguments_p (struct cgraph_node *node)
4193 {
4194 struct cgraph_edge *cs;
4195 for (cs = node->callers; cs; cs = cs->next_caller)
4196 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4197 return false;
4198
4199 return true;
4200 }
4201
4202
4203 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.
     OLD_DECL identifies recursive calls that are redirected to NODE's decl.  */
4204
4205 static void
4206 convert_callers (struct cgraph_node *node, tree old_decl,
4207 ipa_parm_adjustment_vec adjustments)
4208 {
4209 tree old_cur_fndecl = current_function_decl;
4210 struct cgraph_edge *cs;
4211 basic_block this_block;
4212 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4213
4214 for (cs = node->callers; cs; cs = cs->next_caller)
4215 {
4216 current_function_decl = cs->caller->decl;
4217 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4218
4219 if (dump_file)
4220 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4221 cs->caller->uid, cs->callee->uid,
4222 cgraph_node_name (cs->caller),
4223 cgraph_node_name (cs->callee));
4224
4225 ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4226
4227 pop_cfun ();
4228 }
4229
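   /* Recompute the inline summaries of the affected callers, doing so only
      once per caller even if it has multiple edges to NODE.  */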
4230 for (cs = node->callers; cs; cs = cs->next_caller)
4231 if (bitmap_set_bit (recomputed_callers, cs->caller->uid))
4232 compute_inline_parameters (cs->caller);
4233 BITMAP_FREE (recomputed_callers);
4234
4235 current_function_decl = old_cur_fndecl;
4236
4237 if (!encountered_recursive_call)
4238 return;
4239
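   /* Walk the body of the new function and redirect any recursive calls that
      still refer to OLD_DECL to the new declaration, adjusting their
      arguments as well.  */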
4240 FOR_EACH_BB (this_block)
4241 {
4242 gimple_stmt_iterator gsi;
4243
4244 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4245 {
4246 gimple stmt = gsi_stmt (gsi);
4247 tree call_fndecl;
4248 if (gimple_code (stmt) != GIMPLE_CALL)
4249 continue;
4250 call_fndecl = gimple_call_fndecl (stmt);
4251 if (call_fndecl == old_decl)
4252 {
4253 if (dump_file)
4254 fprintf (dump_file, "Adjusting recursive call");
4255 gimple_call_set_fndecl (stmt, node->decl);
4256 ipa_modify_call_arguments (NULL, stmt, adjustments);
4257 }
4258 }
4259 }
4260
4261 return;
4262 }
4263
4264 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4265    as given in ADJUSTMENTS.  Return true iff the CFG has been changed.  */
4266
4267 static bool
4268 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4269 {
4270 struct cgraph_node *new_node;
4271 struct cgraph_edge *cs;
4272 bool cfg_changed;
4273 VEC (cgraph_edge_p, heap) * redirect_callers;
4274 int node_callers;
4275
4276 node_callers = 0;
4277 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
4278 node_callers++;
4279 redirect_callers = VEC_alloc (cgraph_edge_p, heap, node_callers);
4280 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
4281 VEC_quick_push (cgraph_edge_p, redirect_callers, cs);
4282
4283 rebuild_cgraph_edges ();
4284 pop_cfun ();
4285 current_function_decl = NULL_TREE;
4286
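   /* Create a new version of the function and redirect all the recorded
      callers to it; the parameter adjustments are then applied to the body of
      the new version.  */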
4287 new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
4288 NULL, NULL, "isra");
4289 current_function_decl = new_node->decl;
4290 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4291
4292 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4293 cfg_changed = ipa_sra_modify_function_body (adjustments);
4294 sra_ipa_reset_debug_stmts (adjustments);
4295 convert_callers (new_node, node->decl, adjustments);
4296 cgraph_make_node_local (new_node);
4297 return cfg_changed;
4298 }
4299
4300 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4301    its attributes, return true otherwise.  NODE is the cgraph node of the
4302    current function.  */
4303
4304 static bool
4305 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4306 {
4307 if (!cgraph_node_can_be_local_p (node))
4308 {
4309 if (dump_file)
4310 fprintf (dump_file, "Function not local to this compilation unit.\n");
4311 return false;
4312 }
4313
4314 if (!tree_versionable_function_p (node->decl))
4315 {
4316 if (dump_file)
4317 fprintf (dump_file, "Function is not versionable.\n");
4318 return false;
4319 }
4320
4321 if (DECL_VIRTUAL_P (current_function_decl))
4322 {
4323 if (dump_file)
4324 fprintf (dump_file, "Function is a virtual method.\n");
4325 return false;
4326 }
4327
4328 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4329 && node->global.size >= MAX_INLINE_INSNS_AUTO)
4330 {
4331 if (dump_file)
4332 fprintf (dump_file, "Function too big to be made truly local.\n");
4333 return false;
4334 }
4335
4336 if (!node->callers)
4337 {
4338 if (dump_file)
4339 fprintf (dump_file,
4340 "Function has no callers in this compilation unit.\n");
4341 return false;
4342 }
4343
4344 if (cfun->stdarg)
4345 {
4346 if (dump_file)
4347 fprintf (dump_file, "Function uses stdarg. \n");
4348 return false;
4349 }
4350
4351 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
4352 return false;
4353
4354 return true;
4355 }
4356
4357 /* Perform early interprocedural SRA. */
4358
4359 static unsigned int
4360 ipa_early_sra (void)
4361 {
4362 struct cgraph_node *node = cgraph_node (current_function_decl);
4363 ipa_parm_adjustment_vec adjustments;
4364 int ret = 0;
4365
4366 if (!ipa_sra_preliminary_function_checks (node))
4367 return 0;
4368
4369 sra_initialize ();
4370 sra_mode = SRA_MODE_EARLY_IPA;
4371
4372 if (!find_param_candidates ())
4373 {
4374 if (dump_file)
4375 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4376 goto simple_out;
4377 }
4378
4379 if (!all_callers_have_enough_arguments_p (node))
4380 {
4381 if (dump_file)
4382 fprintf (dump_file, "There are callers with insufficient number of "
4383 "arguments.\n");
4384 goto simple_out;
4385 }
4386
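   /* Allocate the per-basic-block dereference accounting and the bitmap of
      final basic blocks; these are used to prove that by-reference parameters
      are dereferenced on every execution path and may therefore be safely
      dereferenced in the callers instead.  */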
4387 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4388 func_param_count
4389 * last_basic_block_for_function (cfun));
4390 final_bbs = BITMAP_ALLOC (NULL);
4391
4392 scan_function ();
4393 if (encountered_apply_args)
4394 {
4395 if (dump_file)
4396 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4397 goto out;
4398 }
4399
4400 if (encountered_unchangable_recursive_call)
4401 {
4402 if (dump_file)
4403 fprintf (dump_file, "Function calls itself with insufficient "
4404 "number of arguments.\n");
4405 goto out;
4406 }
4407
4408 adjustments = analyze_all_param_acesses ();
4409 if (!adjustments)
4410 goto out;
4411 if (dump_file)
4412 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4413
4414 if (modify_function (node, adjustments))
4415 ret = TODO_update_ssa | TODO_cleanup_cfg;
4416 else
4417 ret = TODO_update_ssa;
4418 VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4419
4420 statistics_counter_event (cfun, "Unused parameters deleted",
4421 sra_stats.deleted_unused_parameters);
4422 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4423 sra_stats.scalar_by_ref_to_by_val);
4424 statistics_counter_event (cfun, "Aggregate parameters broken up",
4425 sra_stats.aggregate_params_reduced);
4426 statistics_counter_event (cfun, "Aggregate parameter components created",
4427 sra_stats.param_reductions_created);
4428
4429 out:
4430 BITMAP_FREE (final_bbs);
4431 free (bb_dereferences);
4432 simple_out:
4433 sra_deinitialize ();
4434 return ret;
4435 }
4436
4437 /* Return true if early IPA-SRA should be performed.  */
4438 static bool
4439 ipa_early_sra_gate (void)
4440 {
4441 return flag_ipa_sra && dbg_cnt (eipa_sra);
4442 }
4443
4444 struct gimple_opt_pass pass_early_ipa_sra =
4445 {
4446 {
4447 GIMPLE_PASS,
4448 "eipa_sra", /* name */
4449 ipa_early_sra_gate, /* gate */
4450 ipa_early_sra, /* execute */
4451 NULL, /* sub */
4452 NULL, /* next */
4453 0, /* static_pass_number */
4454 TV_IPA_SRA, /* tv_id */
4455 0, /* properties_required */
4456 0, /* properties_provided */
4457 0, /* properties_destroyed */
4458 0, /* todo_flags_start */
4459 TODO_dump_func | TODO_dump_cgraph /* todo_flags_finish */
4460 }
4461 };
4462
4463