/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
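/* For illustration (an example added in editing, not from the pass sources):
   given

     struct pair { int x; int y; };

     int
     f (void)
     {
       struct pair p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   SRA replaces the references to p.x and p.y with two independent scalar
   variables, conceptually

     int
     f (void)
     {
       int p$x = 1;
       int p$y = 2;
       return p$x + p$y;
     }

   after which p itself is dead and the scalar optimizers can, e.g., fold the
   result to the constant 3.  */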
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "statistics.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
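/* For example (illustrative, assuming a target with 32-bit int): for

     struct S { int i; int j; } s;

   an access to the whole of s has offset 0 and size 64, an access to s.i has
   offset 0 and size 32, and an access to s.j has offset 32 and size 32, so in
   the tree of representatives both field accesses become children of the
   whole-variable access.  */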
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static struct pointer_map_t *base_access_vec;

/* Set of candidates.  */
static bitmap candidate_bitmap;
static htab_t candidates;

/* For a candidate UID return the candidates decl.  */

static inline tree
candidate (unsigned uid)
{
  struct tree_decl_minimal t;
  t.uid = uid;
  return (tree) htab_find_with_hash (candidates, &t, uid);
}

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need be used as a whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;
/* Head of a linked list of accesses that need to have its subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;
/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;

/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
/* Statistics gathered during the pass.  The number of processed aggregates is
   readily available in analyze_all_variable_accesses and so is not stored
   here.  */

static struct
{
  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}
/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}
/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(vec<access_p> **) slot;
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}
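/* For example (illustrative): searching for <offset 32, size 32> in a tree
   whose root covers <0, 128> with children <0, 32> and <32, 64>: the root does
   not match, the child <0, 32> ends at or before offset 32 and is skipped, so
   the walk descends into <32, 64> and repeats there until an exact
   <offset, size> match (or NULL) is found.  */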
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}
/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */

static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}
/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */

static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = htab_create (vec_safe_length (cfun->local_decls) / 2,
			    uid_decl_map_hash, uid_decl_map_eq, NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}
/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
		      void *data ATTRIBUTE_UNUSED)
{
  vec<access_p> *access_vec = (vec<access_p> *) *value;
  vec_free (access_vec);
  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  htab_delete (candidates);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */

static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    htab_clear_slot (candidates,
		     htab_find_slot_with_hash (candidates, decl,
					       DECL_UID (decl), NO_INSERT));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!host_integerp (DECL_SIZE (fld), 1))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!host_integerp (bit_position (fld), 0))
	      {
		*msg = "structure field size too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft)
		&& type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et)
	  && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as a potentially
   final one.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  vec<access_p> *v;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    v = (vec<access_p> *) *slot;
  else
    vec_alloc (v, 32);

  v->safe_push (access);

  *((vec<access_p> **)
    pointer_map_insert (base_access_vec, base)) = v;

  return access;
}
/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (DECL_BIT_FIELD (fld))
	  return false;

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;
      }

  return true;
}
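/* For example (illustrative): struct A { int x; struct { double d; } inner; }
   satisfies the predicate above, while struct B { int bits : 3; } (a
   bit-field) and struct C { union { int i; float f; } u; } (a union member,
   which is not a RECORD_TYPE) do not.  */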
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
			     tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);
	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
			    NULL_TREE);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;

	    size = tree_low_cst (DECL_SIZE (fld), 1);
	    access = create_access_1 (base, pos, size);
	    access->expr = nref;
	    access->type = ft;
	    access->grp_total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos, nref);
      }
}
/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */

static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split
   apart.  Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
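/* For example (illustrative): for an assignment "dst = src" where both dst
   and src are candidate aggregates of the same type, the code above records
   an assign_link with lacc/racc pointing to the two accesses, so that
   subaccesses discovered on src can later be propagated to dst across this
   link.  */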
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
		void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (stmt);
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (cgraph_get_node (dest)
			  == cgraph_get_node (current_function_decl))
			{
			  encountered_recursive_call = true;
			  if (!callsite_has_enough_arguments_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
					     asm_visit_addr);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);

	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, true);
		}
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
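/* For example (illustrative): the accesses <offset 0, size 64>,
   <offset 32, size 32> and <offset 0, size 32> sort as <0, 64>, <0, 32>,
   <32, 32>; equal offsets put the bigger access first so that future parents
   always precede their children in the sorted vector.  */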
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;

    default:
      break;
    }
}

/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
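/* For example (illustrative): for the expression s.data[3].x the functions
   above build the name "s$data$3$x", which then appears as the name of the
   scalar replacement variable in dumps and debug output.  */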
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

static tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align < TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
}
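/* For example (illustrative, assuming BITS_PER_UNIT == 8): for a declaration
   base s, OFFSET 32 and EXP_TYPE float, the function above produces a MEM_REF
   equivalent to MEM[(float *)&s + 4B], possibly with a lowered-alignment
   variant of float if the alignment known for s does not guarantee 4-byte
   alignment at that offset.  */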
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
				 gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
			   struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE and be at the given OFFSET.  This function might
   not succeed, it returns true when it does and only then *RES points to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to the user (e.g. in warnings).
   In all other situations, build_ref_for_model or build_ref_for_offset should
   be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree tr_pos, expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      tr_pos = bit_position (fld);
	      if (!tr_pos || !host_integerp (tr_pos, 1))
		continue;
	      pos = TREE_INT_CST_LOW (tr_pos);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !host_integerp (tr_size, 1))
		continue;
	      size = TREE_INT_CST_LOW (tr_size);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !host_integerp (tr_size, 1))
	    return false;
	  el_size = tree_low_cst (tr_size, 1);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}
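/* For example (illustrative, assuming 32-bit int): with *RES being a variable
   s of type struct { int a; int b[4]; }, OFFSET 96 and EXP_TYPE int, the loop
   above first selects the field b (covering bits 32..159), then the array
   element at index (96 - 32) / 32 == 2, yielding the user-friendly reference
   s.b[2].  */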
/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* Print message to dump file why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  void **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  if (needs_to_live_in_memory (var))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!host_integerp (TYPE_SIZE (type), 1))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	 we also want to schedule it rather late.  Thus we ignore it in
	 the early pass.  */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = htab_find_slot_with_hash (candidates, var, DECL_UID (var), INSERT);
  *slot = (void *) var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }

  return true;
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those with type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = access_vec->length ();

  /* Sort by <OFFSET, SIZE>.  */
  access_vec->qsort (compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = (*access_vec)[i];
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
	&& is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
	&& is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->grp_total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
	{
	  first = false;
	  low = access->offset;
	  high = access->offset + access->size;
	}
      else if (access->offset > low && access->offset + access->size > high)
	return NULL;
      else
	gcc_assert (access->offset >= low
		    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = (*access_vec)[j];
	  if (ac2->offset != access->offset || ac2->size != access->size)
	    break;
	  if (ac2->write)
	    {
	      grp_write = true;
	      grp_scalar_write = (grp_scalar_write
				  || is_gimple_reg_type (ac2->type));
	    }
	  else
	    {
	      grp_read = true;
	      if (is_gimple_reg_type (ac2->type))
		{
		  if (grp_scalar_read)
		    multiple_scalar_reads = true;
		  else
		    grp_scalar_read = true;
		}
	    }
	  grp_assignment_read |= ac2->grp_assignment_read;
	  grp_assignment_write |= ac2->grp_assignment_write;
	  grp_partial_lhs |= ac2->grp_partial_lhs;
	  unscalarizable_region |= ac2->grp_unscalarizable_region;
	  total_scalarization |= ac2->grp_total_scalarization;
	  relink_to_new_repr (access, ac2);

	  /* If there are both aggregate-type and scalar-type accesses with
	     this combination of size and offset, the comparison function
	     should have put the scalars first.  */
	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
	  ac2->group_representative = access;
	  j++;
	}

      i = j;
      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_total_scalarization = total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
	add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == (*access_vec)[0]);
  return res;
}
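/* For example (illustrative): after sorting, the accesses <0, 32> (write),
   <0, 32> (read) and <32, 32> (read) of one variable are spliced into two
   group representatives, <0, 32> with both grp_read and grp_write set and
   <32, 32> with only grp_read set.  */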
/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access)
{
  tree repl;

  if (access->grp_to_be_debug_replaced)
    {
      repl = create_tmp_var_raw (access->type, NULL);
      DECL_CONTEXT (repl) = current_function_decl;
    }
  else
    repl = create_tmp_var (access->type, "SR");
  if (TREE_CODE (access->type) == COMPLEX_TYPE
      || TREE_CODE (access->type) == VECTOR_TYPE)
    {
      if (!access->grp_partial_lhs)
	DECL_GIMPLE_REG_P (repl) = 1;
    }
  else if (access->grp_partial_lhs
	   && is_gimple_reg_type (access->type))
    TREE_ADDRESSABLE (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr (access->expr), d;
      bool fail = false;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
	 as DECL_DEBUG_EXPR isn't considered when looking for still
	 used SSA_NAMEs and thus they could be freed.  All debug info
	 generation cares is whether something is constant or variable
	 and that get_ref_base_and_extent works properly on the
	 expression.  It cannot handle accesses at a non-constant offset
	 though, so just give up in those cases.  */
      for (d = debug_expr;
	   !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
	   d = TREE_OPERAND (d, 0))
	switch (TREE_CODE (d))
	  {
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	    if (TREE_OPERAND (d, 1)
		&& TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
	      fail = true;
	    if (TREE_OPERAND (d, 3)
		&& TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
	      fail = true;
	    /* FALLTHRU */
	  case COMPONENT_REF:
	    if (TREE_OPERAND (d, 2)
		&& TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
	      fail = true;
	    break;
	  case MEM_REF:
	    if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
	      fail = true;
	    else
	      d = TREE_OPERAND (d, 0);
	    break;
	  default:
	    break;
	  }
      if (!fail)
	{
	  SET_DECL_DEBUG_EXPR (repl, debug_expr);
	  DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
	}
      if (access->grp_no_warning)
	TREE_NO_WARNING (repl) = 1;
      else
	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      if (access->grp_to_be_debug_replaced)
	{
	  fprintf (dump_file, "Created a debug-only replacement for ");
	  print_generic_expr (dump_file, access->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u\n",
		   (unsigned) access->offset, (unsigned) access->size);
	}
      else
	{
	  fprintf (dump_file, "Created a replacement for ");
	  print_generic_expr (dump_file, access->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u: ",
		   (unsigned) access->offset, (unsigned) access->size);
	  print_generic_expr (dump_file, repl, 0);
	  fprintf (dump_file, "\n");
	}
    }
  sra_stats.replacements++;

  return repl;
}
/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access);
  return access->replacement_decl;
}
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to it is not "within" the root.  Return false iff some accesses partially
   overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
	root->first_child = *access;
      else
	last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
	return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}
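/* For example (illustrative): representatives <0, 64> and <32, 64> overlap
   only partially; the inner access starts inside the root but its end, 96,
   exceeds the root's limit of 64, so the check above sees offset 32 < limit
   and returns false, disqualifying the whole aggregate.  */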
/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
	return false;
      root->next_grp = access;
    }
  return true;
}
/* Return true if expr contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
	  && !host_integerp (array_ref_low_bound (expr), 0))
	return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably always set
   grp_read and grp_assign_read according to MARK_READ and grp_write when
   MARK_WRITE is true.

   Creating a replacement for a scalar access is considered beneficial if its
   grp_hint is set (this means we are either attempting total scalarization or
   there is more than one direct read access) or according to the following
   table:

   Access written to through a scalar type (once or more times)
   |
   | Written to in an assignment statement
   | |
   | | Access read as scalar _once_
   | | |
   | | | Read in an assignment statement
   | | | |
   | | | | Scalarize       Comment
-----------------------------------------------------------------------------
   0 0 0 0                 No access for the scalar
   0 0 0 1                 No access for the scalar
   0 0 1 0                 No              Single read - won't help
   0 0 1 1                 No              The same case
   0 1 0 0                 No access for the scalar
   0 1 0 1                 No access for the scalar
   0 1 1 0                 Yes             s = *g; return s.i;
   0 1 1 1                 Yes             The same case as above
   1 0 0 0                 No              Won't help
   1 0 0 1                 Yes             s.i = 1; *g = s;
   1 0 1 0                 Yes             s.i = 5; g = s.i;
   1 0 1 1                 Yes             The same case as above
   1 1 0 0                 No              Won't help.
   1 1 0 1                 Yes             s.i = 1; *g = s;
   1 1 1 0                 Yes             s = *g; return s.i;
   1 1 1 1                 Yes             Any of the above yeses  */
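
/* Editor's illustration (a sketch, not from the original sources): the row
   "0 1 1 0" above corresponds to code such as

     struct S { int i; float f; };

     int
     foo (struct S *g)
     {
       struct S s;
       s = *g;       // written to in an assignment statement
       return s.i;   // read as a scalar once
     }

   where replacing s.i by a scalar lets the aggregate copy be split up and
   the loaded value forwarded directly to the return statement.  */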
static bool
analyze_access_subtree (struct access *root, struct access *parent,
			bool allow_replacements)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;

  if (parent)
    {
      if (parent->grp_read)
	root->grp_read = 1;
      if (parent->grp_assignment_read)
	root->grp_assignment_read = 1;
      if (parent->grp_write)
	root->grp_write = 1;
      if (parent->grp_assignment_write)
	root->grp_assignment_write = 1;
      if (parent->grp_total_scalarization)
	root->grp_total_scalarization = 1;
    }

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      hole |= covered_to < child->offset;
      sth_created |= analyze_access_subtree (child, root,
					     allow_replacements && !scalar);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      root->grp_total_scalarization &= child->grp_total_scalarization;
      if (child->grp_covered)
	covered_to += child->size;
      else
	hole = true;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
	  || ((root->grp_scalar_read || root->grp_assignment_read)
	      && (root->grp_scalar_write || root->grp_assignment_write))))
    {
      bool new_integer_type;
      /* Always create access replacements that cover the whole access.
	 For integral types this means the precision has to match.
	 Avoid assumptions based on the integral type kind, too.  */
      if (INTEGRAL_TYPE_P (root->type)
	  && (TREE_CODE (root->type) != INTEGER_TYPE
	      || TYPE_PRECISION (root->type) != root->size)
	  /* But leave bitfield accesses alone.  */
	  && (TREE_CODE (root->expr) != COMPONENT_REF
	      || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
	{
	  tree rt = root->type;
	  gcc_assert ((root->offset % BITS_PER_UNIT) == 0
		      && (root->size % BITS_PER_UNIT) == 0);
	  root->type = build_nonstandard_integer_type (root->size,
						       TYPE_UNSIGNED (rt));
	  root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
					     root->base, root->offset,
					     root->type, NULL, false);
	  new_integer_type = true;
	}
      else
	new_integer_type = false;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Marking ");
	  print_generic_expr (dump_file, root->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u ",
		   (unsigned) root->offset, (unsigned) root->size);
	  fprintf (dump_file, " to be replaced%s.\n",
		   new_integer_type ? " with an integer": "");
	}

      root->grp_to_be_replaced = 1;
      sth_created = true;
      hole = false;
    }
  else
    {
      if (allow_replacements
	  && scalar && !root->first_child
	  && (root->grp_scalar_write || root->grp_assignment_write))
	{
	  gcc_checking_assert (!root->grp_scalar_read
			       && !root->grp_assignment_read);
	  sth_created = true;
	  if (MAY_HAVE_DEBUG_STMTS)
	    {
	      root->grp_to_be_debug_replaced = 1;
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Marking ");
		  print_generic_expr (dump_file, root->base, 0);
		  fprintf (dump_file, " offset: %u, size: %u ",
			   (unsigned) root->offset, (unsigned) root->size);
		  fprintf (dump_file, " to be replaced with debug "
			   "statements.\n");
		}
	    }
	}

      if (covered_to < limit)
	hole = true;
      if (scalar)
	root->grp_total_scalarization = 0;
    }

  if (!hole || root->grp_total_scalarization)
    root->grp_covered = 1;
  else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  return sth_created;
}
/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */

static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, NULL, true))
	ret = true;
      access = access->next_grp;
    }

  return ret;
}
/* Return true iff a potential new child of LACC at offset OFFSET and with size
   SIZE would conflict with an already existing one.  If exactly such a child
   already exists in LACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
			      HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
	{
	  *exact_match = child;
	  return true;
	}

      if (child->offset < norm_offset + size
	  && child->offset + child->size > norm_offset)
	return true;
    }

  return false;
}
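
/* Editor's illustration (assumed layout, not from the original sources): if
   LACC already has a child at offset 0 with size 64, a potential new child
   at offset 32 with size 64 conflicts because the two overlap without one
   containing the other; a new child at offset 0 with size 64 is an exact
   match, and one at offset 64 or beyond is simply disjoint.  */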
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, it's not located in
   any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
				HOST_WIDE_INT new_offset)
{
  struct access *access;
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
					   model->type))
    {
      access->grp_no_warning = true;
      expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
				  new_offset, model, NULL, false);
    }

  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
/* Propagate all subaccesses of RACC across an assignment link to LACC. Return
   true if any new subaccess was created.  Additionally, if RACC is a scalar
   access but LACC is not, change the type of the latter, if possible.  */

static bool
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
{
  struct access *rchild;
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
  bool ret = false;

  if (is_gimple_reg_type (lacc->type)
      || lacc->grp_unscalarizable_region
      || racc->grp_unscalarizable_region)
    return false;

  if (is_gimple_reg_type (racc->type))
    {
      if (!lacc->first_child && !racc->first_child)
	{
	  tree t = lacc->base;

	  lacc->type = racc->type;
	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
						  lacc->offset, racc->type))
	    lacc->expr = t;
	  else
	    {
	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
						lacc->base, lacc->offset,
						racc, NULL, false);
	      lacc->grp_no_warning = true;
	    }
	}
      return false;
    }

  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
    {
      struct access *new_acc = NULL;
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;

      if (rchild->grp_unscalarizable_region)
	continue;

      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
					&new_acc))
	{
	  if (new_acc)
	    {
	      rchild->grp_hint = 1;
	      new_acc->grp_hint |= new_acc->grp_read;
	      if (rchild->first_child)
		ret |= propagate_subaccesses_across_link (new_acc, rchild);
	    }
	  continue;
	}

      rchild->grp_hint = 1;
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
      if (new_acc)
	{
	  ret = true;
	  if (racc->first_child)
	    propagate_subaccesses_across_link (new_acc, rchild);
	}
    }

  return ret;
}
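
/* Editor's illustration (a sketch, not from the original sources): with

     struct S { int i; int j; } a, b;
     ...
     use (b.i);   // creates a subaccess of b at offset 0
     a = b;       // creates an assign_link from the RHS b to the LHS a

   the subaccess of b is propagated across the link, creating an artificial
   child access of a at the corresponding offset, so that the aggregate copy
   can later be rewritten as copies of the individual scalar
   replacements.  */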
/* Propagate all subaccesses across assignment links.  */

static void
propagate_all_subaccesses (void)
{
  while (work_queue_head)
    {
      struct access *racc = pop_access_from_work_queue ();
      struct assign_link *link;

      gcc_assert (racc->first_link);

      for (link = racc->first_link; link; link = link->next)
	{
	  struct access *lacc = link->lacc;

	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
	    continue;
	  lacc = lacc->group_representative;
	  if (propagate_subaccesses_across_link (lacc, racc)
	      && lacc->first_link)
	    add_access_to_work_queue (lacc);
	}
    }
}
/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage.  */

static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i, max_total_scalarization_size;

  max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
    * MOVE_RATIO (optimize_function_for_speed_p (cfun));

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
	tree var = candidate (i);

	if (TREE_CODE (var) == VAR_DECL
	    && type_consists_of_records_p (TREE_TYPE (var)))
	  {
	    if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
		<= max_total_scalarization_size)
	      {
		completely_scalarize_var (var);
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fprintf (dump_file, "Will attempt to totally scalarize ");
		    print_generic_expr (dump_file, var, 0);
		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
		  }
	      }
	    else if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Too big to totally scalarize: ");
		print_generic_expr (dump_file, var, 0);
		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
	      }
	  }
      }

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access;

      access = sort_and_splice_var_accesses (var);
      if (!access || !build_access_trees (access))
	disqualify_candidate (var,
			      "No or inhibitingly overlapping accesses.");
    }

  propagate_all_subaccesses ();

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access = get_first_repr_for_decl (var);

      if (analyze_access_trees (access))
	{
	  res++;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nAccess trees for ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
	      dump_access_tree (dump_file, access);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	disqualify_candidate (var, "No scalar replacements to be created.");
    }

  BITMAP_FREE (tmp);

  if (res)
    statistics_counter_event (cfun, "Scalarized aggregates", res);

  return res;
}
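
/* Editor's illustration (assumed 64-bit target and MOVE_RATIO of 4, not from
   the original sources): the total scalarization limit above would be
   8 * 8 * 4 == 256 bits, so a record-only candidate such as

     struct P { double x; double y; };   // 128 bits

   is completely scalarized, while aggregates larger than the limit are left
   to the access-based analysis alone.  */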
/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS, all its children, siblings and their children
   are to be processed.  AGG is an aggregate type expression (can be a
   declaration but does not have to be, it can for example also be a mem_ref or
   a series of handled components).  TOP_OFFSET is the offset of the processed
   subtree which has to be subtracted from offsets of individual accesses to
   get corresponding offsets for AGG.  If CHUNK_SIZE is non-null, copy only
   replacements in the interval <start_offset, start_offset + chunk_size>,
   otherwise copy all.  GSI is a statement iterator used to place the new
   statements.  WRITE should be true when the statements should write from AGG
   to the replacement and false if vice versa.  If INSERT_AFTER is true, new
   statements will be added after the current statement in GSI, they will be
   added before the statement otherwise.  */

static void
generate_subtree_copies (struct access *access, tree agg,
			 HOST_WIDE_INT top_offset,
			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
			 gimple_stmt_iterator *gsi, bool write,
			 bool insert_after, location_t loc)
{
  do
    {
      if (chunk_size && access->offset >= start_offset + chunk_size)
	return;

      if (access->grp_to_be_replaced
	  && (chunk_size == 0
	      || access->offset + access->size > start_offset))
	{
	  tree expr, repl = get_access_replacement (access);
	  gimple stmt;

	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
				      access, gsi, insert_after);

	  if (write)
	    {
	      if (access->grp_partial_lhs)
		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (repl, expr);
	    }
	  else
	    {
	      TREE_NO_WARNING (repl) = 1;
	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (expr, repl);
	    }
	  gimple_set_location (stmt, loc);

	  if (insert_after)
	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  sra_stats.subtree_copies++;
	}
      else if (write
	       && access->grp_to_be_debug_replaced
	       && (chunk_size == 0
		   || access->offset + access->size > start_offset))
	{
	  gimple ds;
	  tree drhs = build_debug_ref_for_model (loc, agg,
						 access->offset - top_offset,
						 access);
	  ds = gimple_build_debug_bind (get_access_replacement (access),
					drhs, gsi_stmt (*gsi));
	  if (insert_after)
	    gsi_insert_after (gsi, ds, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, ds, GSI_SAME_STMT);
	}

      if (access->first_child)
	generate_subtree_copies (access->first_child, agg, top_offset,
				 start_offset, chunk_size, gsi,
				 write, insert_after, loc);

      access = access->next_sibling;
    }
  while (access);
}
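
/* Editor's illustration (a sketch, not from the original sources): for a
   fully scalarized struct S { int i; float f; } s with replacements named
   s$i and s$f, a write-direction call on the subtree rooted at AGG emits
   roughly

     s$i = AGG.i;
     s$f = AGG.f;

   and the read direction emits the mirrored assignments AGG.i = s$i and
   AGG.f = s$f, placed before or after the statement at GSI as requested by
   INSERT_AFTER.  */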
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is
   the root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
			bool insert_after, location_t loc)

{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      gimple stmt;

      stmt = gimple_build_assign (get_access_replacement (access),
				  build_zero_cst (access->type));
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      gimple_set_location (stmt, loc);
    }
  else if (access->grp_to_be_debug_replaced)
    {
      gimple ds = gimple_build_debug_bind (get_access_replacement (access),
					   build_zero_cst (access->type),
					   gsi_stmt (*gsi));
      if (insert_after)
	gsi_insert_after (gsi, ds, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    init_subtree_with_zero (child, gsi, insert_after, loc);
}
/* Search for an access representative for the given expression EXPR and
   return it or NULL if it cannot be found.  */

static struct access *
get_access_for_expr (tree expr)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
     a different size than the size of its argument and we need the latter
     one.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
  if (max_size == -1 || !DECL_P (base))
    return NULL;

  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  return get_var_base_offset_size_access (base, offset, max_size);
}
/* Replace the expression EXPR with a scalar replacement if there is one and
   generate other statements to do type conversion or subtree copying if
   necessary.  GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on a LHS of a statement or output
   in an assembly statement).  */

static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
{
  location_t loc;
  struct access *access;
  tree type, bfr;

  if (TREE_CODE (*expr) == BIT_FIELD_REF)
    {
      bfr = *expr;
      expr = &TREE_OPERAND (*expr, 0);
    }
  else
    bfr = NULL_TREE;

  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
    expr = &TREE_OPERAND (*expr, 0);
  access = get_access_for_expr (*expr);
  if (!access)
    return false;
  type = TREE_TYPE (*expr);

  loc = gimple_location (gsi_stmt (*gsi));
  if (access->grp_to_be_replaced)
    {
      tree repl = get_access_replacement (access);
      /* If we replace a non-register typed access simply use the original
	 access expression to extract the scalar component afterwards.
	 This happens if scalarizing a function return value or parameter
	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
	 gcc.c-torture/compile/20011217-1.c.

	 We also want to use this when accessing a complex or vector which can
	 be accessed as a different type too, potentially creating a need for
	 type conversion (see PR42196) and when scalarized unions are involved
	 in assembler statements (see PR42398).  */
      if (!useless_type_conversion_p (type, access->type))
	{
	  tree ref;

	  ref = build_ref_for_model (loc, access->base, access->offset, access,
				     NULL, false);

	  if (write)
	    {
	      gimple stmt;

	      if (access->grp_partial_lhs)
		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
						false, GSI_NEW_STMT);
	      stmt = gimple_build_assign (repl, ref);
	      gimple_set_location (stmt, loc);
	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      gimple stmt;

	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      stmt = gimple_build_assign (ref, repl);
	      gimple_set_location (stmt, loc);
	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	    }
	}
      else
	*expr = repl;
      sra_stats.exprs++;
    }
  else if (write && access->grp_to_be_debug_replaced)
    {
      gimple ds = gimple_build_debug_bind (get_access_replacement (access),
					   NULL_TREE,
					   gsi_stmt (*gsi));
      gsi_insert_after (gsi, ds, GSI_NEW_STMT);
    }

  if (access->first_child)
    {
      HOST_WIDE_INT start_offset, chunk_size;
      if (bfr
	  && host_integerp (TREE_OPERAND (bfr, 1), 1)
	  && host_integerp (TREE_OPERAND (bfr, 2), 1))
	{
	  chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
	  start_offset = access->offset
	    + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
	}
      else
	start_offset = chunk_size = 0;

      generate_subtree_copies (access->first_child, access->base, 0,
			       start_offset, chunk_size, gsi, write, write,
			       loc);
    }
  return true;
}
/* Where scalar replacements of the RHS have been written to when a replacement
   of the LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far.  */
				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */
/* Store all replacements in the access tree rooted in TOP_RACC either to their
   base aggregate if there are unscalarized data or directly to LHS of the
   statement that is pointed to by GSI otherwise.  */

static enum unscalarized_data_handling
handle_unscalarized_data_in_subtree (struct access *top_racc,
				     gimple_stmt_iterator *gsi)
{
  if (top_racc->grp_unscalarized_data)
    {
      generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
			       gsi, false, false,
			       gimple_location (gsi_stmt (*gsi)));
      return SRA_UDH_RIGHT;
    }
  else
    {
      tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
      generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
			       0, 0, gsi, false, false,
			       gimple_location (gsi_stmt (*gsi)));
      return SRA_UDH_LEFT;
    }
}
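
/* Editor's illustration (a sketch, not from the original sources): for an
   assignment a = b where b still has unscalarized parts, the replacements
   of b are first flushed back into b itself (SRA_UDH_RIGHT) so that the
   aggregate statement copies up-to-date data; when b is fully covered by
   replacements, they can instead be stored straight into the LHS a
   (SRA_UDH_LEFT) and the aggregate copy becomes redundant.  */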
/* Try to generate statements to load all sub-replacements in an access subtree
   formed by children of LACC from scalar replacements in the TOP_RACC subtree.
   If that is not possible, refresh the TOP_RACC base aggregate and load the
   accesses from it.  LEFT_OFFSET is the offset of the left whole subtree being
   copied.  NEW_GSI is stmt iterator used for statement insertions after the
   original assignment, OLD_GSI is used to insert statements before the
   assignment.  *REFRESHED keeps the information whether we have needed to
   refresh replacements of the LHS and from which side of the assignments this
   takes place.  */

static void
load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
				 HOST_WIDE_INT left_offset,
				 gimple_stmt_iterator *old_gsi,
				 gimple_stmt_iterator *new_gsi,
				 enum unscalarized_data_handling *refreshed)
{
  location_t loc = gimple_location (gsi_stmt (*old_gsi));
  for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
    {
      HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;

      if (lacc->grp_to_be_replaced)
	{
	  struct access *racc;
	  gimple stmt;
	  tree rhs;

	  racc = find_access_in_subtree (top_racc, offset, lacc->size);
	  if (racc && racc->grp_to_be_replaced)
	    {
	      rhs = get_access_replacement (racc);
	      if (!useless_type_conversion_p (lacc->type, racc->type))
		rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);

	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
						true, GSI_SAME_STMT);
	    }
	  else
	    {
	      /* No suitable access on the right hand side, need to load from
		 the aggregate.  See if we have to update it first... */
	      if (*refreshed == SRA_UDH_NONE)
		*refreshed = handle_unscalarized_data_in_subtree (top_racc,
								  old_gsi);

	      if (*refreshed == SRA_UDH_LEFT)
		rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
					   new_gsi, true);
	      else
		rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
					   new_gsi, true);
	      if (lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
						false, GSI_NEW_STMT);
	    }

	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
	  gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
	  gimple_set_location (stmt, loc);
	  sra_stats.subreplacements++;
	}
      else
	{
	  if (*refreshed == SRA_UDH_NONE
	      && lacc->grp_read && !lacc->grp_covered)
	    *refreshed = handle_unscalarized_data_in_subtree (top_racc,
							      old_gsi);
	  if (lacc && lacc->grp_to_be_debug_replaced)
	    {
	      gimple ds;
	      tree drhs;
	      struct access *racc = find_access_in_subtree (top_racc, offset,
							    lacc->size);

	      if (racc && racc->grp_to_be_replaced)
		drhs = get_access_replacement (racc);
	      else if (*refreshed == SRA_UDH_LEFT)
		drhs = build_debug_ref_for_model (loc, lacc->base, lacc->offset,
						  lacc);
	      else if (*refreshed == SRA_UDH_RIGHT)
		drhs = build_debug_ref_for_model (loc, top_racc->base, offset,
						  lacc);
	      else
		drhs = NULL_TREE;
	      ds = gimple_build_debug_bind (get_access_replacement (lacc),
					    drhs, gsi_stmt (*old_gsi));
	      gsi_insert_after (new_gsi, ds, GSI_NEW_STMT);
	    }
	}

      if (lacc->first_child)
	load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
					 old_gsi, new_gsi, refreshed);
    }
}
/* Result code for SRA assignment modification.  */
enum assignment_mod_result { SRA_AM_NONE,      /* nothing done for the stmt */
			     SRA_AM_MODIFIED,  /* stmt changed but not
						  removed */
			     SRA_AM_REMOVED };  /* stmt eliminated */

/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
   to the assignment and GSI is the statement iterator pointing at it.  Returns
   the same values as sra_modify_assign.  */

static enum assignment_mod_result
sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  tree lhs = gimple_assign_lhs (*stmt);
  struct access *acc;
  location_t loc;

  acc = get_access_for_expr (lhs);
  if (!acc)
    return SRA_AM_NONE;

  if (gimple_clobber_p (*stmt))
    {
      /* Remove clobbers of fully scalarized variables, otherwise
	 do nothing.  */
      if (acc->grp_covered)
	{
	  unlink_stmt_vdef (*stmt);
	  gsi_remove (gsi, true);
	  release_defs (*stmt);
	  return SRA_AM_REMOVED;
	}
      else
	return SRA_AM_NONE;
    }

  loc = gimple_location (*stmt);
  if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
    {
      /* I have never seen this code path trigger but if it can happen the
	 following should handle it gracefully.  */
      if (access_has_children_p (acc))
	generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
				 true, true, loc);
      return SRA_AM_MODIFIED;
    }

  if (acc->grp_covered)
    {
      init_subtree_with_zero (acc, gsi, false, loc);
      unlink_stmt_vdef (*stmt);
      gsi_remove (gsi, true);
      release_defs (*stmt);
      return SRA_AM_REMOVED;
    }
  else
    {
      init_subtree_with_zero (acc, gsi, true, loc);
      return SRA_AM_MODIFIED;
    }
}
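
/* Editor's illustration (a sketch, not from the original sources): for

     struct S { int i; float f; } s;
     s = (struct S){0};   // empty-CONSTRUCTOR assignment

   a fully covered s has the statement deleted and each replacement
   zero-initialized separately, roughly

     s$i = 0;
     s$f = 0.0;

   while a partially scalarized s keeps the original statement and the
   zero-initializations of the replacements are added after it.  */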
/* Create and return a new suitable default definition SSA_NAME for RACC which
   is an access describing an uninitialized part of an aggregate that is being
   loaded.  */

static tree
get_repl_default_def_ssa_name (struct access *racc)
{
  return get_or_create_ssa_default_def (cfun, get_access_replacement (racc));
}

/* Return true if REF has a COMPONENT_REF with a bit-field field declaration
   somewhere in it.  */

static bool
contains_bitfld_comp_ref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}

/* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
   bit-field field declaration somewhere in it.  */

static bool
contains_vce_or_bfcref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
	  || (TREE_CODE (ref) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Examine both sides of the assignment statement pointed to by STMT, replace
   them with a scalar replacement if there is one and generate copying of
   replacements if scalarized aggregates have been used in the assignment.  GSI
   is used to hold generated statements for type conversions and subtree
   copying.  */

static enum assignment_mod_result
sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  struct access *lacc, *racc;
  tree lhs, rhs;
  bool modify_this_stmt = false;
  bool force_gimple_rhs = false;
  location_t loc;
  gimple_stmt_iterator orig_gsi = *gsi;

  if (!gimple_assign_single_p (*stmt))
    return SRA_AM_NONE;
  lhs = gimple_assign_lhs (*stmt);
  rhs = gimple_assign_rhs1 (*stmt);

  if (TREE_CODE (rhs) == CONSTRUCTOR)
    return sra_modify_constructor_assign (stmt, gsi);

  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
    {
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
					  gsi, false);
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
					   gsi, true);
      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }

  lacc = get_access_for_expr (lhs);
  racc = get_access_for_expr (rhs);
  if (!lacc && !racc)
    return SRA_AM_NONE;

  loc = gimple_location (*stmt);
  if (lacc && lacc->grp_to_be_replaced)
    {
      lhs = get_access_replacement (lacc);
      gimple_assign_set_lhs (*stmt, lhs);
      modify_this_stmt = true;
      if (lacc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (racc && racc->grp_to_be_replaced)
    {
      rhs = get_access_replacement (racc);
      modify_this_stmt = true;
      if (racc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }
  else if (racc
	   && !racc->grp_unscalarized_data
	   && TREE_CODE (lhs) == SSA_NAME
	   && !access_has_replacements_p (racc))
    {
      rhs = get_repl_default_def_ssa_name (racc);
      modify_this_stmt = true;
      sra_stats.exprs++;
    }

  if (modify_this_stmt)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	{
	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
	     ???  This should move to fold_stmt which we simply should
	     call after building a VIEW_CONVERT_EXPR here.  */
	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
	      && !contains_bitfld_comp_ref_p (lhs))
	    {
	      lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
	      gimple_assign_set_lhs (*stmt, lhs);
	    }
	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
		   && !contains_vce_or_bfcref_p (rhs))
	    rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	    {
	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
				     rhs);
	      if (is_gimple_reg_type (TREE_TYPE (lhs))
		  && TREE_CODE (lhs) != SSA_NAME)
		force_gimple_rhs = true;
	    }
	}
    }

  if (lacc && lacc->grp_to_be_debug_replaced)
    {
      gimple ds = gimple_build_debug_bind (get_access_replacement (lacc),
					   unshare_expr (rhs), *stmt);
      gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  /* From this point on, the function deals with assignments in between
     aggregates when at least one has scalar reductions of some of its
     components.  There are three possible scenarios: 1) both the LHS and RHS
     have to-be-scalarized components, 2) only the RHS has or 3) only the LHS
     has.

     In the first case, we would like to load the LHS components from RHS
     components whenever possible.  If that is not possible, we would like to
     read it directly from the RHS (after updating it by storing in it its own
     components).  If there are some necessary unscalarized data in the LHS,
     those will be loaded by the original assignment too.  If neither of these
     cases happen, the original statement can be removed.  Most of this is done
     by load_assign_lhs_subreplacements.

     In the second case, we would like to store all RHS scalarized components
     directly into LHS and if they cover the aggregate completely, remove the
     statement too.  In the third case, we want the LHS components to be loaded
     directly from the RHS (DSE will remove the original statement if it
     becomes redundant).

     This is a bit complex but manageable when types match and when unions do
     not cause confusion in a way that we cannot really load a component of LHS
     from the RHS or vice versa (the access representing this level can have
     subaccesses that are accessible only through a different union field at a
     higher level - different from the one used in the examined expression).

     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing in it its scalarized components, leave the original
     statement there to do the copying and then load the scalar replacements of
     the LHS.  This is what the first branch does.  */
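
  /* Editor's illustration (a sketch, not from the original sources): a
     type-punning copy such as

       struct A a; struct B b;
       a = VIEW_CONVERT_EXPR<struct A>(b);

     takes this first branch: the RHS replacements are flushed back into b,
     the original aggregate copy is kept to do the transfer, and the LHS
     replacements are re-loaded from a afterwards.  */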
  if (modify_this_stmt
      || gimple_has_volatile_ops (*stmt)
      || contains_vce_or_bfcref_p (rhs)
      || contains_vce_or_bfcref_p (lhs))
    {
      if (access_has_children_p (racc))
	generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
				 gsi, false, false, loc);
      if (access_has_children_p (lacc))
	generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
				 gsi, true, true, loc);
      sra_stats.separate_lhs_rhs_handling++;

      /* This gimplification must be done after generate_subtree_copies,
	 lest we insert the subtree copies in the middle of the gimplified
	 sequence.  */
      if (force_gimple_rhs)
	rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
					true, GSI_SAME_STMT);
      if (gimple_assign_rhs1 (*stmt) != rhs)
	{
	  modify_this_stmt = true;
	  gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
	  gcc_assert (*stmt == gsi_stmt (orig_gsi));
	}

      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }
  else
    {
      if (access_has_children_p (lacc)
	  && access_has_children_p (racc)
	  /* When an access represents an unscalarizable region, it usually
	     represents accesses with variable offset and thus must not be used
	     to generate new memory accesses.  */
	  && !lacc->grp_unscalarizable_region
	  && !racc->grp_unscalarizable_region)
	{
	  gimple_stmt_iterator orig_gsi = *gsi;
	  enum unscalarized_data_handling refreshed;

	  if (lacc->grp_read && !lacc->grp_covered)
	    refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
	  else
	    refreshed = SRA_UDH_NONE;

	  load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
					   &orig_gsi, gsi, &refreshed);
	  if (refreshed != SRA_UDH_RIGHT)
	    {
	      gsi_next (gsi);
	      unlink_stmt_vdef (*stmt);
	      gsi_remove (&orig_gsi, true);
	      release_defs (*stmt);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	}
      else
	{
	  if (access_has_children_p (racc)
	      && !racc->grp_unscalarized_data)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Removing load: ");
		  print_gimple_stmt (dump_file, *stmt, 0, 0);
		}
	      generate_subtree_copies (racc->first_child, lhs,
				       racc->offset, 0, 0, gsi,
				       false, false, loc);
	      gcc_assert (*stmt == gsi_stmt (*gsi));
	      unlink_stmt_vdef (*stmt);
	      gsi_remove (gsi, true);
	      release_defs (*stmt);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	  /* Restore the aggregate RHS from its components so the
	     prevailing aggregate copy does the right thing.  */
	  if (access_has_children_p (racc))
	    generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
				     gsi, false, false, loc);
	  /* Re-load the components of the aggregate copy destination.
	     But use the RHS aggregate to load from to expose more
	     optimization opportunities.  */
	  if (access_has_children_p (lacc))
	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
				     0, 0, gsi, true, true, loc);
	}

      return SRA_AM_NONE;
    }
}
/* Traverse the function body and apply all modifications as decided in
   analyze_all_variable_accesses.  Return true iff the CFG has been
   changed.  */

static bool
sra_modify_function_body (void)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  enum assignment_mod_result assign_result;
	  bool modified = false, deleted = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (stmt);
	      if (*t != NULL_TREE)
		modified |= sra_modify_expr (t, &gsi, false);
	      break;

	    case GIMPLE_ASSIGN:
	      assign_result = sra_modify_assign (&stmt, &gsi);
	      modified |= assign_result == SRA_AM_MODIFIED;
	      deleted = assign_result == SRA_AM_REMOVED;
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= sra_modify_expr (t, &gsi, false);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= sra_modify_expr (t, &gsi, true);
		}
	      break;

	    case GIMPLE_ASM:
	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
		  modified |= sra_modify_expr (t, &gsi, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
		  modified |= sra_modify_expr (t, &gsi, true);
		}
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  if (!deleted)
	    gsi_next (&gsi);
	}
    }

  return cfg_changed;
}
/* Generate statements initializing scalar replacements of parts of function
   parameters.  */

static void
initialize_parameter_reductions (void)
{
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  tree parm;

  gsi = gsi_start (seq);
  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      vec<access_p> *access_vec;
      struct access *access;

      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	continue;
      access_vec = get_base_access_vector (parm);
      if (!access_vec)
	continue;

      for (access = (*access_vec)[0];
	   access;
	   access = access->next_grp)
	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
				 EXPR_LOCATION (parm));
    }

  seq = gsi_seq (gsi);
  if (seq)
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
}
/* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
   it reveals there are components of some aggregates to be scalarized, it runs
   the required transformations.  */

static unsigned int
perform_intra_sra (void)
{
  int ret = 0;
  sra_initialize ();

  if (!find_var_candidates ())
    goto out;

  if (!scan_function ())
    goto out;

  if (!analyze_all_variable_accesses ())
    goto out;

  if (sra_modify_function_body ())
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  initialize_parameter_reductions ();

  statistics_counter_event (cfun, "Scalar replacements created",
			    sra_stats.replacements);
  statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
  statistics_counter_event (cfun, "Subtree copy stmts",
			    sra_stats.subtree_copies);
  statistics_counter_event (cfun, "Subreplacement stmts",
			    sra_stats.subreplacements);
  statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
  statistics_counter_event (cfun, "Separate LHS and RHS handling",
			    sra_stats.separate_lhs_rhs_handling);

 out:
  sra_deinitialize ();
  return ret;
}

/* Perform early intraprocedural SRA.  */
static unsigned int
early_intra_sra (void)
{
  sra_mode = SRA_MODE_EARLY_INTRA;
  return perform_intra_sra ();
}

/* Perform "late" intraprocedural SRA.  */
static unsigned int
late_intra_sra (void)
{
  sra_mode = SRA_MODE_INTRA;
  return perform_intra_sra ();
}

static bool
gate_intra_sra (void)
{
  return flag_tree_sra != 0 && dbg_cnt (tree_sra);
}
struct gimple_opt_pass pass_sra_early =
{
 {
  GIMPLE_PASS,
  "esra",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_intra_sra,			/* gate */
  early_intra_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_sra =
{
 {
  GIMPLE_PASS,
  "sra",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_intra_sra,			/* gate */
  late_intra_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_update_address_taken,		/* todo_flags_start */
  TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};
/* Return true iff PARM (which must be a parm_decl) is an unused scalar
   parameter.  */

static bool
is_unused_scalar_param (tree parm)
{
  tree name;
  return (is_gimple_reg (parm)
	  && (!(name = ssa_default_def (cfun, parm))
	      || has_zero_uses (name)));
}
/* Scan immediate uses of a default definition SSA name of a parameter PARM and
   examine whether there are any direct or otherwise infeasible ones.  If so,
   return true, otherwise return false.  PARM must be a gimple register with a
   non-NULL default definition.  */

static bool
ptr_parm_has_direct_uses (tree parm)
{
  imm_use_iterator ui;
  gimple stmt;
  tree name = ssa_default_def (cfun, parm);
  bool ret = false;

  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
    {
      int uses_ok = 0;
      use_operand_p use_p;

      if (is_gimple_debug (stmt))
	continue;

      /* Valid uses include dereferences on the lhs and the rhs.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  while (handled_component_p (lhs))
	    lhs = TREE_OPERAND (lhs, 0);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_OPERAND (lhs, 0) == name
	      && integer_zerop (TREE_OPERAND (lhs, 1))
	      && types_compatible_p (TREE_TYPE (lhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (lhs))
	    uses_ok++;
	}
      if (gimple_assign_single_p (stmt))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  while (handled_component_p (rhs))
	    rhs = TREE_OPERAND (rhs, 0);
	  if (TREE_CODE (rhs) == MEM_REF
	      && TREE_OPERAND (rhs, 0) == name
	      && integer_zerop (TREE_OPERAND (rhs, 1))
	      && types_compatible_p (TREE_TYPE (rhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (rhs))
	    uses_ok++;
	}
      else if (is_gimple_call (stmt))
	{
	  unsigned i;
	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      while (handled_component_p (arg))
		arg = TREE_OPERAND (arg, 0);
	      if (TREE_CODE (arg) == MEM_REF
		  && TREE_OPERAND (arg, 0) == name
		  && integer_zerop (TREE_OPERAND (arg, 1))
		  && types_compatible_p (TREE_TYPE (arg),
					 TREE_TYPE (TREE_TYPE (name)))
		  && !TREE_THIS_VOLATILE (arg))
		uses_ok++;
	    }
	}

      /* If the number of valid uses does not match the number of
	 uses in this stmt there is an unhandled use.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
	--uses_ok;

      if (uses_ok != 0)
	ret = true;

      if (ret)
	BREAK_FROM_IMM_USE_STMT (ui);
    }

  return ret;
}
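
/* Editor's illustration (a sketch, not from the original sources): for a
   pointer parameter p,

     int f (int *p) { return *p; }        // only dereferenced: feasible
     int g (int *p) { return p == 0; }    // p used directly: infeasible

   since after IPA-SRA the caller would pass *p by value and there would be
   no pointer left to compare.  */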
/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */

static bool
find_param_candidates (void)
{
  tree parm;
  int count = 0;
  bool ret = false;
  const char *msg;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);
      void **slot;

      count++;

      if (TREE_THIS_VOLATILE (parm)
	  || TREE_ADDRESSABLE (parm)
	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
	continue;

      if (is_unused_scalar_param (parm))
	{
	  ret = true;
	  continue;
	}

      if (POINTER_TYPE_P (type))
	{
	  type = TREE_TYPE (type);

	  if (TREE_CODE (type) == FUNCTION_TYPE
	      || TYPE_VOLATILE (type)
	      || (TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_NONALIASED_COMPONENT (type))
	      || !is_gimple_reg (parm)
	      || is_va_list_type (type)
	      || ptr_parm_has_direct_uses (parm))
	    continue;
	}
      else if (!AGGREGATE_TYPE_P (type))
	continue;

      if (!COMPLETE_TYPE_P (type)
	  || !host_integerp (TYPE_SIZE (type), 1)
	  || tree_low_cst (TYPE_SIZE (type), 1) == 0
	  || (AGGREGATE_TYPE_P (type)
	      && type_internals_preclude_sra_p (type, &msg)))
	continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
      slot = htab_find_slot_with_hash (candidates, parm,
				       DECL_UID (parm), INSERT);
      *slot = (void *) parm;

      ret = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
	  print_generic_expr (dump_file, parm, 0);
	  fprintf (dump_file, "\n");
	}
    }

  func_param_count = count;
  return ret;
}
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
   maybe_modified.  */

static bool
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
		     void *data)
{
  struct access *repr = (struct access *) data;

  repr->grp_maybe_modified = 1;
  return true;
}

/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */

static void
analyze_modified_params (vec<access_p> representatives)
{
  int i;

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr;

      for (repr = representatives[i];
	   repr;
	   repr = repr->next_grp)
	{
	  struct access *access;
	  bitmap visited;
	  ao_ref ar;

	  if (no_accesses_p (repr))
	    continue;
	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
	      || repr->grp_maybe_modified)
	    continue;

	  ao_ref_init (&ar, repr->expr);
	  visited = BITMAP_ALLOC (NULL);
	  for (access = repr; access; access = access->next_sibling)
	    {
	      /* All accesses are read ones, otherwise grp_maybe_modified would
		 be trivially set.  */
	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
				  mark_maybe_modified, repr, &visited);
	      if (repr->grp_maybe_modified)
		break;
	    }

	  BITMAP_FREE (visited);
	}
    }
}
/* Propagate distances in bb_dereferences in the opposite direction than the
   control flow edges, in each step storing the maximum of the current value
   and the minimum of all successors.  These steps are repeated until the table
   stabilizes.  Note that BBs which might terminate the functions (according to
   final_bbs bitmap) are never updated in this way.  */

static void
propagate_dereference_distances (void)
{
  vec<basic_block> queue;
  basic_block bb;

  queue.create (last_basic_block_for_function (cfun));
  queue.quick_push (ENTRY_BLOCK_PTR);
  FOR_EACH_BB (bb)
    {
      queue.quick_push (bb);
      bb->aux = bb;
    }

  while (!queue.is_empty ())
    {
      edge_iterator ei;
      edge e;
      bool change = false;
      int i;

      bb = queue.pop ();
      bb->aux = NULL;

      if (bitmap_bit_p (final_bbs, bb->index))
	continue;

      for (i = 0; i < func_param_count; i++)
	{
	  int idx = bb->index * func_param_count + i;
	  bool first = true;
	  HOST_WIDE_INT inh = 0;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      int succ_idx = e->dest->index * func_param_count + i;

	      if (e->src == EXIT_BLOCK_PTR)
		continue;

	      if (first)
		{
		  first = false;
		  inh = bb_dereferences [succ_idx];
		}
	      else if (bb_dereferences [succ_idx] < inh)
		inh = bb_dereferences [succ_idx];
	    }

	  if (!first && bb_dereferences[idx] < inh)
	    {
	      bb_dereferences[idx] = inh;
	      change = true;
	    }
	}

      if (change && !bitmap_bit_p (final_bbs, bb->index))
	FOR_EACH_EDGE (e, ei, bb->preds)
	  {
	    if (e->src->aux)
	      continue;

	    e->src->aux = e->src;
	    queue.quick_push (e->src);
	  }
    }
}
/* Dump a dereferences TABLE with heading STR to file F.  */

static void
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
{
  basic_block bb;

  fprintf (dump_file, str);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
      if (bb != EXIT_BLOCK_PTR)
	{
	  int i;
	  for (i = 0; i < func_param_count; i++)
	    {
	      int idx = bb->index * func_param_count + i;
	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
	    }
	}
      fprintf (f, "\n");
    }
  fprintf (dump_file, "\n");
}
/* Determine what (parts of) parameters passed by reference that are not
   assigned to are not certainly dereferenced in this function and thus the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   The dereferenced maximum "distance," i.e. the offset + size of the accessed
   part, is calculated for each pointer parameter, rather than a simple
   boolean, to handle cases when only a fraction of the whole aggregate is
   allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereferences.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */

static void
analyze_caller_dereference_legality (vec<access_p> representatives)
{
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table before propagation:\n",
			     bb_dereferences);

  propagate_dereference_distances ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table after propagation:\n",
			     bb_dereferences);

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];
      int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;

      if (!repr || no_accesses_p (repr))
	continue;

      do
	{
	  if ((repr->offset + repr->size) > bb_dereferences[idx])
	    repr->grp_not_necessarilly_dereferenced = 1;
	  repr = repr->next_grp;
	}
      while (repr);
    }
}
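
/* Editor's illustration (a sketch, not from the original sources): in

     int f (int *p, int flag)
     {
       if (flag)
	 return *p;
       return 0;
     }

   *p is not dereferenced on every path, so hoisting the load into every
   caller could introduce a fault the original program never had; such a
   representative is marked grp_not_necessarilly_dereferenced.  */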
/* Return the representative access for the parameter declaration PARM if it is
   a scalar passed by reference which is not written to and the pointer value
   is not used directly.  Thus, if it is legal to dereference it in the caller
   and we can rule out modifications through aliases, such parameter should be
   turned into one passed by value.  Return NULL otherwise.  */

static struct access *
unmodified_by_ref_scalar_representative (tree parm)
{
  int i, access_count;
  struct access *repr;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  gcc_assert (access_vec);
  repr = (*access_vec)[0];
  if (repr->write)
    return NULL;
  repr->group_representative = repr;

  access_count = access_vec->length ();
  for (i = 1; i < access_count; i++)
    {
      struct access *access = (*access_vec)[i];
      if (access->write)
	return NULL;
      access->group_representative = repr;
      access->next_sibling = repr->next_sibling;
      repr->next_sibling = access;
    }

  repr->grp_read = 1;
  repr->grp_scalar_ptr = 1;
  return repr;
}
/* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
   associated with.  REQ_ALIGN is the minimum required alignment.  */

static bool
access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
{
  unsigned int exp_align;
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
     is incompatible assign in a call statement (and possibly even in asm
     statements).  This can be relaxed by using a new temporary but only for
     non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
     intraprocedural SRA we deal with this by keeping the old aggregate around,
     something we cannot do in IPA-SRA.)  */
  if (access->write
      && (is_gimple_call (access->stmt)
	  || gimple_code (access->stmt) == GIMPLE_ASM))
    return true;

  exp_align = get_object_alignment (access->expr);
  if (exp_align < req_align)
    return true;

  return false;
}
/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special ptr value meaning
   there are no accesses for this parameter if that is the case and return the
   first representative otherwise.  Set *RO_GRP if there is a group of accesses
   with only read (i.e. no write) accesses.  */

static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = access_vec->length ();

  access_vec->qsort (compare_access_positions);

  i = 0;
  total_size = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      tree a1_alias_type;
      access = (*access_vec)[i];
      modification = access->write;
      if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
	return NULL;
      a1_alias_type = reference_alias_ptr_type (access->expr);

      /* Access is about to become group representative unless we find some
	 nasty overlap which would preclude us from breaking this parameter
	 apart.  */

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = (*access_vec)[j];
	  if (ac2->offset != access->offset)
	    {
	      /* All or nothing law for parameters.  */
	      if (access->offset + access->size > ac2->offset)
		return NULL;
	      else
		break;
	    }
	  else if (ac2->size != access->size)
	    return NULL;

	  if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
	      || (ac2->type != access->type
		  && (TREE_ADDRESSABLE (ac2->type)
		      || TREE_ADDRESSABLE (access->type)))
	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
	    return NULL;

	  modification |= ac2->write;
	  ac2->group_representative = access;
	  ac2->next_sibling = access->next_sibling;
	  access->next_sibling = ac2;
	  j++;
	}

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
	*ro_grp = true;
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
  else
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}
/* Decide whether parameters with representative accesses given by REPR should
   be reduced into components.  */

static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  gcc_assert (cur_parm_size > 0);

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
	dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);

      /* Taking the address of a non-addressable field is verboten.  */
      if (by_ref && repr->non_addressable)
	return 0;

      /* Do not decompose a non-BLKmode param in a way that would
	 create BLKmode params.  Especially for by-reference passing
	 (thus, pointer-type param) this is hardly worthwhile.  */
      if (DECL_MODE (parm) != BLKmode
	  && TYPE_MODE (repr->type) == BLKmode)
	return 0;

      if (!by_ref || (!repr->grp_maybe_modified
		      && !repr->grp_not_necessarilly_dereferenced))
	total_size += repr->size;
      else
	total_size += cur_parm_size;

      new_param_count++;
    }

  gcc_assert (new_param_count > 0);

  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
		       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
	fprintf (dump_file, "    ....will be split into %i components\n",
		 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}
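
/* Editor's illustration (assumed parameter values, not from the original
   sources): for a by-reference struct S { int i; int j; int unused[6]; }
   of which only i and j are read, total_size is 64 bits against an agg_size
   of 256; assuming a pointer size of 64 bits and an
   ipa-sra-ptr-growth-factor of 2, parm_size_limit is 128, both tests pass
   and the parameter is split into two scalar components passed by
   value.  */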
/* The order of the following enums is important, we need to do extra work for
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
			   MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };

/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found.  */

static enum ipa_splicing_result
splice_all_param_accesses (vec<access_p> &representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  representatives.create (func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
	{
	  representatives.quick_push (&no_accesses_representant);
	  if (result == NO_GOOD_ACCESS)
	    result = UNUSED_PARAMS;
	}
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  repr = unmodified_by_ref_scalar_representative (parm);
	  representatives.quick_push (repr);
	  if (repr)
	    result = UNMODIF_BY_REF_ACCESSES;
	}
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  bool ro_grp = false;
	  repr = splice_param_accesses (parm, &ro_grp);
	  representatives.quick_push (repr);

	  if (repr && !no_accesses_p (repr))
	    {
	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
		{
		  if (ro_grp)
		    result = UNMODIF_BY_REF_ACCESSES;
		  else if (result < MODIF_BY_REF_ACCESSES)
		    result = MODIF_BY_REF_ACCESSES;
		}
	      else if (result < BY_VAL_ACCESSES)
		result = BY_VAL_ACCESSES;
	    }
	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
	    result = UNUSED_PARAMS;
	}
      else
	representatives.quick_push (NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      representatives.release ();
      return NO_GOOD_ACCESS;
    }

  return result;
}
/* Return the index of BASE in PARMS.  Abort if it is not found.  */

static int
get_param_index (tree base, vec<tree> parms)
{
  int i, len;

  len = parms.length ();
  for (i = 0; i < len; i++)
    if (parms[i] == base)
      return i;
  gcc_unreachable ();
}
/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to first
   representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
   final number of adjustments.  */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (vec<access_p> representatives,
				       int adjustments_count)
{
  vec<tree> parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments.create (adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
    {
      struct access *repr = representatives[i];

      if (!repr || no_accesses_p (repr))
	{
	  struct ipa_parm_adjustment adj;

	  memset (&adj, 0, sizeof (adj));
	  adj.base_index = get_param_index (parm, parms);
	  adj.base = parm;
	  if (!repr)
	    adj.copy_param = 1;
	  else
	    adj.remove_param = 1;
	  adjustments.quick_push (adj);
	}
      else
	{
	  struct ipa_parm_adjustment adj;
	  int index = get_param_index (parm, parms);

	  for (; repr; repr = repr->next_grp)
	    {
	      memset (&adj, 0, sizeof (adj));
	      gcc_assert (repr->base == parm);
	      adj.base_index = index;
	      adj.base = repr->base;
	      adj.type = repr->type;
	      adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
	      adj.offset = repr->offset;
	      adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
			    && (repr->grp_maybe_modified
				|| repr->grp_not_necessarilly_dereferenced));
	      adjustments.quick_push (adj);
	    }
	}
    }
  parms.release ();
  return adjustments;
}
/* Analyze the collected accesses and produce a plan of what to do with the
   parameters in the form of adjustments, NULL meaning nothing.  */

static ipa_parm_adjustment_vec
analyze_all_param_accesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  vec<access_p> representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return ipa_parm_adjustment_vec ();

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];

      if (repr && !no_accesses_p (repr))
        {
          if (repr->grp_scalar_ptr)
            {
              adjustments_count++;
              if (repr->grp_not_necessarilly_dereferenced
                  || repr->grp_maybe_modified)
                representatives[i] = NULL;
              else
                {
                  proceed = true;
                  sra_stats.scalar_by_ref_to_by_val++;
                }
            }
          else
            {
              int new_components = decide_one_param_reduction (repr);

              if (new_components == 0)
                {
                  representatives[i] = NULL;
                  adjustments_count++;
                }
              else
                {
                  adjustments_count += new_components;
                  sra_stats.aggregate_params_reduced++;
                  sra_stats.param_reductions_created += new_components;
                  proceed = true;
                }
            }
        }
      else
        {
          if (no_accesses_p (repr))
            {
              proceed = true;
              sra_stats.deleted_unused_parameters++;
            }
          adjustments_count++;
        }
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
                                                         adjustments_count);
  else
    adjustments = ipa_parm_adjustment_vec ();

  representatives.release ();
  return adjustments;
}
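
/* A made-up example of a complete plan: for

     int h (int *p, struct S v, int unused);

   where *P is read but never modified, V.x and V.y are the only accessed
   parts of V and UNUSED is dead, the resulting vector would describe a
   by-value scalar replacing P, two new scalars for V.x and V.y and a
   remove_param entry for UNUSED, so the new version could roughly look like

     int h.isra.0 (int p, int x, int y);

   The names and numbering above are illustrative; the real replacements are
   created later by ipa_modify_formal_parameters.  */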
/* If a parameter replacement identified by ADJ does not yet exist in the form
   of a declaration, create it and record it, otherwise return the previously
   created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;

  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}
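
/* For example (hypothetically), if the dead parameter FOO is still defined
   by some statement in the body, the substitute created above is a new
   temporary register of FOO's type, created with the "ISR" prefix and given
   a prettier DECL_NAME derived from "foo".  It is created at most once per
   adjustment and cached in new_ssa_base.  */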
/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
   adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];
      if (!adj->copy_param && adj->base == base)
        return adj;
    }

  return NULL;
}
/* If the statement STMT defines an SSA_NAME of a parameter which is to be
   removed because its value is not used, replace the SSA_NAME with one
   relating to a created VAR_DECL, together with all of its uses, and return
   true.  ADJUSTMENTS is a pointer to an adjustments vector.  */

static bool
replace_removed_params_ssa_names (gimple stmt,
                                  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree lhs, decl, repl, name;

  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else if (is_gimple_call (stmt))
    lhs = gimple_call_lhs (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  decl = SSA_NAME_VAR (lhs);
  if (decl == NULL_TREE
      || TREE_CODE (decl) != PARM_DECL)
    return false;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return false;

  repl = get_replaced_param_substitute (adj);
  name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, "\n");
    }

  if (is_gimple_assign (stmt))
    gimple_assign_set_lhs (stmt, name);
  else if (is_gimple_call (stmt))
    gimple_call_set_lhs (stmt, name);
  else
    gimple_phi_set_result (stmt, name);

  replace_uses_by (lhs, name);
  release_ssa_name (lhs);
  return true;
}
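
/* A hypothetical GIMPLE-level example: if parameter I is being removed but
   the body still contains

     i_7 = i_3 + 1;

   where i_7 is based on I, the LHS is rewritten to a fresh SSA name of the
   substitute VAR_DECL and all uses of i_7 are redirected to that name, so
   no SSA name of the removed PARM_DECL remains defined by this statement.  */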
/* If the expression *EXPR should be replaced by a reduction of a parameter,
   do so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function
   will leave incompatibility issues to the caller.  Return true iff the
   expression was modified.  */

static bool
sra_ipa_modify_expr (tree *expr, bool convert,
                     ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  struct ipa_parm_adjustment *adj, *cand = NULL;
  HOST_WIDE_INT offset, size, max_size;
  tree base, src;

  len = adjustments.length ();

  if (TREE_CODE (*expr) == BIT_FIELD_REF
      || TREE_CODE (*expr) == IMAGPART_EXPR
      || TREE_CODE (*expr) == REALPART_EXPR)
    {
      expr = &TREE_OPERAND (*expr, 0);
      convert = true;
    }

  base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return false;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).low * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return false;

  for (i = 0; i < len; i++)
    {
      adj = &adjustments[i];

      if (adj->base == base
          && (adj->offset == offset || adj->remove_param))
        {
          cand = adj;
          break;
        }
    }
  if (!cand || cand->copy_param || cand->remove_param)
    return false;

  if (cand->by_ref)
    src = build_simple_mem_ref (cand->reduction);
  else
    src = cand->reduction;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
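
/* To sketch the effect on a made-up example: with an adjustment reducing
   parameter P to its component at bit offset 0, an expression such as
   p_1(D)->f referring to that component is replaced by the new reduction
   parameter directly, or by a MEM_REF of it when the component is still
   passed by reference, with a VIEW_CONVERT_EXPR wrapped around it if the
   types differ and CONVERT is true.  */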
/* If the statement pointed to by STMT_PTR contains any expressions that need
   to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle
   any potential type incompatibilities (GSI is used to accommodate conversion
   statements and must point to the statement).  Return true iff the statement
   was modified.  */

static bool
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
                       ipa_parm_adjustment_vec adjustments)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = sra_ipa_modify_expr (rhs_p, false, adjustments);
  any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
        {
          if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
            {
              /* V_C_Es of constructors can cause trouble (PR 42714).  */
              if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
                *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
              else
                *rhs_p = build_constructor (TREE_TYPE (*lhs_p), NULL);
            }
          else
            new_rhs = fold_build1_loc (gimple_location (stmt),
                                       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
                                       *rhs_p);
        }
      else if (REFERENCE_CLASS_P (*rhs_p)
               && is_gimple_reg_type (TREE_TYPE (*lhs_p))
               && !is_gimple_reg (*lhs_p))
        /* This can happen when an assignment in between two single field
           structures is turned into an assignment in between two pointers to
           scalars (PR 42237).  */
        new_rhs = *rhs_p;

      if (new_rhs)
        {
          tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
                                               true, GSI_SAME_STMT);

          gimple_assign_set_rhs_from_tree (gsi, tmp);
        }

      return true;
    }

  return false;
}
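
/* For illustration (hypothetical): if in

     D.1234 = v;

   only the RHS was replaced by its scalar reduction, the two sides no
   longer have compatible types, so the RHS is wrapped in a
   VIEW_CONVERT_EXPR to the LHS type and gimplified with
   force_gimple_operand_gsi right before the statement.  */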
/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */

static bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          bool modified = false;
          tree *t;
          unsigned i;

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval_ptr (stmt);
              if (*t != NULL_TREE)
                modified |= sra_ipa_modify_expr (t, true, adjustments);
              break;

            case GIMPLE_ASSIGN:
              modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
              modified |= replace_removed_params_ssa_names (stmt, adjustments);
              break;

            case GIMPLE_CALL:
              /* Operands must be processed before the lhs.  */
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                {
                  t = gimple_call_arg_ptr (stmt, i);
                  modified |= sra_ipa_modify_expr (t, true, adjustments);
                }

              if (gimple_call_lhs (stmt))
                {
                  t = gimple_call_lhs_ptr (stmt);
                  modified |= sra_ipa_modify_expr (t, false, adjustments);
                  modified |= replace_removed_params_ssa_names (stmt,
                                                                adjustments);
                }
              break;

            case GIMPLE_ASM:
              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
                  modified |= sra_ipa_modify_expr (t, true, adjustments);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
                  modified |= sra_ipa_modify_expr (t, false, adjustments);
                }
              break;

            default:
              break;
            }

          if (modified)
            {
              update_stmt (stmt);
              if (maybe_clean_eh_stmt (stmt)
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
                cfg_changed = true;
            }
          gsi_next (&gsi);
        }
    }

  return cfg_changed;
}
/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
      gsip = &gsi;
    }
  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt, def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;

      adj = &adjustments[i];
      if (adj->copy_param || !is_gimple_reg (adj->base))
        continue;
      name = ssa_default_def (cfun, adj->base);
      vexpr = NULL;
      if (name)
        FOR_EACH_IMM_USE_STMT (stmt, ui, name)
          {
            /* All other users must have been removed by
               ipa_sra_modify_function_body.  */
            gcc_assert (is_gimple_debug (stmt));
            if (vexpr == NULL && gsip != NULL)
              {
                gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
                vexpr = make_node (DEBUG_EXPR_DECL);
                def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
                                                           NULL);
                DECL_ARTIFICIAL (vexpr) = 1;
                TREE_TYPE (vexpr) = TREE_TYPE (name);
                DECL_MODE (vexpr) = DECL_MODE (adj->base);
                gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
              }
            if (vexpr)
              {
                FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
                  SET_USE (use_p, vexpr);
              }
            else
              gimple_debug_bind_reset_value (stmt);
            update_stmt (stmt);
          }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (adj->base))
        {
          copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                             VAR_DECL, DECL_NAME (adj->base),
                             TREE_TYPE (adj->base));
          if (DECL_PT_UID_SET_P (adj->base))
            SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
          TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
          TREE_READONLY (copy) = TREE_READONLY (adj->base);
          TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
          DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
          DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
          DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
          DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
          DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
          SET_DECL_RTL (copy, 0);
          TREE_USED (copy) = 1;
          DECL_CONTEXT (copy) = current_function_decl;
          add_local_decl (cfun, copy);
          DECL_CHAIN (copy)
            = BLOCK_VARS (DECL_INITIAL (current_function_decl));
          BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
        }
      if (gsip != NULL && copy && target_for_debug_bind (adj->base))
        {
          gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
          if (vexpr)
            def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
          else
            def_temp = gimple_build_debug_source_bind (copy, adj->base,
                                                       NULL);
          gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
        }
    }
}
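
/* A sketch of the intended effect on debug info (illustrative only): for a
   removed parameter N whose default definition n_1(D) is referenced solely
   by

     # DEBUG n => n_1(D)

   a DEBUG_EXPR_DECL is source-bound to N at the start of the function and
   substituted for n_1(D) in the debug statement; when no insertion point is
   available, the bound value is simply reset.  */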
/* Return false iff all callers have at least as many actual arguments as
   there are formal parameters in the current function.  */

static bool
not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
                                         void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!callsite_has_enough_arguments_p (cs->call_stmt))
      return true;

  return false;
}
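
/* This can trigger for example with pre-standard code along these
   (hypothetical) lines:

     extern int foo ();
     ... foo (1); ...
     int foo (int a, int b) { ... }

   The call site provides fewer actual arguments than FOO has formals, so
   the predicate returns true and FOO is left alone by IPA-SRA.  */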
/* Convert all callers of NODE.  */

static bool
convert_callers_for_node (struct cgraph_node *node,
                          void *data)
{
  ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl));

      if (dump_file)
        fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
                 cs->caller->uid, cs->callee->uid,
                 xstrdup (cgraph_node_name (cs->caller)),
                 xstrdup (cgraph_node_name (cs->callee)));

      ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
        && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl)))
      compute_inline_parameters (cs->caller, true);
  BITMAP_FREE (recomputed_callers);

  return true;
}
/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, tree old_decl,
                 ipa_parm_adjustment_vec adjustments)
{
  basic_block this_block;

  cgraph_for_node_and_aliases (node, convert_callers_for_node,
                               &adjustments, false);

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree call_fndecl;
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          call_fndecl = gimple_call_fndecl (stmt);
          if (call_fndecl == old_decl)
            {
              if (dump_file)
                fprintf (dump_file, "Adjusting recursive call");
              gimple_call_set_fndecl (stmt, node->symbol.decl);
              ipa_modify_call_arguments (NULL, stmt, adjustments);
            }
        }
    }
}
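
/* The explicit scan above matters for self-recursion: a call like

     bar (p, n - 1);

   inside BAR itself still refers to the old decl in the freshly created
   version, so it is located by walking the body, retargeted to the new
   decl and its argument list rewritten by ipa_modify_call_arguments
   (example call illustrative only).  */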
/* Perform all the modifications required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  Return true iff the CFG has been
   changed.  */

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  bool cfg_changed;
  vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);

  rebuild_cgraph_edges ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();

  new_node = cgraph_function_versioning (node, redirect_callers,
                                         NULL,
                                         NULL, false, NULL, NULL, "isra");
  redirect_callers.release ();

  push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->symbol.decl, adjustments);
  cgraph_make_node_local (new_node);
  return cfg_changed;
}
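
/* Putting it together on a made-up example: versioning

     static int baz (struct S *p) { return p->x; }

   with an adjustment reducing P to its X component creates a clone roughly
   of the form

     static int baz.isra.0 (int x) { return x; }

   after which every caller is rewritten to load p->x itself and pass the
   value.  The clone name and numbering come from cgraph_function_versioning
   and are shown here only for illustration.  */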
/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
        fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!node->local.can_change_signature)
    {
      if (dump_file)
        fprintf (dump_file, "Function cannot change signature.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->symbol.decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
      && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
        fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!node->callers)
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
        fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
    return false;

  return true;
}
/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
        fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (cgraph_for_node_and_aliases (node,
                                   not_all_callers_have_enough_arguments_p,
                                   NULL, true))
    {
      if (dump_file)
        fprintf (dump_file, "There are callers with insufficient number of "
                 "arguments.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
                              func_param_count
                              * last_basic_block_for_function (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls itself with insufficient "
                 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_accesses ();
  if (!adjustments.exists ())
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  adjustments.release ();

  statistics_counter_event (cfun, "Unused parameters deleted",
                            sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
                            sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
                            sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
                            sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}
/* Return true iff early IPA-SRA shall be performed.  */

static bool
ipa_early_sra_gate (void)
{
  return flag_ipa_sra && dbg_cnt (eipa_sra);
}

struct gimple_opt_pass pass_early_ipa_sra =
{
 {
  GIMPLE_PASS,
  "eipa_sra",                           /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  ipa_early_sra_gate,                   /* gate */
  ipa_early_sra,                        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IPA_SRA,                           /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_symtab                      /* todo_flags_finish */
 }
};