re PR tree-optimization/50569 (unaligned memory accesses generated for memcpy)
[gcc.git] / gcc / tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
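
/* As a purely illustrative sketch (not part of this file's interfaces), the
transformation turns code such as

     struct point { int x; int y; } p;
     p.x = 1;
     p.y = 2;
     return p.x + p.y;

into roughly

     int p$x = 1;
     int p$y = 2;
     return p$x + p$y;

provided p does not need to live in memory and no two accesses overlap only
partially. The "$" naming matches what make_fancy_name below produces for
the replacement variables. */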
73
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "gimple.h"
81 #include "cgraph.h"
82 #include "tree-flow.h"
83 #include "ipa-prop.h"
84 #include "tree-pretty-print.h"
85 #include "statistics.h"
86 #include "tree-dump.h"
87 #include "timevar.h"
88 #include "params.h"
89 #include "target.h"
90 #include "flags.h"
91 #include "dbgcnt.h"
92 #include "tree-inline.h"
93 #include "gimple-pretty-print.h"
94 #include "ipa-inline.h"
95
96 /* Enumeration of all aggregate reductions we can do. */
97 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
98 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
99 SRA_MODE_INTRA }; /* late intraprocedural SRA */
100
101 /* Global variable describing which aggregate reduction we are performing at
102 the moment. */
103 static enum sra_mode sra_mode;
104
105 struct assign_link;
106
107 /* ACCESS represents each access to an aggregate variable (as a whole or a
108 part). It can also represent a group of accesses that refer to exactly the
109 same fragment of an aggregate (i.e. those that have exactly the same offset
110 and size). Such representatives for a single aggregate, once determined,
111 are linked in a linked list and have the group fields set.
112
113 Moreover, when doing intraprocedural SRA, a tree is built from those
114 representatives (by the means of first_child and next_sibling pointers), in
115 which all items in a subtree are "within" the root, i.e. their offset is
116 greater or equal to offset of the root and offset+size is smaller or equal
117 to offset+size of the root. Children of an access are sorted by offset.
118
119 Note that accesses to parts of vector and complex number types are always
120 represented by an access to the whole complex number or vector. It is the
121 duty of the modifying functions to replace them appropriately. */
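
/* An illustrative sketch of such a tree (hypothetical types): given

     struct inner { int a; int b; };
     struct outer { struct inner in; int c; } o;

with 32-bit ints and accesses to o.in, o.in.a, o.in.b and o.c, the
representative for o.in (offset 0, size 64) has o.in.a (offset 0, size 32)
as its first child and o.in.b (offset 32, size 32) as that child's next
sibling, while o.c (offset 64, size 32) is the next root in the chain of
group representatives. */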
122
123 struct access
124 {
125 /* Values returned by `get_ref_base_and_extent' for each component reference.
126 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
127 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
128 HOST_WIDE_INT offset;
129 HOST_WIDE_INT size;
130 tree base;
131
132 /* Expression. It is context dependent so do not use it to create new
133 expressions to access the original aggregate. See PR 42154 for a
134 testcase. */
135 tree expr;
136 /* Type. */
137 tree type;
138
139 /* The statement this access belongs to. */
140 gimple stmt;
141
142 /* Next group representative for this aggregate. */
143 struct access *next_grp;
144
145 /* Pointer to the group representative. Pointer to itself if the struct is
146 the representative. */
147 struct access *group_representative;
148
149 /* If this access has any children (in terms of the definition above), this
150 points to the first one. */
151 struct access *first_child;
152
153 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
154 described above. In IPA-SRA this is a pointer to the next access
155 belonging to the same group (having the same representative). */
156 struct access *next_sibling;
157
158 /* Pointers to the first and last element in the linked list of assign
159 links. */
160 struct assign_link *first_link, *last_link;
161
162 /* Pointer to the next access in the work queue. */
163 struct access *next_queued;
164
165 /* Replacement variable for this access "region." Never to be accessed
166 directly, always only by means of get_access_replacement() and only
167 when the grp_to_be_replaced flag is set. */
168 tree replacement_decl;
169
170 /* Is this particular access a write access? */
171 unsigned write : 1;
172
173 /* Is this access an access to a non-addressable field? */
174 unsigned non_addressable : 1;
175
176 /* Is this access currently in the work queue? */
177 unsigned grp_queued : 1;
178
179 /* Does this group contain a write access? This flag is propagated down the
180 access tree. */
181 unsigned grp_write : 1;
182
183 /* Does this group contain a read access? This flag is propagated down the
184 access tree. */
185 unsigned grp_read : 1;
186
187 /* Does this group contain a read access that comes from an assignment
188 statement? This flag is propagated down the access tree. */
189 unsigned grp_assignment_read : 1;
190
191 /* Does this group contain a write access that comes from an assignment
192 statement? This flag is propagated down the access tree. */
193 unsigned grp_assignment_write : 1;
194
195 /* Does this group contain a read access through a scalar type? This flag is
196 not propagated in the access tree in any direction. */
197 unsigned grp_scalar_read : 1;
198
199 /* Does this group contain a write access through a scalar type? This flag
200 is not propagated in the access tree in any direction. */
201 unsigned grp_scalar_write : 1;
202
203 /* Is this access an artificial one created to scalarize some record
204 entirely? */
205 unsigned grp_total_scalarization : 1;
206
207 /* Other passes of the analysis use this bit to make function
208 analyze_access_subtree create scalar replacements for this group if
209 possible. */
210 unsigned grp_hint : 1;
211
212 /* Is the subtree rooted in this access fully covered by scalar
213 replacements? */
214 unsigned grp_covered : 1;
215
216 /* If set to true, this access and all below it in an access tree must not be
217 scalarized. */
218 unsigned grp_unscalarizable_region : 1;
219
220 /* Whether data have been written to parts of the aggregate covered by this
221 access which are not to be scalarized. This flag is propagated up in the
222 access tree. */
223 unsigned grp_unscalarized_data : 1;
224
225 /* Does this access and/or group contain a write access through a
226 BIT_FIELD_REF? */
227 unsigned grp_partial_lhs : 1;
228
229 /* Set when a scalar replacement should be created for this variable. We
230 make the decision and create the replacement at different places because
231 create_tmp_var cannot be called from within FOR_EACH_REFERENCED_VAR. */
232 unsigned grp_to_be_replaced : 1;
233
234 /* Should TREE_NO_WARNING of a replacement be set? */
235 unsigned grp_no_warning : 1;
236
237 /* Is it possible that the group refers to data which might be (directly or
238 otherwise) modified? */
239 unsigned grp_maybe_modified : 1;
240
241 /* Set when this is a representative of a pointer to scalar (i.e. by
242 reference) parameter which we consider for turning into a plain scalar
243 (i.e. a by value parameter). */
244 unsigned grp_scalar_ptr : 1;
245
246 /* Set when we discover that this pointer is not safe to dereference in the
247 caller. */
248 unsigned grp_not_necessarilly_dereferenced : 1;
249 };
250
251 typedef struct access *access_p;
252
253 DEF_VEC_P (access_p);
254 DEF_VEC_ALLOC_P (access_p, heap);
255
256 /* Alloc pool for allocating access structures. */
257 static alloc_pool access_pool;
258
259 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
260 are used to propagate subaccesses from rhs to lhs as long as they don't
261 conflict with what is already there. */
262 struct assign_link
263 {
264 struct access *lacc, *racc;
265 struct assign_link *next;
266 };
267
268 /* Alloc pool for allocating assign link structures. */
269 static alloc_pool link_pool;
270
271 /* Base (tree) -> Vector (VEC(access_p,heap) *) map. */
272 static struct pointer_map_t *base_access_vec;
273
274 /* Bitmap of candidates. */
275 static bitmap candidate_bitmap;
276
277 /* Bitmaps of candidates which we should try to entirely scalarize away and
278 of those which cannot be (because they are and need to be used as a whole). */
279 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
280
281 /* Obstack for creation of fancy names. */
282 static struct obstack name_obstack;
283
284 /* Head of a linked list of accesses that need to have their subaccesses
285 propagated to their assignment counterparts. */
286 static struct access *work_queue_head;
287
288 /* Number of parameters of the analyzed function when doing early ipa SRA. */
289 static int func_param_count;
290
291 /* scan_function sets the following to true if it encounters a call to
292 __builtin_apply_args. */
293 static bool encountered_apply_args;
294
295 /* Set by scan_function when it finds a recursive call. */
296 static bool encountered_recursive_call;
297
298 /* Set by scan_function when it finds a recursive call with fewer actual
299 arguments than formal parameters. */
300 static bool encountered_unchangable_recursive_call;
301
302 /* This is a table in which for each basic block and parameter there is the
303 maximum distance (offset + size) which is dereferenced and accessed in
304 that parameter in that BB. */
305 static HOST_WIDE_INT *bb_dereferences;
306 /* Bitmap of BBs that can cause the function to "stop" progressing by
307 returning, throwing externally, looping infinitely or calling a function
308 which might abort, etc. */
309 static bitmap final_bbs;
310
311 /* Representative of no accesses at all. */
312 static struct access no_accesses_representant;
313
314 /* Predicate to test the special value. */
315
316 static inline bool
317 no_accesses_p (struct access *access)
318 {
319 return access == &no_accesses_representant;
320 }
321
322 /* Statistics about the SRA passes, gathered while the passes run and
323 reported via statistics counters when each of them finishes its work on
324 a function. */
325
326 static struct
327 {
328 /* Number of processed aggregates is readily available in
329 analyze_all_variable_accesses and so is not stored here. */
330
331 /* Number of created scalar replacements. */
332 int replacements;
333
334 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
335 expression. */
336 int exprs;
337
338 /* Number of statements created by generate_subtree_copies. */
339 int subtree_copies;
340
341 /* Number of statements created by load_assign_lhs_subreplacements. */
342 int subreplacements;
343
344 /* Number of times sra_modify_assign has deleted a statement. */
345 int deleted;
346
347 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
348 RHS separately due to type conversions or nonexistent matching
349 references. */
350 int separate_lhs_rhs_handling;
351
352 /* Number of parameters that were removed because they were unused. */
353 int deleted_unused_parameters;
354
355 /* Number of scalars passed as parameters by reference that have been
356 converted to be passed by value. */
357 int scalar_by_ref_to_by_val;
358
359 /* Number of aggregate parameters that were replaced by one or more of their
360 components. */
361 int aggregate_params_reduced;
362
363 /* Number of components created when splitting aggregate parameters. */
364 int param_reductions_created;
365 } sra_stats;
366
/* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
representative fields are dumped, otherwise those which only describe the
individual access are. */

367 static void
368 dump_access (FILE *f, struct access *access, bool grp)
369 {
370 fprintf (f, "access { ");
371 fprintf (f, "base = (%d)'", DECL_UID (access->base));
372 print_generic_expr (f, access->base, 0);
373 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
374 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
375 fprintf (f, ", expr = ");
376 print_generic_expr (f, access->expr, 0);
377 fprintf (f, ", type = ");
378 print_generic_expr (f, access->type, 0);
379 if (grp)
380 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
381 "grp_assignment_write = %d, grp_scalar_read = %d, "
382 "grp_scalar_write = %d, grp_total_scalarization = %d, "
383 "grp_hint = %d, grp_covered = %d, "
384 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
385 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
386 "grp_maybe_modified = %d, "
387 "grp_not_necessarilly_dereferenced = %d\n",
388 access->grp_read, access->grp_write, access->grp_assignment_read,
389 access->grp_assignment_write, access->grp_scalar_read,
390 access->grp_scalar_write, access->grp_total_scalarization,
391 access->grp_hint, access->grp_covered,
392 access->grp_unscalarizable_region, access->grp_unscalarized_data,
393 access->grp_partial_lhs, access->grp_to_be_replaced,
394 access->grp_maybe_modified,
395 access->grp_not_necessarilly_dereferenced);
396 else
397 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
398 "grp_partial_lhs = %d\n",
399 access->write, access->grp_total_scalarization,
400 access->grp_partial_lhs);
401 }
402
403 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
404
405 static void
406 dump_access_tree_1 (FILE *f, struct access *access, int level)
407 {
408 do
409 {
410 int i;
411
412 for (i = 0; i < level; i++)
413 fputs ("* ", f);
414
415 dump_access (f, access, true);
416
417 if (access->first_child)
418 dump_access_tree_1 (f, access->first_child, level + 1);
419
420 access = access->next_sibling;
421 }
422 while (access);
423 }
424
425 /* Dump all access trees for a variable, given the pointer to the first root in
426 ACCESS. */
427
428 static void
429 dump_access_tree (FILE *f, struct access *access)
430 {
431 for (; access; access = access->next_grp)
432 dump_access_tree_1 (f, access, 0);
433 }
434
435 /* Return true iff ACC is non-NULL and has subaccesses. */
436
437 static inline bool
438 access_has_children_p (struct access *acc)
439 {
440 return acc && acc->first_child;
441 }
442
443 /* Return a vector of pointers to accesses for the variable given in BASE or
444 NULL if there is none. */
445
446 static VEC (access_p, heap) *
447 get_base_access_vector (tree base)
448 {
449 void **slot;
450
451 slot = pointer_map_contains (base_access_vec, base);
452 if (!slot)
453 return NULL;
454 else
455 return *(VEC (access_p, heap) **) slot;
456 }
457
458 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
459 in ACCESS. Return NULL if it cannot be found. */
460
461 static struct access *
462 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
463 HOST_WIDE_INT size)
464 {
465 while (access && (access->offset != offset || access->size != size))
466 {
467 struct access *child = access->first_child;
468
469 while (child && (child->offset + child->size <= offset))
470 child = child->next_sibling;
471 access = child;
472 }
473
474 return access;
475 }
476
477 /* Return the first group representative for BASE or NULL if none exists. */
478
479 static struct access *
480 get_first_repr_for_decl (tree base)
481 {
482 VEC (access_p, heap) *access_vec;
483
484 access_vec = get_base_access_vector (base);
485 if (!access_vec)
486 return NULL;
487
488 return VEC_index (access_p, access_vec, 0);
489 }
490
491 /* Find an access representative for the variable BASE and given OFFSET and
492 SIZE. Requires that access trees have already been built. Return NULL if
493 it cannot be found. */
494
495 static struct access *
496 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
497 HOST_WIDE_INT size)
498 {
499 struct access *access;
500
501 access = get_first_repr_for_decl (base);
502 while (access && (access->offset + access->size <= offset))
503 access = access->next_grp;
504 if (!access)
505 return NULL;
506
507 return find_access_in_subtree (access, offset, size);
508 }
509
510 /* Add LINK to the linked list of assign links of RACC. */
511 static void
512 add_link_to_rhs (struct access *racc, struct assign_link *link)
513 {
514 gcc_assert (link->racc == racc);
515
516 if (!racc->first_link)
517 {
518 gcc_assert (!racc->last_link);
519 racc->first_link = link;
520 }
521 else
522 racc->last_link->next = link;
523
524 racc->last_link = link;
525 link->next = NULL;
526 }
527
528 /* Move all link structures in their linked list in OLD_RACC to the linked list
529 in NEW_RACC. */
530 static void
531 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
532 {
533 if (!old_racc->first_link)
534 {
535 gcc_assert (!old_racc->last_link);
536 return;
537 }
538
539 if (new_racc->first_link)
540 {
541 gcc_assert (!new_racc->last_link->next);
542 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
543
544 new_racc->last_link->next = old_racc->first_link;
545 new_racc->last_link = old_racc->last_link;
546 }
547 else
548 {
549 gcc_assert (!new_racc->last_link);
550
551 new_racc->first_link = old_racc->first_link;
552 new_racc->last_link = old_racc->last_link;
553 }
554 old_racc->first_link = old_racc->last_link = NULL;
555 }
556
557 /* Add ACCESS to the work queue (which is actually a stack). */
558
559 static void
560 add_access_to_work_queue (struct access *access)
561 {
562 if (!access->grp_queued)
563 {
564 gcc_assert (!access->next_queued);
565 access->next_queued = work_queue_head;
566 access->grp_queued = 1;
567 work_queue_head = access;
568 }
569 }
570
571 /* Pop an access from the work queue, and return it, assuming there is one. */
572
573 static struct access *
574 pop_access_from_work_queue (void)
575 {
576 struct access *access = work_queue_head;
577
578 work_queue_head = access->next_queued;
579 access->next_queued = NULL;
580 access->grp_queued = 0;
581 return access;
582 }
583
584
585 /* Allocate necessary structures. */
586
587 static void
588 sra_initialize (void)
589 {
590 candidate_bitmap = BITMAP_ALLOC (NULL);
591 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
592 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
593 gcc_obstack_init (&name_obstack);
594 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
595 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
596 base_access_vec = pointer_map_create ();
597 memset (&sra_stats, 0, sizeof (sra_stats));
598 encountered_apply_args = false;
599 encountered_recursive_call = false;
600 encountered_unchangable_recursive_call = false;
601 }
602
603 /* Hook fed to pointer_map_traverse; deallocate stored vectors. */
604
605 static bool
606 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
607 void *data ATTRIBUTE_UNUSED)
608 {
609 VEC (access_p, heap) *access_vec;
610 access_vec = (VEC (access_p, heap) *) *value;
611 VEC_free (access_p, heap, access_vec);
612
613 return true;
614 }
615
616 /* Deallocate all general structures. */
617
618 static void
619 sra_deinitialize (void)
620 {
621 BITMAP_FREE (candidate_bitmap);
622 BITMAP_FREE (should_scalarize_away_bitmap);
623 BITMAP_FREE (cannot_scalarize_away_bitmap);
624 free_alloc_pool (access_pool);
625 free_alloc_pool (link_pool);
626 obstack_free (&name_obstack, NULL);
627
628 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
629 pointer_map_destroy (base_access_vec);
630 }
631
632 /* Remove DECL from candidates for SRA and write REASON to the dump file if
633 there is one. */
634 static void
635 disqualify_candidate (tree decl, const char *reason)
636 {
637 bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
638
639 if (dump_file && (dump_flags & TDF_DETAILS))
640 {
641 fprintf (dump_file, "! Disqualifying ");
642 print_generic_expr (dump_file, decl, 0);
643 fprintf (dump_file, " - %s\n", reason);
644 }
645 }
646
647 /* Return true iff the type contains a field or an element which does not allow
648 scalarization. */
649
650 static bool
651 type_internals_preclude_sra_p (tree type, const char **msg)
652 {
653 tree fld;
654 tree et;
655
656 switch (TREE_CODE (type))
657 {
658 case RECORD_TYPE:
659 case UNION_TYPE:
660 case QUAL_UNION_TYPE:
661 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
662 if (TREE_CODE (fld) == FIELD_DECL)
663 {
664 tree ft = TREE_TYPE (fld);
665
666 if (TREE_THIS_VOLATILE (fld))
667 {
668 *msg = "volatile structure field";
669 return true;
670 }
671 if (!DECL_FIELD_OFFSET (fld))
672 {
673 *msg = "no structure field offset";
674 return true;
675 }
676 if (!DECL_SIZE (fld))
677 {
678 *msg = "no structure field size";
679 return true;
680 }
681 if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
682 {
683 *msg = "structure field offset not fixed";
684 return true;
685 }
686 if (!host_integerp (DECL_SIZE (fld), 1))
687 {
688 *msg = "structure field size not fixed";
689 return true;
690 }
691 if (AGGREGATE_TYPE_P (ft)
692 && int_bit_position (fld) % BITS_PER_UNIT != 0)
693 {
694 *msg = "structure field is bit field";
695 return true;
696 }
697
698 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
699 return true;
700 }
701
702 return false;
703
704 case ARRAY_TYPE:
705 et = TREE_TYPE (type);
706
707 if (TYPE_VOLATILE (et))
708 {
709 *msg = "element type is volatile";
710 return true;
711 }
712
713 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
714 return true;
715
716 return false;
717
718 default:
719 return false;
720 }
721 }
722
723 /* If T is an SSA_NAME, return its base variable if it is a default def,
724 or NULL if it is not. Return T itself if it is not an SSA_NAME. */
725
726 static tree
727 get_ssa_base_param (tree t)
728 {
729 if (TREE_CODE (t) == SSA_NAME)
730 {
731 if (SSA_NAME_IS_DEFAULT_DEF (t))
732 return SSA_NAME_VAR (t);
733 else
734 return NULL_TREE;
735 }
736 return t;
737 }
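
/* For instance (a sketch): for the default definition p_1(D) of a PARM_DECL
p this returns p, for any other SSA_NAME it returns NULL_TREE, and a
non-SSA_NAME tree such as a plain declaration is returned unchanged. */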
738
739 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
740 belongs to, unless the BB has already been marked as potentially
741 final. */
742
743 static void
744 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
745 {
746 basic_block bb = gimple_bb (stmt);
747 int idx, parm_index = 0;
748 tree parm;
749
750 if (bitmap_bit_p (final_bbs, bb->index))
751 return;
752
753 for (parm = DECL_ARGUMENTS (current_function_decl);
754 parm && parm != base;
755 parm = DECL_CHAIN (parm))
756 parm_index++;
757
758 gcc_assert (parm_index < func_param_count);
759
760 idx = bb->index * func_param_count + parm_index;
761 if (bb_dereferences[idx] < dist)
762 bb_dereferences[idx] = dist;
763 }
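
/* E.g. (an illustrative sketch) a dereference of the second parameter at bit
offset 32 with size 32 in basic block 4 raises
bb_dereferences[4 * func_param_count + 1] to at least 64; distances are
measured in bits because that is what create_access passes in. */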
764
765 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
766 the three fields. Also add it to the vector of accesses corresponding to
767 the base. Finally, return the new access. */
768
769 static struct access *
770 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
771 {
772 VEC (access_p, heap) *vec;
773 struct access *access;
774 void **slot;
775
776 access = (struct access *) pool_alloc (access_pool);
777 memset (access, 0, sizeof (struct access));
778 access->base = base;
779 access->offset = offset;
780 access->size = size;
781
782 slot = pointer_map_contains (base_access_vec, base);
783 if (slot)
784 vec = (VEC (access_p, heap) *) *slot;
785 else
786 vec = VEC_alloc (access_p, heap, 32);
787
788 VEC_safe_push (access_p, heap, vec, access);
789
790 *((struct VEC (access_p,heap) **)
791 pointer_map_insert (base_access_vec, base)) = vec;
792
793 return access;
794 }
795
796 /* Create and insert an access for EXPR. Return the created access, or NULL
797 if that is not possible. */
798
799 static struct access *
800 create_access (tree expr, gimple stmt, bool write)
801 {
802 struct access *access;
803 HOST_WIDE_INT offset, size, max_size;
804 tree base = expr;
805 bool ptr, unscalarizable_region = false;
806
807 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
808
809 if (sra_mode == SRA_MODE_EARLY_IPA
810 && TREE_CODE (base) == MEM_REF)
811 {
812 base = get_ssa_base_param (TREE_OPERAND (base, 0));
813 if (!base)
814 return NULL;
815 ptr = true;
816 }
817 else
818 ptr = false;
819
820 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
821 return NULL;
822
823 if (sra_mode == SRA_MODE_EARLY_IPA)
824 {
825 if (size < 0 || size != max_size)
826 {
827 disqualify_candidate (base, "Encountered a variable sized access.");
828 return NULL;
829 }
830 if (TREE_CODE (expr) == COMPONENT_REF
831 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
832 {
833 disqualify_candidate (base, "Encountered a bit-field access.");
834 return NULL;
835 }
836 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
837
838 if (ptr)
839 mark_parm_dereference (base, offset + size, stmt);
840 }
841 else
842 {
843 if (size != max_size)
844 {
845 size = max_size;
846 unscalarizable_region = true;
847 }
848 if (size < 0)
849 {
850 disqualify_candidate (base, "Encountered an unconstrained access.");
851 return NULL;
852 }
853 }
854
855 access = create_access_1 (base, offset, size);
856 access->expr = expr;
857 access->type = TREE_TYPE (expr);
858 access->write = write;
859 access->grp_unscalarizable_region = unscalarizable_region;
860 access->stmt = stmt;
861
862 if (TREE_CODE (expr) == COMPONENT_REF
863 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
864 access->non_addressable = 1;
865
866 return access;
867 }
868
869
870 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
871 register types or (recursively) records with only these two kinds of fields.
872 It also returns false if any of these records contains a bit-field. */
873
874 static bool
875 type_consists_of_records_p (tree type)
876 {
877 tree fld;
878
879 if (TREE_CODE (type) != RECORD_TYPE)
880 return false;
881
882 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
883 if (TREE_CODE (fld) == FIELD_DECL)
884 {
885 tree ft = TREE_TYPE (fld);
886
887 if (DECL_BIT_FIELD (fld))
888 return false;
889
890 if (!is_gimple_reg_type (ft)
891 && !type_consists_of_records_p (ft))
892 return false;
893 }
894
895 return true;
896 }
897
898 /* Create total_scalarization accesses for all scalar-type fields in DECL,
899 which must be of a RECORD_TYPE conforming to type_consists_of_records_p. BASE
900 must be the top-most VAR_DECL representing the variable, OFFSET must be the
901 offset of DECL within BASE. REF must be the memory reference expression for
902 the given decl. */
903
904 static void
905 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
906 tree ref)
907 {
908 tree fld, decl_type = TREE_TYPE (decl);
909
910 for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
911 if (TREE_CODE (fld) == FIELD_DECL)
912 {
913 HOST_WIDE_INT pos = offset + int_bit_position (fld);
914 tree ft = TREE_TYPE (fld);
915 tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
916 NULL_TREE);
917
918 if (is_gimple_reg_type (ft))
919 {
920 struct access *access;
921 HOST_WIDE_INT size;
922
923 size = tree_low_cst (DECL_SIZE (fld), 1);
924 access = create_access_1 (base, pos, size);
925 access->expr = nref;
926 access->type = ft;
927 access->grp_total_scalarization = 1;
928 /* Accesses for intraprocedural SRA can have their stmt NULL. */
929 }
930 else
931 completely_scalarize_record (base, fld, pos, nref);
932 }
933 }
934
935 /* Create total_scalarization accesses for all scalar type fields in VAR and
936 for VAR as a whole. VAR must be of a RECORD_TYPE conforming to
937 type_consists_of_records_p. */
938
939 static void
940 completely_scalarize_var (tree var)
941 {
942 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
943 struct access *access;
944
945 access = create_access_1 (var, 0, size);
946 access->expr = var;
947 access->type = TREE_TYPE (var);
948 access->grp_total_scalarization = 1;
949
950 completely_scalarize_record (var, var, 0, var);
951 }
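
/* As an illustration (hypothetical type), for

     struct s { struct { int a; int b; } in; int c; } v;

this creates accesses for v as a whole, v.in.a, v.in.b and v.c, each with
grp_total_scalarization set, so that the later analysis may replace v with
three scalar variables. */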
952
953 /* Search the given tree T for its underlying declaration by skipping handled
954 components and exclude that declaration from the candidates. */
955
956 static void
957 disqualify_base_of_expr (tree t, const char *reason)
958 {
959 t = get_base_address (t);
960 if (sra_mode == SRA_MODE_EARLY_IPA
961 && TREE_CODE (t) == MEM_REF)
962 t = get_ssa_base_param (TREE_OPERAND (t, 0));
963
964 if (t && DECL_P (t))
965 disqualify_candidate (t, reason);
966 }
967
968 /* Scan expression EXPR and create access structures for all accesses to
969 candidates for scalarization. Return the created access or NULL if none is
970 created. */
971
972 static struct access *
973 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
974 {
975 struct access *ret = NULL;
976 bool partial_ref;
977
978 if (TREE_CODE (expr) == BIT_FIELD_REF
979 || TREE_CODE (expr) == IMAGPART_EXPR
980 || TREE_CODE (expr) == REALPART_EXPR)
981 {
982 expr = TREE_OPERAND (expr, 0);
983 partial_ref = true;
984 }
985 else
986 partial_ref = false;
987
988 /* We need to dive through V_C_Es in order to get the size of the operand
989 and not the result type. Ada produces such statements. We are also
990 capable of handling the topmost V_C_E but not any of those buried in other
991 handled components. */
992 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
993 expr = TREE_OPERAND (expr, 0);
994
995 if (contains_view_convert_expr_p (expr))
996 {
997 disqualify_base_of_expr (expr, "V_C_E under a different handled "
998 "component.");
999 return NULL;
1000 }
1001
1002 switch (TREE_CODE (expr))
1003 {
1004 case MEM_REF:
1005 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1006 && sra_mode != SRA_MODE_EARLY_IPA)
1007 return NULL;
1008 /* fall through */
1009 case VAR_DECL:
1010 case PARM_DECL:
1011 case RESULT_DECL:
1012 case COMPONENT_REF:
1013 case ARRAY_REF:
1014 case ARRAY_RANGE_REF:
1015 ret = create_access (expr, stmt, write);
1016 break;
1017
1018 default:
1019 break;
1020 }
1021
1022 if (write && partial_ref && ret)
1023 ret->grp_partial_lhs = 1;
1024
1025 return ret;
1026 }
1027
1028 /* Scan expression EXPR and create access structures for all accesses to
1029 candidates for scalarization. Return true if any access has been inserted.
1030 STMT must be the statement from which the expression is taken, WRITE must be
1031 true if the expression is a store and false otherwise. */
1032
1033 static bool
1034 build_access_from_expr (tree expr, gimple stmt, bool write)
1035 {
1036 struct access *access;
1037
1038 access = build_access_from_expr_1 (expr, stmt, write);
1039 if (access)
1040 {
1041 /* This means the aggregate is accessed as a whole in a way other than an
1042 assign statement and thus cannot be removed even if we had a scalar
1043 replacement for everything. */
1044 if (cannot_scalarize_away_bitmap)
1045 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1046 return true;
1047 }
1048 return false;
1049 }
1050
1051 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
1052 modes in which it matters; return true iff they have been disqualified. RHS
1053 may be NULL, in that case ignore it. If we scalarize an aggregate in
1054 intra-SRA we may need to add statements after each statement. This is not
1055 possible if a statement unconditionally has to end the basic block. */
1056 static bool
1057 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
1058 {
1059 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1060 && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
1061 {
1062 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1063 if (rhs)
1064 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1065 return true;
1066 }
1067 return false;
1068 }
1069
1070 /* Return true if EXP is a memory reference less aligned than ALIGN. This is
1071 invoked only on strict-alignment targets. */
1072
1073 static bool
1074 tree_non_aligned_mem_p (tree exp, unsigned int align)
1075 {
1076 unsigned int exp_align;
1077
1078 if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
1079 exp = TREE_OPERAND (exp, 0);
1080
1081 if (TREE_CODE (exp) == SSA_NAME || is_gimple_min_invariant (exp))
1082 return false;
1083
1084 /* get_object_alignment will fall back to BITS_PER_UNIT if it cannot
1085 compute an explicit alignment. Pretend that dereferenced pointers
1086 are always aligned on strict-alignment targets. */
1087 if (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF)
1088 exp_align = get_object_or_type_alignment (exp);
1089 else
1090 exp_align = get_object_alignment (exp);
1091
1092 if (exp_align < align)
1093 return true;
1094
1095 return false;
1096 }
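
/* For example (a sketch related to the PR 50569 fix named in the log above),
on a STRICT_ALIGNMENT target a reference to a 32-bit integer field of a
structure declared with __attribute__((packed)) can have an alignment of
only 8 bits and would therefore be reported as non-aligned here for
ALIGN == 32. */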
1097
1098 /* Scan expressions occurring in STMT, create access structures for all accesses
1099 to candidates for scalarization and remove those candidates which occur in
1100 statements or expressions that prevent them from being split apart. Return
1101 true if any access has been inserted. */
1102
1103 static bool
1104 build_accesses_from_assign (gimple stmt)
1105 {
1106 tree lhs, rhs;
1107 struct access *lacc, *racc;
1108
1109 if (!gimple_assign_single_p (stmt)
1110 /* Scope clobbers don't influence scalarization. */
1111 || gimple_clobber_p (stmt))
1112 return false;
1113
1114 lhs = gimple_assign_lhs (stmt);
1115 rhs = gimple_assign_rhs1 (stmt);
1116
1117 if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1118 return false;
1119
1120 racc = build_access_from_expr_1 (rhs, stmt, false);
1121 lacc = build_access_from_expr_1 (lhs, stmt, true);
1122
1123 if (lacc)
1124 {
1125 lacc->grp_assignment_write = 1;
1126 if (STRICT_ALIGNMENT
1127 && tree_non_aligned_mem_p (rhs, get_object_alignment (lhs)))
1128 lacc->grp_unscalarizable_region = 1;
1129 }
1130
1131 if (racc)
1132 {
1133 racc->grp_assignment_read = 1;
1134 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1135 && !is_gimple_reg_type (racc->type))
1136 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1137 if (STRICT_ALIGNMENT
1138 && tree_non_aligned_mem_p (lhs, get_object_alignment (rhs)))
1139 racc->grp_unscalarizable_region = 1;
1140 }
1141
1142 if (lacc && racc
1143 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1144 && !lacc->grp_unscalarizable_region
1145 && !racc->grp_unscalarizable_region
1146 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1147 /* FIXME: Turn the following line into an assert after PR 40058 is
1148 fixed. */
1149 && lacc->size == racc->size
1150 && useless_type_conversion_p (lacc->type, racc->type))
1151 {
1152 struct assign_link *link;
1153
1154 link = (struct assign_link *) pool_alloc (link_pool);
1155 memset (link, 0, sizeof (struct assign_link));
1156
1157 link->lacc = lacc;
1158 link->racc = racc;
1159
1160 add_link_to_rhs (racc, link);
1161 }
1162
1163 return lacc || racc;
1164 }
1165
1166 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1167 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1168
1169 static bool
1170 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1171 void *data ATTRIBUTE_UNUSED)
1172 {
1173 op = get_base_address (op);
1174 if (op
1175 && DECL_P (op))
1176 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1177
1178 return false;
1179 }
1180
1181 /* Return true iff callsite CALL has at least as many actual arguments as there
1182 are formal parameters of the function currently processed by IPA-SRA. */
1183
1184 static inline bool
1185 callsite_has_enough_arguments_p (gimple call)
1186 {
1187 return gimple_call_num_args (call) >= (unsigned) func_param_count;
1188 }
1189
1190 /* Scan the function and look for interesting expressions, creating access
1191 structures for them. Return true iff any access is created. */
1192
1193 static bool
1194 scan_function (void)
1195 {
1196 basic_block bb;
1197 bool ret = false;
1198
1199 FOR_EACH_BB (bb)
1200 {
1201 gimple_stmt_iterator gsi;
1202 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1203 {
1204 gimple stmt = gsi_stmt (gsi);
1205 tree t;
1206 unsigned i;
1207
1208 if (final_bbs && stmt_can_throw_external (stmt))
1209 bitmap_set_bit (final_bbs, bb->index);
1210 switch (gimple_code (stmt))
1211 {
1212 case GIMPLE_RETURN:
1213 t = gimple_return_retval (stmt);
1214 if (t != NULL_TREE)
1215 ret |= build_access_from_expr (t, stmt, false);
1216 if (final_bbs)
1217 bitmap_set_bit (final_bbs, bb->index);
1218 break;
1219
1220 case GIMPLE_ASSIGN:
1221 ret |= build_accesses_from_assign (stmt);
1222 break;
1223
1224 case GIMPLE_CALL:
1225 for (i = 0; i < gimple_call_num_args (stmt); i++)
1226 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1227 stmt, false);
1228
1229 if (sra_mode == SRA_MODE_EARLY_IPA)
1230 {
1231 tree dest = gimple_call_fndecl (stmt);
1232 int flags = gimple_call_flags (stmt);
1233
1234 if (dest)
1235 {
1236 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1237 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1238 encountered_apply_args = true;
1239 if (cgraph_get_node (dest)
1240 == cgraph_get_node (current_function_decl))
1241 {
1242 encountered_recursive_call = true;
1243 if (!callsite_has_enough_arguments_p (stmt))
1244 encountered_unchangable_recursive_call = true;
1245 }
1246 }
1247
1248 if (final_bbs
1249 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1250 bitmap_set_bit (final_bbs, bb->index);
1251 }
1252
1253 t = gimple_call_lhs (stmt);
1254 if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1255 ret |= build_access_from_expr (t, stmt, true);
1256 break;
1257
1258 case GIMPLE_ASM:
1259 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1260 asm_visit_addr);
1261 if (final_bbs)
1262 bitmap_set_bit (final_bbs, bb->index);
1263
1264 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1265 {
1266 t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1267 ret |= build_access_from_expr (t, stmt, false);
1268 }
1269 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1270 {
1271 t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1272 ret |= build_access_from_expr (t, stmt, true);
1273 }
1274 break;
1275
1276 default:
1277 break;
1278 }
1279 }
1280 }
1281
1282 return ret;
1283 }
1284
1285 /* Helper for qsort. The array holds pointers to accesses. An access is
1286 considered smaller than another if it has a smaller offset or if the
1287 offsets are the same but its size is bigger. */
1288
1289 static int
1290 compare_access_positions (const void *a, const void *b)
1291 {
1292 const access_p *fp1 = (const access_p *) a;
1293 const access_p *fp2 = (const access_p *) b;
1294 const access_p f1 = *fp1;
1295 const access_p f2 = *fp2;
1296
1297 if (f1->offset != f2->offset)
1298 return f1->offset < f2->offset ? -1 : 1;
1299
1300 if (f1->size == f2->size)
1301 {
1302 if (f1->type == f2->type)
1303 return 0;
1304 /* Put any non-aggregate type before any aggregate type. */
1305 else if (!is_gimple_reg_type (f1->type)
1306 && is_gimple_reg_type (f2->type))
1307 return 1;
1308 else if (is_gimple_reg_type (f1->type)
1309 && !is_gimple_reg_type (f2->type))
1310 return -1;
1311 /* Put any complex or vector type before any other scalar type. */
1312 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1313 && TREE_CODE (f1->type) != VECTOR_TYPE
1314 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1315 || TREE_CODE (f2->type) == VECTOR_TYPE))
1316 return 1;
1317 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1318 || TREE_CODE (f1->type) == VECTOR_TYPE)
1319 && TREE_CODE (f2->type) != COMPLEX_TYPE
1320 && TREE_CODE (f2->type) != VECTOR_TYPE)
1321 return -1;
1322 /* Put the integral type with the bigger precision first. */
1323 else if (INTEGRAL_TYPE_P (f1->type)
1324 && INTEGRAL_TYPE_P (f2->type))
1325 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1326 /* Put any integral type with non-full precision last. */
1327 else if (INTEGRAL_TYPE_P (f1->type)
1328 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1329 != TYPE_PRECISION (f1->type)))
1330 return 1;
1331 else if (INTEGRAL_TYPE_P (f2->type)
1332 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1333 != TYPE_PRECISION (f2->type)))
1334 return -1;
1335 /* Stabilize the sort. */
1336 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1337 }
1338
1339 /* We want the bigger accesses first, thus the opposite operator in the next
1340 line: */
1341 return f1->size > f2->size ? -1 : 1;
1342 }
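
/* For instance (an illustrative sketch), accesses at (offset, size) pairs
(0, 64), (32, 32) and (0, 32) sort as (0, 64), (0, 32), (32, 32):
ascending offsets first and, among equal offsets, the bigger access first,
so that representatives of enclosing pieces precede their subaccesses. */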
1343
1344
1345 /* Append the name of the declaration to the name obstack. A helper function for
1346 make_fancy_name. */
1347
1348 static void
1349 make_fancy_decl_name (tree decl)
1350 {
1351 char buffer[32];
1352
1353 tree name = DECL_NAME (decl);
1354 if (name)
1355 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1356 IDENTIFIER_LENGTH (name));
1357 else
1358 {
1359 sprintf (buffer, "D%u", DECL_UID (decl));
1360 obstack_grow (&name_obstack, buffer, strlen (buffer));
1361 }
1362 }
1363
1364 /* Helper for make_fancy_name. */
1365
1366 static void
1367 make_fancy_name_1 (tree expr)
1368 {
1369 char buffer[32];
1370 tree index;
1371
1372 if (DECL_P (expr))
1373 {
1374 make_fancy_decl_name (expr);
1375 return;
1376 }
1377
1378 switch (TREE_CODE (expr))
1379 {
1380 case COMPONENT_REF:
1381 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1382 obstack_1grow (&name_obstack, '$');
1383 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1384 break;
1385
1386 case ARRAY_REF:
1387 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1388 obstack_1grow (&name_obstack, '$');
1389 /* Arrays with only one element may not have a constant as their
1390 index. */
1391 index = TREE_OPERAND (expr, 1);
1392 if (TREE_CODE (index) != INTEGER_CST)
1393 break;
1394 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1395 obstack_grow (&name_obstack, buffer, strlen (buffer));
1396 break;
1397
1398 case ADDR_EXPR:
1399 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1400 break;
1401
1402 case MEM_REF:
1403 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1404 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1405 {
1406 obstack_1grow (&name_obstack, '$');
1407 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1408 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1409 obstack_grow (&name_obstack, buffer, strlen (buffer));
1410 }
1411 break;
1412
1413 case BIT_FIELD_REF:
1414 case REALPART_EXPR:
1415 case IMAGPART_EXPR:
1416 gcc_unreachable (); /* we treat these as scalars. */
1417 break;
1418 default:
1419 break;
1420 }
1421 }
1422
1423 /* Create a human readable name for a replacement variable based on EXPR. */
1424
1425 static char *
1426 make_fancy_name (tree expr)
1427 {
1428 make_fancy_name_1 (expr);
1429 obstack_1grow (&name_obstack, '\0');
1430 return XOBFINISH (&name_obstack, char *);
1431 }
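
/* A sketch of the produced names: for o.in.a the obstack holds "o$in$a" and
for rec.data[3].len roughly "rec$data$3$len"; the exact strings depend on
the DECL_NAMEs and constant indices involved. */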
1432
1433 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1434 EXP_TYPE at the given OFFSET. If BASE is something for which
1435 get_addr_base_and_unit_offset returns NULL, GSI must be non-NULL and is used
1436 to insert new statements either before or below the current one as specified
1437 by INSERT_AFTER. This function is not capable of handling bitfields. */
1438
1439 tree
1440 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1441 tree exp_type, gimple_stmt_iterator *gsi,
1442 bool insert_after)
1443 {
1444 tree prev_base = base;
1445 tree off;
1446 HOST_WIDE_INT base_offset;
1447
1448 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1449
1450 base = get_addr_base_and_unit_offset (base, &base_offset);
1451
1452 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1453 offset such as array[var_index]. */
1454 if (!base)
1455 {
1456 gimple stmt;
1457 tree tmp, addr;
1458
1459 gcc_checking_assert (gsi);
1460 tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1461 add_referenced_var (tmp);
1462 tmp = make_ssa_name (tmp, NULL);
1463 addr = build_fold_addr_expr (unshare_expr (prev_base));
1464 STRIP_USELESS_TYPE_CONVERSION (addr);
1465 stmt = gimple_build_assign (tmp, addr);
1466 gimple_set_location (stmt, loc);
1467 SSA_NAME_DEF_STMT (tmp) = stmt;
1468 if (insert_after)
1469 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1470 else
1471 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1472 update_stmt (stmt);
1473
1474 off = build_int_cst (reference_alias_ptr_type (prev_base),
1475 offset / BITS_PER_UNIT);
1476 base = tmp;
1477 }
1478 else if (TREE_CODE (base) == MEM_REF)
1479 {
1480 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1481 base_offset + offset / BITS_PER_UNIT);
1482 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1483 base = unshare_expr (TREE_OPERAND (base, 0));
1484 }
1485 else
1486 {
1487 off = build_int_cst (reference_alias_ptr_type (base),
1488 base_offset + offset / BITS_PER_UNIT);
1489 base = build_fold_addr_expr (unshare_expr (base));
1490 }
1491
1492 return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1493 }
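
/* E.g. (a hypothetical sketch) for a VAR_DECL s of a record type and an
OFFSET of 32 bits, the returned reference would dump as something like
MEM[(EXP_TYPE *)&s + 4B]; only when BASE contains a variable array index
is the address first computed into a new SSA name through GSI. */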
1494
1495 DEF_VEC_ALLOC_P_STACK (tree);
1496 #define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)
1497
1498 /* Construct a memory reference to a part of an aggregate BASE at the given
1499 OFFSET and of the type of MODEL. In case the expression of MODEL is a
1500 chain of component references, the function will replicate that chain of
1501 COMPONENT_REFs to access the part. GSI and INSERT_AFTER have the same
1502 meaning as in build_ref_for_offset. */
1503
1504 static tree
1505 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1506 struct access *model, gimple_stmt_iterator *gsi,
1507 bool insert_after)
1508 {
1509 tree type = model->type, t;
1510 VEC(tree,stack) *cr_stack = NULL;
1511
1512 if (TREE_CODE (model->expr) == COMPONENT_REF)
1513 {
1514 tree expr = model->expr;
1515
1516 /* Create a stack of the COMPONENT_REFs so later we can walk them in
1517 order from inner to outer. */
1518 cr_stack = VEC_alloc (tree, stack, 6);
1519
1520 do {
1521 tree field = TREE_OPERAND (expr, 1);
1522 tree cr_offset = component_ref_field_offset (expr);
1523 gcc_assert (cr_offset && host_integerp (cr_offset, 1));
1524
1525 offset -= TREE_INT_CST_LOW (cr_offset) * BITS_PER_UNIT;
1526 offset -= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1527
1528 VEC_safe_push (tree, stack, cr_stack, expr);
1529
1530 expr = TREE_OPERAND (expr, 0);
1531 type = TREE_TYPE (expr);
1532 } while (TREE_CODE (expr) == COMPONENT_REF);
1533 }
1534
1535 t = build_ref_for_offset (loc, base, offset, type, gsi, insert_after);
1536
1537 if (TREE_CODE (model->expr) == COMPONENT_REF)
1538 {
1539 unsigned i;
1540 tree expr;
1541
1542 /* Now replicate the chain of COMPONENT_REFs from inner to outer. */
1543 FOR_EACH_VEC_ELT_REVERSE (tree, cr_stack, i, expr)
1544 {
1545 tree field = TREE_OPERAND (expr, 1);
1546 t = fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (field), t, field,
1547 TREE_OPERAND (expr, 2));
1548 }
1549
1550 VEC_free (tree, stack, cr_stack);
1551 }
1552
1553 return t;
1554 }
1555
1556 /* Construct a memory reference consisting of component_refs and array_refs to
1557 a part of an aggregate *RES (which is of type TYPE). The requested part
1558 should have type EXP_TYPE and be at the given OFFSET. This function might
1559 not succeed; it returns true when it does, and only then does *RES point to
1560 something meaningful. This function should be used only to build expressions
1561 that we might need to present to the user (e.g. in warnings). In all other
1562 situations, build_ref_for_model or build_ref_for_offset should be used instead. */
1563
1564 static bool
1565 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1566 tree exp_type)
1567 {
1568 while (1)
1569 {
1570 tree fld;
1571 tree tr_size, index, minidx;
1572 HOST_WIDE_INT el_size;
1573
1574 if (offset == 0 && exp_type
1575 && types_compatible_p (exp_type, type))
1576 return true;
1577
1578 switch (TREE_CODE (type))
1579 {
1580 case UNION_TYPE:
1581 case QUAL_UNION_TYPE:
1582 case RECORD_TYPE:
1583 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1584 {
1585 HOST_WIDE_INT pos, size;
1586 tree expr, *expr_ptr;
1587
1588 if (TREE_CODE (fld) != FIELD_DECL)
1589 continue;
1590
1591 pos = int_bit_position (fld);
1592 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1593 tr_size = DECL_SIZE (fld);
1594 if (!tr_size || !host_integerp (tr_size, 1))
1595 continue;
1596 size = tree_low_cst (tr_size, 1);
1597 if (size == 0)
1598 {
1599 if (pos != offset)
1600 continue;
1601 }
1602 else if (pos > offset || (pos + size) <= offset)
1603 continue;
1604
1605 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1606 NULL_TREE);
1607 expr_ptr = &expr;
1608 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1609 offset - pos, exp_type))
1610 {
1611 *res = expr;
1612 return true;
1613 }
1614 }
1615 return false;
1616
1617 case ARRAY_TYPE:
1618 tr_size = TYPE_SIZE (TREE_TYPE (type));
1619 if (!tr_size || !host_integerp (tr_size, 1))
1620 return false;
1621 el_size = tree_low_cst (tr_size, 1);
1622
1623 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1624 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1625 return false;
1626 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1627 if (!integer_zerop (minidx))
1628 index = int_const_binop (PLUS_EXPR, index, minidx);
1629 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1630 NULL_TREE, NULL_TREE);
1631 offset = offset % el_size;
1632 type = TREE_TYPE (type);
1633 break;
1634
1635 default:
1636 if (offset != 0)
1637 return false;
1638
1639 if (exp_type)
1640 return false;
1641 else
1642 return true;
1643 }
1644 }
1645 }
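
/* For illustration (hypothetical type): with TYPE being
struct { int a; struct { int b; } in; }, OFFSET 32 and EXP_TYPE int,
*RES is extended with the references .in and .in.b and true is returned,
whereas OFFSET 16 makes the function fail and return false. */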
1646
1647 /* Return true iff TYPE is a stdarg va_list type. */
1648
1649 static inline bool
1650 is_va_list_type (tree type)
1651 {
1652 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1653 }
1654
1655 /* Print a message to the dump file saying why a variable was rejected. */
1656
1657 static void
1658 reject (tree var, const char *msg)
1659 {
1660 if (dump_file && (dump_flags & TDF_DETAILS))
1661 {
1662 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1663 print_generic_expr (dump_file, var, 0);
1664 fprintf (dump_file, "\n");
1665 }
1666 }
1667
1668 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1669 those variables whose type is suitable for scalarization. */
1670
1671 static bool
1672 find_var_candidates (void)
1673 {
1674 tree var, type;
1675 referenced_var_iterator rvi;
1676 bool ret = false;
1677 const char *msg;
1678
1679 FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
1680 {
1681 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1682 continue;
1683 type = TREE_TYPE (var);
1684
1685 if (!AGGREGATE_TYPE_P (type))
1686 {
1687 reject (var, "not aggregate");
1688 continue;
1689 }
1690 if (needs_to_live_in_memory (var))
1691 {
1692 reject (var, "needs to live in memory");
1693 continue;
1694 }
1695 if (TREE_THIS_VOLATILE (var))
1696 {
1697 reject (var, "is volatile");
1698 continue;
1699 }
1700 if (!COMPLETE_TYPE_P (type))
1701 {
1702 reject (var, "has incomplete type");
1703 continue;
1704 }
1705 if (!host_integerp (TYPE_SIZE (type), 1))
1706 {
1707 reject (var, "type size not fixed");
1708 continue;
1709 }
1710 if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
1711 {
1712 reject (var, "type size is zero");
1713 continue;
1714 }
1715 if (type_internals_preclude_sra_p (type, &msg))
1716 {
1717 reject (var, msg);
1718 continue;
1719 }
1720 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1721 we also want to schedule it rather late. Thus we ignore it in
1722 the early pass. */
1723 (sra_mode == SRA_MODE_EARLY_INTRA
1724 && is_va_list_type (type)))
1725 {
1726 reject (var, "is va_list");
1727 continue;
1728 }
1729
1730 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1731
1732 if (dump_file && (dump_flags & TDF_DETAILS))
1733 {
1734 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1735 print_generic_expr (dump_file, var, 0);
1736 fprintf (dump_file, "\n");
1737 }
1738 ret = true;
1739 }
1740
1741 return ret;
1742 }
1743
1744 /* Sort all accesses for the given variable, check for partial overlaps and
1745 return NULL if there are any. If there are none, pick a representative for
1746 each combination of offset and size and create a linked list out of them.
1747 Return the pointer to the first representative and make sure it is the first
1748 one in the vector of accesses. */
1749
1750 static struct access *
1751 sort_and_splice_var_accesses (tree var)
1752 {
1753 int i, j, access_count;
1754 struct access *res, **prev_acc_ptr = &res;
1755 VEC (access_p, heap) *access_vec;
1756 bool first = true;
1757 HOST_WIDE_INT low = -1, high = 0;
1758
1759 access_vec = get_base_access_vector (var);
1760 if (!access_vec)
1761 return NULL;
1762 access_count = VEC_length (access_p, access_vec);
1763
1764 /* Sort by <OFFSET, SIZE>. */
1765 VEC_qsort (access_p, access_vec, compare_access_positions);
1766
1767 i = 0;
1768 while (i < access_count)
1769 {
1770 struct access *access = VEC_index (access_p, access_vec, i);
1771 bool grp_write = access->write;
1772 bool grp_read = !access->write;
1773 bool grp_scalar_write = access->write
1774 && is_gimple_reg_type (access->type);
1775 bool grp_scalar_read = !access->write
1776 && is_gimple_reg_type (access->type);
1777 bool grp_assignment_read = access->grp_assignment_read;
1778 bool grp_assignment_write = access->grp_assignment_write;
1779 bool multiple_scalar_reads = false;
1780 bool total_scalarization = access->grp_total_scalarization;
1781 bool grp_partial_lhs = access->grp_partial_lhs;
1782 bool first_scalar = is_gimple_reg_type (access->type);
1783 bool unscalarizable_region = access->grp_unscalarizable_region;
1784
1785 if (first || access->offset >= high)
1786 {
1787 first = false;
1788 low = access->offset;
1789 high = access->offset + access->size;
1790 }
1791 else if (access->offset > low && access->offset + access->size > high)
1792 return NULL;
1793 else
1794 gcc_assert (access->offset >= low
1795 && access->offset + access->size <= high);
1796
1797 j = i + 1;
1798 while (j < access_count)
1799 {
1800 struct access *ac2 = VEC_index (access_p, access_vec, j);
1801 if (ac2->offset != access->offset || ac2->size != access->size)
1802 break;
1803 if (ac2->write)
1804 {
1805 grp_write = true;
1806 grp_scalar_write = (grp_scalar_write
1807 || is_gimple_reg_type (ac2->type));
1808 }
1809 else
1810 {
1811 grp_read = true;
1812 if (is_gimple_reg_type (ac2->type))
1813 {
1814 if (grp_scalar_read)
1815 multiple_scalar_reads = true;
1816 else
1817 grp_scalar_read = true;
1818 }
1819 }
1820 grp_assignment_read |= ac2->grp_assignment_read;
1821 grp_assignment_write |= ac2->grp_assignment_write;
1822 grp_partial_lhs |= ac2->grp_partial_lhs;
1823 unscalarizable_region |= ac2->grp_unscalarizable_region;
1824 total_scalarization |= ac2->grp_total_scalarization;
1825 relink_to_new_repr (access, ac2);
1826
1827 /* If there are both aggregate-type and scalar-type accesses with
1828 this combination of size and offset, the comparison function
1829 should have put the scalars first. */
1830 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1831 ac2->group_representative = access;
1832 j++;
1833 }
1834
1835 i = j;
1836
1837 access->group_representative = access;
1838 access->grp_write = grp_write;
1839 access->grp_read = grp_read;
1840 access->grp_scalar_read = grp_scalar_read;
1841 access->grp_scalar_write = grp_scalar_write;
1842 access->grp_assignment_read = grp_assignment_read;
1843 access->grp_assignment_write = grp_assignment_write;
1844 access->grp_hint = multiple_scalar_reads || total_scalarization;
1845 access->grp_total_scalarization = total_scalarization;
1846 access->grp_partial_lhs = grp_partial_lhs;
1847 access->grp_unscalarizable_region = unscalarizable_region;
1848 if (access->first_link)
1849 add_access_to_work_queue (access);
1850
1851 *prev_acc_ptr = access;
1852 prev_acc_ptr = &access->next_grp;
1853 }
1854
1855 gcc_assert (res == VEC_index (access_p, access_vec, 0));
1856 return res;
1857 }
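
/* An editorial illustration of the splicing above (not from the original
   sources; bit offsets assume a 32-bit int and float):

     struct S { int i; float f; } s;
     ...
     s.i = 1;       // access <0, 32>, write
     tmp = s.i;     // access <0, 32>, read
     s.f = 2.0f;    // access <32, 32>, write

   After sorting, the two <0, 32> accesses are spliced into a single group
   whose representative has both grp_read and grp_write set, followed by the
   <32, 32> group with only grp_write.  */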
1858
1859 /* Create a variable for the given ACCESS, which determines its type, name and
1860    a few other properties.  Return the variable declaration; callers store it
1861    in ACCESS->replacement_decl.  */
1862
1863 static tree
1864 create_access_replacement (struct access *access, bool rename)
1865 {
1866 tree repl;
1867
1868 repl = create_tmp_var (access->type, "SR");
1869 add_referenced_var (repl);
1870 if (rename)
1871 mark_sym_for_renaming (repl);
1872
1873 if (!access->grp_partial_lhs
1874 && (TREE_CODE (access->type) == COMPLEX_TYPE
1875 || TREE_CODE (access->type) == VECTOR_TYPE))
1876 DECL_GIMPLE_REG_P (repl) = 1;
1877
1878 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1879 DECL_ARTIFICIAL (repl) = 1;
1880 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1881
1882 if (DECL_NAME (access->base)
1883 && !DECL_IGNORED_P (access->base)
1884 && !DECL_ARTIFICIAL (access->base))
1885 {
1886 char *pretty_name = make_fancy_name (access->expr);
1887 tree debug_expr = unshare_expr (access->expr), d;
1888
1889 DECL_NAME (repl) = get_identifier (pretty_name);
1890 obstack_free (&name_obstack, pretty_name);
1891
1892 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1893 as DECL_DEBUG_EXPR isn't considered when looking for still
1894    used SSA_NAMEs and thus they could be freed.  All that debug info
1895    generation cares about is whether something is constant or variable
1896 and that get_ref_base_and_extent works properly on the
1897 expression. */
1898 for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
1899 switch (TREE_CODE (d))
1900 {
1901 case ARRAY_REF:
1902 case ARRAY_RANGE_REF:
1903 if (TREE_OPERAND (d, 1)
1904 && TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
1905 TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
1906 if (TREE_OPERAND (d, 3)
1907 && TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
1908 TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
1909 /* FALLTHRU */
1910 case COMPONENT_REF:
1911 if (TREE_OPERAND (d, 2)
1912 && TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
1913 TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
1914 break;
1915 default:
1916 break;
1917 }
1918 SET_DECL_DEBUG_EXPR (repl, debug_expr);
1919 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1920 if (access->grp_no_warning)
1921 TREE_NO_WARNING (repl) = 1;
1922 else
1923 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1924 }
1925 else
1926 TREE_NO_WARNING (repl) = 1;
1927
1928 if (dump_file)
1929 {
1930 fprintf (dump_file, "Created a replacement for ");
1931 print_generic_expr (dump_file, access->base, 0);
1932 fprintf (dump_file, " offset: %u, size: %u: ",
1933 (unsigned) access->offset, (unsigned) access->size);
1934 print_generic_expr (dump_file, repl, 0);
1935 fprintf (dump_file, "\n");
1936 }
1937 sra_stats.replacements++;
1938
1939 return repl;
1940 }
1941
1942 /* Return the scalar replacement of ACCESS; create it if it does not exist yet.  */
1943
1944 static inline tree
1945 get_access_replacement (struct access *access)
1946 {
1947 gcc_assert (access->grp_to_be_replaced);
1948
1949 if (!access->replacement_decl)
1950 access->replacement_decl = create_access_replacement (access, true);
1951 return access->replacement_decl;
1952 }
1953
1954 /* Return the scalar replacement of ACCESS, create it if it does not exist yet
1955    but do not mark it for renaming.  */
1956
1957 static inline tree
1958 get_unrenamed_access_replacement (struct access *access)
1959 {
1960 gcc_assert (!access->grp_to_be_replaced);
1961
1962 if (!access->replacement_decl)
1963 access->replacement_decl = create_access_replacement (access, false);
1964 return access->replacement_decl;
1965 }
1966
1967
1968 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
1969    linked list along the way.  Stop when *ACCESS is NULL or the access it
1970    points to is not "within" the root.  Return false iff some accesses partially
1971 overlap. */
1972
1973 static bool
1974 build_access_subtree (struct access **access)
1975 {
1976 struct access *root = *access, *last_child = NULL;
1977 HOST_WIDE_INT limit = root->offset + root->size;
1978
1979 *access = (*access)->next_grp;
1980 while (*access && (*access)->offset + (*access)->size <= limit)
1981 {
1982 if (!last_child)
1983 root->first_child = *access;
1984 else
1985 last_child->next_sibling = *access;
1986 last_child = *access;
1987
1988 if (!build_access_subtree (access))
1989 return false;
1990 }
1991
1992 if (*access && (*access)->offset < limit)
1993 return false;
1994
1995 return true;
1996 }
1997
1998 /* Build a tree of access representatives, ACCESS is the pointer to the first
1999 one, others are linked in a list by the next_grp field. Return false iff
2000 some accesses partially overlap. */
2001
2002 static bool
2003 build_access_trees (struct access *access)
2004 {
2005 while (access)
2006 {
2007 struct access *root = access;
2008
2009 if (!build_access_subtree (&access))
2010 return false;
2011 root->next_grp = access;
2012 }
2013 return true;
2014 }
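
/* A worked editorial example: representatives sorted as <0, 64>, <0, 32> and
   <32, 32> (offsets and sizes in bits) yield a tree in which <0, 32> and
   <32, 32> become children of <0, 64>.  Had the list contained <0, 48> and
   <32, 32> instead, the second access would start inside the first but end
   beyond it, so build_access_subtree would return false.  */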
2015
2016 /* Return true if EXPR contains some ARRAY_REFs into a variably bounded
2017    array.  */
2018
2019 static bool
2020 expr_with_var_bounded_array_refs_p (tree expr)
2021 {
2022 while (handled_component_p (expr))
2023 {
2024 if (TREE_CODE (expr) == ARRAY_REF
2025 && !host_integerp (array_ref_low_bound (expr), 0))
2026 return true;
2027 expr = TREE_OPERAND (expr, 0);
2028 }
2029 return false;
2030 }
2031
2032 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2033    they seem beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
2034    sorts of access flags appropriately along the way, notably propagate
2035    grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2036    grp_total_scalarization from PARENT down to ROOT.
2037
2038 Creating a replacement for a scalar access is considered beneficial if its
2039 grp_hint is set (this means we are either attempting total scalarization or
2040 there is more than one direct read access) or according to the following
2041 table:
2042
2043 Access written to through a scalar type (once or more times)
2044 |
2045 | Written to in an assignment statement
2046 | |
2047 | | Access read as scalar _once_
2048 | | |
2049 | | | Read in an assignment statement
2050 | | | |
2051 | | | | Scalarize Comment
2052 -----------------------------------------------------------------------------
2053 0 0 0 0 No access for the scalar
2054 0 0 0 1 No access for the scalar
2055 0 0 1 0 No Single read - won't help
2056 0 0 1 1 No The same case
2057 0 1 0 0 No access for the scalar
2058 0 1 0 1 No access for the scalar
2059 0 1 1 0 Yes s = *g; return s.i;
2060 0 1 1 1 Yes The same case as above
2061 1 0 0 0 No Won't help
2062 1 0 0 1 Yes s.i = 1; *g = s;
2063 1 0 1 0 Yes s.i = 5; g = s.i;
2064 1 0 1 1 Yes The same case as above
2065 1 1 0 0 No Won't help.
2066 1 1 0 1 Yes s.i = 1; *g = s;
2067 1 1 1 0 Yes s = *g; return s.i;
2068 1 1 1 1 Yes Any of the above yeses */
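
/* A source-level illustration of the row "1 0 0 1" above (an editorial
   example expanding the snippet from the table, not from the GCC testsuite):

     struct S { int i; };
     void store (struct S *g)
     {
       struct S s;
       s.i = 1;     // written to through a scalar type
       *g = s;      // read in an assignment statement
     }

   According to the table, a scalar replacement of s.i is scheduled here, so
   the store to s.i need not go through memory.  */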
2069
2070 static bool
2071 analyze_access_subtree (struct access *root, struct access *parent,
2072 bool allow_replacements)
2073 {
2074 struct access *child;
2075 HOST_WIDE_INT limit = root->offset + root->size;
2076 HOST_WIDE_INT covered_to = root->offset;
2077 bool scalar = is_gimple_reg_type (root->type);
2078 bool hole = false, sth_created = false;
2079
2080 if (parent)
2081 {
2082 if (parent->grp_read)
2083 root->grp_read = 1;
2084 if (parent->grp_assignment_read)
2085 root->grp_assignment_read = 1;
2086 if (parent->grp_write)
2087 root->grp_write = 1;
2088 if (parent->grp_assignment_write)
2089 root->grp_assignment_write = 1;
2090 if (parent->grp_total_scalarization)
2091 root->grp_total_scalarization = 1;
2092 }
2093
2094 if (root->grp_unscalarizable_region)
2095 allow_replacements = false;
2096
2097 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2098 allow_replacements = false;
2099
2100 for (child = root->first_child; child; child = child->next_sibling)
2101 {
2102 hole |= covered_to < child->offset;
2103 sth_created |= analyze_access_subtree (child, root,
2104 allow_replacements && !scalar);
2105
2106 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2107 root->grp_total_scalarization &= child->grp_total_scalarization;
2108 if (child->grp_covered)
2109 covered_to += child->size;
2110 else
2111 hole = true;
2112 }
2113
2114 if (allow_replacements && scalar && !root->first_child
2115 && (root->grp_hint
2116 || ((root->grp_scalar_read || root->grp_assignment_read)
2117 && (root->grp_scalar_write || root->grp_assignment_write))))
2118 {
2119 bool new_integer_type;
2120 if (TREE_CODE (root->type) == ENUMERAL_TYPE)
2121 {
2122 tree rt = root->type;
2123 root->type = build_nonstandard_integer_type (TYPE_PRECISION (rt),
2124 TYPE_UNSIGNED (rt));
2125 new_integer_type = true;
2126 }
2127 else
2128 new_integer_type = false;
2129
2130 if (dump_file && (dump_flags & TDF_DETAILS))
2131 {
2132 fprintf (dump_file, "Marking ");
2133 print_generic_expr (dump_file, root->base, 0);
2134 fprintf (dump_file, " offset: %u, size: %u ",
2135 (unsigned) root->offset, (unsigned) root->size);
2136 fprintf (dump_file, " to be replaced%s.\n",
2137 new_integer_type ? " with an integer": "");
2138 }
2139
2140 root->grp_to_be_replaced = 1;
2141 sth_created = true;
2142 hole = false;
2143 }
2144 else
2145 {
2146 if (covered_to < limit)
2147 hole = true;
2148 if (scalar)
2149 root->grp_total_scalarization = 0;
2150 }
2151
2152 if (sth_created
2153 && (!hole || root->grp_total_scalarization))
2154 {
2155 root->grp_covered = 1;
2156 return true;
2157 }
2158 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2159 root->grp_unscalarized_data = 1; /* not covered and written to */
2160 if (sth_created)
2161 return true;
2162 return false;
2163 }
2164
2165 /* Analyze all access trees linked by next_grp by the means of
2166 analyze_access_subtree. */
2167 static bool
2168 analyze_access_trees (struct access *access)
2169 {
2170 bool ret = false;
2171
2172 while (access)
2173 {
2174 if (analyze_access_subtree (access, NULL, true))
2175 ret = true;
2176 access = access->next_grp;
2177 }
2178
2179 return ret;
2180 }
2181
2182 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
2183    size SIZE would conflict with an already existing one.  If exactly such a
2184    child already exists in LACC, store a pointer to it in EXACT_MATCH.  */
2185
2186 static bool
2187 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2188 HOST_WIDE_INT size, struct access **exact_match)
2189 {
2190 struct access *child;
2191
2192 for (child = lacc->first_child; child; child = child->next_sibling)
2193 {
2194 if (child->offset == norm_offset && child->size == size)
2195 {
2196 *exact_match = child;
2197 return true;
2198 }
2199
2200 if (child->offset < norm_offset + size
2201 && child->offset + child->size > norm_offset)
2202 return true;
2203 }
2204
2205 return false;
2206 }
2207
2208 /* Create a new child access of PARENT, with all properties just like MODEL
2209    except for its offset and with its grp_write true and grp_read false.
2210 Return the new access or NULL if it cannot be created. Note that this access
2211 is created long after all splicing and sorting, it's not located in any
2212 access vector and is automatically a representative of its group. */
2213
2214 static struct access *
2215 create_artificial_child_access (struct access *parent, struct access *model,
2216 HOST_WIDE_INT new_offset)
2217 {
2218 struct access *access;
2219 struct access **child;
2220 tree expr = parent->base;
2221
2222 gcc_assert (!model->grp_unscalarizable_region);
2223
2224 access = (struct access *) pool_alloc (access_pool);
2225 memset (access, 0, sizeof (struct access));
2226 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2227 model->type))
2228 {
2229 access->grp_no_warning = true;
2230 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2231 new_offset, model, NULL, false);
2232 }
2233
2234 access->base = parent->base;
2235 access->expr = expr;
2236 access->offset = new_offset;
2237 access->size = model->size;
2238 access->type = model->type;
2239 access->grp_write = true;
2240 access->grp_read = false;
2241
2242 child = &parent->first_child;
2243 while (*child && (*child)->offset < new_offset)
2244 child = &(*child)->next_sibling;
2245
2246 access->next_sibling = *child;
2247 *child = access;
2248
2249 return access;
2250 }
2251
2252
2253 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2254 true if any new subaccess was created. Additionally, if RACC is a scalar
2255 access but LACC is not, change the type of the latter, if possible. */
2256
2257 static bool
2258 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2259 {
2260 struct access *rchild;
2261 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2262 bool ret = false;
2263
2264 if (is_gimple_reg_type (lacc->type)
2265 || lacc->grp_unscalarizable_region
2266 || racc->grp_unscalarizable_region)
2267 return false;
2268
2269 if (!lacc->first_child && !racc->first_child
2270 && is_gimple_reg_type (racc->type))
2271 {
2272 tree t = lacc->base;
2273
2274 lacc->type = racc->type;
2275 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t), lacc->offset,
2276 racc->type))
2277 lacc->expr = t;
2278 else
2279 {
2280 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2281 lacc->base, lacc->offset,
2282 racc, NULL, false);
2283 lacc->grp_no_warning = true;
2284 }
2285 return false;
2286 }
2287
2288 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2289 {
2290 struct access *new_acc = NULL;
2291 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2292
2293 if (rchild->grp_unscalarizable_region)
2294 continue;
2295
2296 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2297 &new_acc))
2298 {
2299 if (new_acc)
2300 {
2301 rchild->grp_hint = 1;
2302 new_acc->grp_hint |= new_acc->grp_read;
2303 if (rchild->first_child)
2304 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2305 }
2306 continue;
2307 }
2308
2309 rchild->grp_hint = 1;
2310 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2311 if (new_acc)
2312 {
2313 ret = true;
2314 if (racc->first_child)
2315 propagate_subaccesses_across_link (new_acc, rchild);
2316 }
2317 }
2318
2319 return ret;
2320 }
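
/* An editorial sketch of the propagation (identifiers are made up):

     struct S { int i; int j; };
     struct S a, b;
     ...
     b = *p;
     use (b.i);     // b gains a scalar subaccess <0, 32>
     a = b;         // assign link with RHS b and LHS a

   The <0, 32> subaccess of b is propagated across the link so that a gets a
   matching artificial child, allowing "a = b" to be rewritten later as a
   copy between scalar replacements rather than through memory.  */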
2321
2322 /* Propagate all subaccesses across assignment links. */
2323
2324 static void
2325 propagate_all_subaccesses (void)
2326 {
2327 while (work_queue_head)
2328 {
2329 struct access *racc = pop_access_from_work_queue ();
2330 struct assign_link *link;
2331
2332 gcc_assert (racc->first_link);
2333
2334 for (link = racc->first_link; link; link = link->next)
2335 {
2336 struct access *lacc = link->lacc;
2337
2338 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2339 continue;
2340 lacc = lacc->group_representative;
2341 if (propagate_subaccesses_across_link (lacc, racc)
2342 && lacc->first_link)
2343 add_access_to_work_queue (lacc);
2344 }
2345 }
2346 }
2347
2348 /* Go through all accesses collected throughout the (intraprocedural) analysis
2349 stage, exclude overlapping ones, identify representatives and build trees
2350 out of them, making decisions about scalarization on the way. Return true
2351 iff there are any to-be-scalarized variables after this stage. */
2352
2353 static bool
2354 analyze_all_variable_accesses (void)
2355 {
2356 int res = 0;
2357 bitmap tmp = BITMAP_ALLOC (NULL);
2358 bitmap_iterator bi;
2359 unsigned i, max_total_scalarization_size;
2360
2361 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2362 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
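  /* For instance (an editorial example; the values are target-dependent):
     with UNITS_PER_WORD 8, BITS_PER_UNIT 8 and a MOVE_RATIO of 4, records of
     up to 8 * 8 * 4 = 256 bits are considered for total scalarization.  */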
2363
2364 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2365 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2366 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2367 {
2368 tree var = referenced_var (i);
2369
2370 if (TREE_CODE (var) == VAR_DECL
2371 && type_consists_of_records_p (TREE_TYPE (var)))
2372 {
2373 if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2374 <= max_total_scalarization_size)
2375 {
2376 completely_scalarize_var (var);
2377 if (dump_file && (dump_flags & TDF_DETAILS))
2378 {
2379 fprintf (dump_file, "Will attempt to totally scalarize ");
2380 print_generic_expr (dump_file, var, 0);
2381 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2382 }
2383 }
2384 else if (dump_file && (dump_flags & TDF_DETAILS))
2385 {
2386 fprintf (dump_file, "Too big to totally scalarize: ");
2387 print_generic_expr (dump_file, var, 0);
2388 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2389 }
2390 }
2391 }
2392
2393 bitmap_copy (tmp, candidate_bitmap);
2394 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2395 {
2396 tree var = referenced_var (i);
2397 struct access *access;
2398
2399 access = sort_and_splice_var_accesses (var);
2400 if (!access || !build_access_trees (access))
2401 disqualify_candidate (var,
2402 "No or inhibitingly overlapping accesses.");
2403 }
2404
2405 propagate_all_subaccesses ();
2406
2407 bitmap_copy (tmp, candidate_bitmap);
2408 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2409 {
2410 tree var = referenced_var (i);
2411 struct access *access = get_first_repr_for_decl (var);
2412
2413 if (analyze_access_trees (access))
2414 {
2415 res++;
2416 if (dump_file && (dump_flags & TDF_DETAILS))
2417 {
2418 fprintf (dump_file, "\nAccess trees for ");
2419 print_generic_expr (dump_file, var, 0);
2420 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2421 dump_access_tree (dump_file, access);
2422 fprintf (dump_file, "\n");
2423 }
2424 }
2425 else
2426 disqualify_candidate (var, "No scalar replacements to be created.");
2427 }
2428
2429 BITMAP_FREE (tmp);
2430
2431 if (res)
2432 {
2433 statistics_counter_event (cfun, "Scalarized aggregates", res);
2434 return true;
2435 }
2436 else
2437 return false;
2438 }
2439
2440 /* Generate statements copying scalar replacements of accesses within a subtree
2441 into or out of AGG. ACCESS, all its children, siblings and their children
2442 are to be processed. AGG is an aggregate type expression (can be a
2443    declaration but does not have to be, it can for example also be a MEM_REF or
2444    a series of handled components).  TOP_OFFSET is the offset of the processed
2445    subtree which has to be subtracted from offsets of individual accesses to
2446    get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
2447    replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
2448    otherwise copy all.  GSI is a statement iterator used to place the new
2449    statements.  WRITE should be true when the statements should write from AGG
2450    to the replacement and false if vice versa.  If INSERT_AFTER is true, new
2451    statements will be added after the current statement in GSI, otherwise they
2452    will be added before it.  LOC is the location of the new statements.  */
2453
2454 static void
2455 generate_subtree_copies (struct access *access, tree agg,
2456 HOST_WIDE_INT top_offset,
2457 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2458 gimple_stmt_iterator *gsi, bool write,
2459 bool insert_after, location_t loc)
2460 {
2461 do
2462 {
2463 if (chunk_size && access->offset >= start_offset + chunk_size)
2464 return;
2465
2466 if (access->grp_to_be_replaced
2467 && (chunk_size == 0
2468 || access->offset + access->size > start_offset))
2469 {
2470 tree expr, repl = get_access_replacement (access);
2471 gimple stmt;
2472
2473 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2474 access, gsi, insert_after);
2475
2476 if (write)
2477 {
2478 if (access->grp_partial_lhs)
2479 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2480 !insert_after,
2481 insert_after ? GSI_NEW_STMT
2482 : GSI_SAME_STMT);
2483 stmt = gimple_build_assign (repl, expr);
2484 }
2485 else
2486 {
2487 TREE_NO_WARNING (repl) = 1;
2488 if (access->grp_partial_lhs)
2489 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2490 !insert_after,
2491 insert_after ? GSI_NEW_STMT
2492 : GSI_SAME_STMT);
2493 stmt = gimple_build_assign (expr, repl);
2494 }
2495 gimple_set_location (stmt, loc);
2496
2497 if (insert_after)
2498 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2499 else
2500 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2501 update_stmt (stmt);
2502 sra_stats.subtree_copies++;
2503 }
2504
2505 if (access->first_child)
2506 generate_subtree_copies (access->first_child, agg, top_offset,
2507 start_offset, chunk_size, gsi,
2508 write, insert_after, loc);
2509
2510 access = access->next_sibling;
2511 }
2512 while (access);
2513 }
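
/* An editorial example of the output (replacement names are hypothetical):
   for an aggregate s whose components s.i and s.f have replacements SR.1 and
   SR.2, a call with WRITE true emits

     SR.1 = s.i;
     SR.2 = s.f;

   while WRITE false emits the copies in the opposite direction,
   "s.i = SR.1;" and so on.  */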
2514
2515 /* Assign zero to all scalar replacements in an access subtree.  ACCESS is
2516    the root of the subtree to be processed.  GSI is the statement iterator used
2517 for inserting statements which are added after the current statement if
2518 INSERT_AFTER is true or before it otherwise. */
2519
2520 static void
2521 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2522 bool insert_after, location_t loc)
2523
2524 {
2525 struct access *child;
2526
2527 if (access->grp_to_be_replaced)
2528 {
2529 gimple stmt;
2530
2531 stmt = gimple_build_assign (get_access_replacement (access),
2532 build_zero_cst (access->type));
2533 if (insert_after)
2534 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2535 else
2536 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2537 update_stmt (stmt);
2538 gimple_set_location (stmt, loc);
2539 }
2540
2541 for (child = access->first_child; child; child = child->next_sibling)
2542 init_subtree_with_zero (child, gsi, insert_after, loc);
2543 }
2544
2545 /* Search for an access representative for the given expression EXPR and
2546 return it or NULL if it cannot be found. */
2547
2548 static struct access *
2549 get_access_for_expr (tree expr)
2550 {
2551 HOST_WIDE_INT offset, size, max_size;
2552 tree base;
2553
2554 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2555 a different size than the size of its argument and we need the latter
2556 one. */
2557 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2558 expr = TREE_OPERAND (expr, 0);
2559
2560 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2561 if (max_size == -1 || !DECL_P (base))
2562 return NULL;
2563
2564 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2565 return NULL;
2566
2567 return get_var_base_offset_size_access (base, offset, max_size);
2568 }
2569
2570 /* Replace the expression EXPR with a scalar replacement if there is one and
2571 generate other statements to do type conversion or subtree copying if
2572 necessary. GSI is used to place newly created statements, WRITE is true if
2573 the expression is being written to (it is on a LHS of a statement or output
2574 in an assembly statement). */
2575
2576 static bool
2577 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2578 {
2579 location_t loc;
2580 struct access *access;
2581 tree type, bfr;
2582
2583 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2584 {
2585 bfr = *expr;
2586 expr = &TREE_OPERAND (*expr, 0);
2587 }
2588 else
2589 bfr = NULL_TREE;
2590
2591 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2592 expr = &TREE_OPERAND (*expr, 0);
2593 access = get_access_for_expr (*expr);
2594 if (!access)
2595 return false;
2596 type = TREE_TYPE (*expr);
2597
2598 loc = gimple_location (gsi_stmt (*gsi));
2599 if (access->grp_to_be_replaced)
2600 {
2601 tree repl = get_access_replacement (access);
2602 /* If we replace a non-register typed access simply use the original
2603 access expression to extract the scalar component afterwards.
2604 This happens if scalarizing a function return value or parameter
2605 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2606 gcc.c-torture/compile/20011217-1.c.
2607
2608 We also want to use this when accessing a complex or vector which can
2609 be accessed as a different type too, potentially creating a need for
2610 type conversion (see PR42196) and when scalarized unions are involved
2611 in assembler statements (see PR42398). */
2612 if (!useless_type_conversion_p (type, access->type))
2613 {
2614 tree ref;
2615
2616 ref = build_ref_for_model (loc, access->base, access->offset, access,
2617 NULL, false);
2618
2619 if (write)
2620 {
2621 gimple stmt;
2622
2623 if (access->grp_partial_lhs)
2624 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2625 false, GSI_NEW_STMT);
2626 stmt = gimple_build_assign (repl, ref);
2627 gimple_set_location (stmt, loc);
2628 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2629 }
2630 else
2631 {
2632 gimple stmt;
2633
2634 if (access->grp_partial_lhs)
2635 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2636 true, GSI_SAME_STMT);
2637 stmt = gimple_build_assign (ref, repl);
2638 gimple_set_location (stmt, loc);
2639 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2640 }
2641 }
2642 else
2643 *expr = repl;
2644 sra_stats.exprs++;
2645 }
2646
2647 if (access->first_child)
2648 {
2649 HOST_WIDE_INT start_offset, chunk_size;
2650 if (bfr
2651 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2652 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2653 {
2654 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2655 start_offset = access->offset
2656 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2657 }
2658 else
2659 start_offset = chunk_size = 0;
2660
2661 generate_subtree_copies (access->first_child, access->base, 0,
2662 start_offset, chunk_size, gsi, write, write,
2663 loc);
2664 }
2665 return true;
2666 }
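
/* An editorial illustration: for a candidate s whose member s.i has the
   replacement SR.1, an expression such as the argument in "f (s.i)" is
   simply rewritten to "f (SR.1)".  When the expression type differs from the
   replacement type, the original reference is kept instead and a statement
   synchronizing it with the replacement is inserted before it (for reads)
   or after it (for writes).  */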
2667
2668 /* Where scalar replacements of the RHS have been written to when a replacement
2669    of the LHS of an assignment cannot be directly loaded from a replacement of
2670    the RHS.  */
2671 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2672 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2673 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2674
2675 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2676    base aggregate if there are unscalarized data or directly to the LHS of the
2677 statement that is pointed to by GSI otherwise. */
2678
2679 static enum unscalarized_data_handling
2680 handle_unscalarized_data_in_subtree (struct access *top_racc,
2681 gimple_stmt_iterator *gsi)
2682 {
2683 if (top_racc->grp_unscalarized_data)
2684 {
2685 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2686 gsi, false, false,
2687 gimple_location (gsi_stmt (*gsi)));
2688 return SRA_UDH_RIGHT;
2689 }
2690 else
2691 {
2692 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2693 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2694 0, 0, gsi, false, false,
2695 gimple_location (gsi_stmt (*gsi)));
2696 return SRA_UDH_LEFT;
2697 }
2698 }
2699
2700
2701 /* Try to generate statements to load all sub-replacements in an access subtree
2702 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2703 If that is not possible, refresh the TOP_RACC base aggregate and load the
2704    accesses from it.  LEFT_OFFSET is the offset of the whole left subtree being
2705    copied.  NEW_GSI is a stmt iterator used for statement insertions after the
2706    original assignment, OLD_GSI is used to insert statements before the
2707    assignment.  *REFRESHED records whether we have needed to refresh
2708    replacements of the LHS and from which side of the assignment this
2709 takes place. */
2710
2711 static void
2712 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2713 HOST_WIDE_INT left_offset,
2714 gimple_stmt_iterator *old_gsi,
2715 gimple_stmt_iterator *new_gsi,
2716 enum unscalarized_data_handling *refreshed)
2717 {
2718 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2719 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2720 {
2721 if (lacc->grp_to_be_replaced)
2722 {
2723 struct access *racc;
2724 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2725 gimple stmt;
2726 tree rhs;
2727
2728 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2729 if (racc && racc->grp_to_be_replaced)
2730 {
2731 rhs = get_access_replacement (racc);
2732 if (!useless_type_conversion_p (lacc->type, racc->type))
2733 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2734
2735 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2736 rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2737 true, GSI_SAME_STMT);
2738 }
2739 else
2740 {
2741 /* No suitable access on the right hand side, need to load from
2742 the aggregate. See if we have to update it first... */
2743 if (*refreshed == SRA_UDH_NONE)
2744 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2745 old_gsi);
2746
2747 if (*refreshed == SRA_UDH_LEFT)
2748 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2749 new_gsi, true);
2750 else
2751 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2752 new_gsi, true);
2753 }
2754
2755 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2756 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2757 gimple_set_location (stmt, loc);
2758 update_stmt (stmt);
2759 sra_stats.subreplacements++;
2760 }
2761 else if (*refreshed == SRA_UDH_NONE
2762 && lacc->grp_read && !lacc->grp_covered)
2763 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2764 old_gsi);
2765
2766 if (lacc->first_child)
2767 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2768 old_gsi, new_gsi, refreshed);
2769 }
2770 }
2771
2772 /* Result code for SRA assignment modification. */
2773 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2774 SRA_AM_MODIFIED, /* stmt changed but not
2775 removed */
2776 SRA_AM_REMOVED }; /* stmt eliminated */
2777
2778 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2779 to the assignment and GSI is the statement iterator pointing at it. Returns
2780 the same values as sra_modify_assign. */
2781
2782 static enum assignment_mod_result
2783 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2784 {
2785 tree lhs = gimple_assign_lhs (*stmt);
2786 struct access *acc;
2787 location_t loc;
2788
2789 acc = get_access_for_expr (lhs);
2790 if (!acc)
2791 return SRA_AM_NONE;
2792
2793 loc = gimple_location (*stmt);
2794 if (VEC_length (constructor_elt,
2795 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2796 {
2797 /* I have never seen this code path trigger but if it can happen the
2798 following should handle it gracefully. */
2799 if (access_has_children_p (acc))
2800 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2801 true, true, loc);
2802 return SRA_AM_MODIFIED;
2803 }
2804
2805 if (acc->grp_covered)
2806 {
2807 init_subtree_with_zero (acc, gsi, false, loc);
2808 unlink_stmt_vdef (*stmt);
2809 gsi_remove (gsi, true);
2810 return SRA_AM_REMOVED;
2811 }
2812 else
2813 {
2814 init_subtree_with_zero (acc, gsi, true, loc);
2815 return SRA_AM_MODIFIED;
2816 }
2817 }
2818
2819 /* Create and return a new suitable default definition SSA_NAME for RACC which
2820 is an access describing an uninitialized part of an aggregate that is being
2821 loaded. */
2822
2823 static tree
2824 get_repl_default_def_ssa_name (struct access *racc)
2825 {
2826 tree repl, decl;
2827
2828 decl = get_unrenamed_access_replacement (racc);
2829
2830 repl = gimple_default_def (cfun, decl);
2831 if (!repl)
2832 {
2833 repl = make_ssa_name (decl, gimple_build_nop ());
2834 set_default_def (decl, repl);
2835 }
2836
2837 return repl;
2838 }
2839
2840 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2841 somewhere in it. */
2842
2843 static inline bool
2844 contains_bitfld_comp_ref_p (const_tree ref)
2845 {
2846 while (handled_component_p (ref))
2847 {
2848 if (TREE_CODE (ref) == COMPONENT_REF
2849 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2850 return true;
2851 ref = TREE_OPERAND (ref, 0);
2852 }
2853
2854 return false;
2855 }
2856
2857 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2858 bit-field field declaration somewhere in it. */
2859
2860 static inline bool
2861 contains_vce_or_bfcref_p (const_tree ref)
2862 {
2863 while (handled_component_p (ref))
2864 {
2865 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2866 || (TREE_CODE (ref) == COMPONENT_REF
2867 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2868 return true;
2869 ref = TREE_OPERAND (ref, 0);
2870 }
2871
2872 return false;
2873 }
2874
2875 /* Examine both sides of the assignment statement pointed to by STMT, replace
2876    them with a scalar replacement if there is one and generate copying of
2877 replacements if scalarized aggregates have been used in the assignment. GSI
2878 is used to hold generated statements for type conversions and subtree
2879 copying. */
2880
2881 static enum assignment_mod_result
2882 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2883 {
2884 struct access *lacc, *racc;
2885 tree lhs, rhs;
2886 bool modify_this_stmt = false;
2887 bool force_gimple_rhs = false;
2888 location_t loc;
2889 gimple_stmt_iterator orig_gsi = *gsi;
2890
2891 if (!gimple_assign_single_p (*stmt))
2892 return SRA_AM_NONE;
2893 lhs = gimple_assign_lhs (*stmt);
2894 rhs = gimple_assign_rhs1 (*stmt);
2895
2896 if (TREE_CODE (rhs) == CONSTRUCTOR)
2897 return sra_modify_constructor_assign (stmt, gsi);
2898
2899 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2900 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2901 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2902 {
2903 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2904 gsi, false);
2905 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2906 gsi, true);
2907 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2908 }
2909
2910 lacc = get_access_for_expr (lhs);
2911 racc = get_access_for_expr (rhs);
2912 if (!lacc && !racc)
2913 return SRA_AM_NONE;
2914
2915 loc = gimple_location (*stmt);
2916 if (lacc && lacc->grp_to_be_replaced)
2917 {
2918 lhs = get_access_replacement (lacc);
2919 gimple_assign_set_lhs (*stmt, lhs);
2920 modify_this_stmt = true;
2921 if (lacc->grp_partial_lhs)
2922 force_gimple_rhs = true;
2923 sra_stats.exprs++;
2924 }
2925
2926 if (racc && racc->grp_to_be_replaced)
2927 {
2928 rhs = get_access_replacement (racc);
2929 modify_this_stmt = true;
2930 if (racc->grp_partial_lhs)
2931 force_gimple_rhs = true;
2932 sra_stats.exprs++;
2933 }
2934
2935 if (modify_this_stmt)
2936 {
2937 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2938 {
2939 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2940 ??? This should move to fold_stmt which we simply should
2941 call after building a VIEW_CONVERT_EXPR here. */
2942 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2943 && !contains_bitfld_comp_ref_p (lhs)
2944 && !access_has_children_p (lacc))
2945 {
2946 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
2947 gimple_assign_set_lhs (*stmt, lhs);
2948 }
2949 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2950 && !contains_vce_or_bfcref_p (rhs)
2951 && !access_has_children_p (racc))
2952 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
2953
2954 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2955 {
2956 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
2957 rhs);
2958 if (is_gimple_reg_type (TREE_TYPE (lhs))
2959 && TREE_CODE (lhs) != SSA_NAME)
2960 force_gimple_rhs = true;
2961 }
2962 }
2963 }
2964
2965 /* From this point on, the function deals with assignments in between
2966 aggregates when at least one has scalar reductions of some of its
2967    components.  There are three possible scenarios: 1) both the LHS and RHS
2968    have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
2969
2970 In the first case, we would like to load the LHS components from RHS
2971 components whenever possible. If that is not possible, we would like to
2972 read it directly from the RHS (after updating it by storing in it its own
2973 components). If there are some necessary unscalarized data in the LHS,
2974 those will be loaded by the original assignment too. If neither of these
2975 cases happen, the original statement can be removed. Most of this is done
2976 by load_assign_lhs_subreplacements.
2977
2978 In the second case, we would like to store all RHS scalarized components
2979 directly into LHS and if they cover the aggregate completely, remove the
2980 statement too. In the third case, we want the LHS components to be loaded
2981 directly from the RHS (DSE will remove the original statement if it
2982 becomes redundant).
2983
2984 This is a bit complex but manageable when types match and when unions do
2985 not cause confusion in a way that we cannot really load a component of LHS
2986 from the RHS or vice versa (the access representing this level can have
2987 subaccesses that are accessible only through a different union field at a
2988 higher level - different from the one used in the examined expression).
2989 Unions are fun.
2990
2991 Therefore, I specially handle a fourth case, happening when there is a
2992 specific type cast or it is impossible to locate a scalarized subaccess on
2993 the other side of the expression. If that happens, I simply "refresh" the
2994    RHS by storing in it its scalarized components, leave the original statement
2995 there to do the copying and then load the scalar replacements of the LHS.
2996 This is what the first branch does. */
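
  /* An editorial sketch of the first scenario above (replacement names are
     hypothetical): for

       struct S { int i; int j; } d, s;
       ...
       d = s;

     where both d and s have replacements for their members, SR.d_i is loaded
     directly from SR.s_i (likewise for j) and, when the replacements cover
     the whole aggregate, the statement "d = s" itself is removed.  */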
2997
2998 if (modify_this_stmt
2999 || gimple_has_volatile_ops (*stmt)
3000 || contains_vce_or_bfcref_p (rhs)
3001 || contains_vce_or_bfcref_p (lhs))
3002 {
3003 if (access_has_children_p (racc))
3004 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3005 gsi, false, false, loc);
3006 if (access_has_children_p (lacc))
3007 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3008 gsi, true, true, loc);
3009 sra_stats.separate_lhs_rhs_handling++;
3010 }
3011 else
3012 {
3013 if (access_has_children_p (lacc) && access_has_children_p (racc))
3014 {
3015 gimple_stmt_iterator orig_gsi = *gsi;
3016 enum unscalarized_data_handling refreshed;
3017
3018 if (lacc->grp_read && !lacc->grp_covered)
3019 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3020 else
3021 refreshed = SRA_UDH_NONE;
3022
3023 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3024 &orig_gsi, gsi, &refreshed);
3025 if (refreshed != SRA_UDH_RIGHT)
3026 {
3027 gsi_next (gsi);
3028 unlink_stmt_vdef (*stmt);
3029 gsi_remove (&orig_gsi, true);
3030 sra_stats.deleted++;
3031 return SRA_AM_REMOVED;
3032 }
3033 }
3034 else
3035 {
3036 if (racc)
3037 {
3038 if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
3039 {
3040 if (dump_file)
3041 {
3042 fprintf (dump_file, "Removing load: ");
3043 print_gimple_stmt (dump_file, *stmt, 0, 0);
3044 }
3045
3046 if (TREE_CODE (lhs) == SSA_NAME)
3047 {
3048 rhs = get_repl_default_def_ssa_name (racc);
3049 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3050 TREE_TYPE (rhs)))
3051 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3052 TREE_TYPE (lhs), rhs);
3053 }
3054 else
3055 {
3056 if (racc->first_child)
3057 generate_subtree_copies (racc->first_child, lhs,
3058 racc->offset, 0, 0, gsi,
3059 false, false, loc);
3060
3061 gcc_assert (*stmt == gsi_stmt (*gsi));
3062 unlink_stmt_vdef (*stmt);
3063 gsi_remove (gsi, true);
3064 sra_stats.deleted++;
3065 return SRA_AM_REMOVED;
3066 }
3067 }
3068 else if (racc->first_child)
3069 generate_subtree_copies (racc->first_child, lhs, racc->offset,
3070 0, 0, gsi, false, true, loc);
3071 }
3072 if (access_has_children_p (lacc))
3073 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3074 0, 0, gsi, true, true, loc);
3075 }
3076 }
3077
3078 /* This gimplification must be done after generate_subtree_copies, lest we
3079 insert the subtree copies in the middle of the gimplified sequence. */
3080 if (force_gimple_rhs)
3081 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3082 true, GSI_SAME_STMT);
3083 if (gimple_assign_rhs1 (*stmt) != rhs)
3084 {
3085 modify_this_stmt = true;
3086 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3087 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3088 }
3089
3090 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3091 }
3092
3093 /* Traverse the function body and apply all modifications as decided in
3094 analyze_all_variable_accesses. Return true iff the CFG has been
3095 changed. */
3096
3097 static bool
3098 sra_modify_function_body (void)
3099 {
3100 bool cfg_changed = false;
3101 basic_block bb;
3102
3103 FOR_EACH_BB (bb)
3104 {
3105 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3106 while (!gsi_end_p (gsi))
3107 {
3108 gimple stmt = gsi_stmt (gsi);
3109 enum assignment_mod_result assign_result;
3110 bool modified = false, deleted = false;
3111 tree *t;
3112 unsigned i;
3113
3114 switch (gimple_code (stmt))
3115 {
3116 case GIMPLE_RETURN:
3117 t = gimple_return_retval_ptr (stmt);
3118 if (*t != NULL_TREE)
3119 modified |= sra_modify_expr (t, &gsi, false);
3120 break;
3121
3122 case GIMPLE_ASSIGN:
3123 assign_result = sra_modify_assign (&stmt, &gsi);
3124 modified |= assign_result == SRA_AM_MODIFIED;
3125 deleted = assign_result == SRA_AM_REMOVED;
3126 break;
3127
3128 case GIMPLE_CALL:
3129 /* Operands must be processed before the lhs. */
3130 for (i = 0; i < gimple_call_num_args (stmt); i++)
3131 {
3132 t = gimple_call_arg_ptr (stmt, i);
3133 modified |= sra_modify_expr (t, &gsi, false);
3134 }
3135
3136 if (gimple_call_lhs (stmt))
3137 {
3138 t = gimple_call_lhs_ptr (stmt);
3139 modified |= sra_modify_expr (t, &gsi, true);
3140 }
3141 break;
3142
3143 case GIMPLE_ASM:
3144 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3145 {
3146 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3147 modified |= sra_modify_expr (t, &gsi, false);
3148 }
3149 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3150 {
3151 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3152 modified |= sra_modify_expr (t, &gsi, true);
3153 }
3154 break;
3155
3156 default:
3157 break;
3158 }
3159
3160 if (modified)
3161 {
3162 update_stmt (stmt);
3163 if (maybe_clean_eh_stmt (stmt)
3164 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3165 cfg_changed = true;
3166 }
3167 if (!deleted)
3168 gsi_next (&gsi);
3169 }
3170 }
3171
3172 return cfg_changed;
3173 }
3174
3175 /* Generate statements initializing scalar replacements of parts of function
3176 parameters. */
3177
3178 static void
3179 initialize_parameter_reductions (void)
3180 {
3181 gimple_stmt_iterator gsi;
3182 gimple_seq seq = NULL;
3183 tree parm;
3184
3185 for (parm = DECL_ARGUMENTS (current_function_decl);
3186 parm;
3187 parm = DECL_CHAIN (parm))
3188 {
3189 VEC (access_p, heap) *access_vec;
3190 struct access *access;
3191
3192 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3193 continue;
3194 access_vec = get_base_access_vector (parm);
3195 if (!access_vec)
3196 continue;
3197
3198 if (!seq)
3199 {
3200 seq = gimple_seq_alloc ();
3201 gsi = gsi_start (seq);
3202 }
3203
3204 for (access = VEC_index (access_p, access_vec, 0);
3205 access;
3206 access = access->next_grp)
3207 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3208 EXPR_LOCATION (parm));
3209 }
3210
3211 if (seq)
3212 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3213 }
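
/* An editorial example (the replacement name is hypothetical): for a
   parameter "struct S { int i; } s" whose component s.i has the replacement
   SR.1, the sequence "SR.1 = s.i;" is inserted on the single edge leaving
   the entry block, so the replacement starts out with the incoming value.  */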
3214
3215 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3216 it reveals there are components of some aggregates to be scalarized, it runs
3217 the required transformations. */
3218 static unsigned int
3219 perform_intra_sra (void)
3220 {
3221 int ret = 0;
3222 sra_initialize ();
3223
3224 if (!find_var_candidates ())
3225 goto out;
3226
3227 if (!scan_function ())
3228 goto out;
3229
3230 if (!analyze_all_variable_accesses ())
3231 goto out;
3232
3233 if (sra_modify_function_body ())
3234 ret = TODO_update_ssa | TODO_cleanup_cfg;
3235 else
3236 ret = TODO_update_ssa;
3237 initialize_parameter_reductions ();
3238
3239 statistics_counter_event (cfun, "Scalar replacements created",
3240 sra_stats.replacements);
3241 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3242 statistics_counter_event (cfun, "Subtree copy stmts",
3243 sra_stats.subtree_copies);
3244 statistics_counter_event (cfun, "Subreplacement stmts",
3245 sra_stats.subreplacements);
3246 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3247 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3248 sra_stats.separate_lhs_rhs_handling);
3249
3250 out:
3251 sra_deinitialize ();
3252 return ret;
3253 }
3254
3255 /* Perform early intraprocedural SRA. */
3256 static unsigned int
3257 early_intra_sra (void)
3258 {
3259 sra_mode = SRA_MODE_EARLY_INTRA;
3260 return perform_intra_sra ();
3261 }
3262
3263 /* Perform "late" intraprocedural SRA. */
3264 static unsigned int
3265 late_intra_sra (void)
3266 {
3267 sra_mode = SRA_MODE_INTRA;
3268 return perform_intra_sra ();
3269 }
3270
3271
3272 static bool
3273 gate_intra_sra (void)
3274 {
3275 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3276 }
3277
3278
3279 struct gimple_opt_pass pass_sra_early =
3280 {
3281 {
3282 GIMPLE_PASS,
3283 "esra", /* name */
3284 gate_intra_sra, /* gate */
3285 early_intra_sra, /* execute */
3286 NULL, /* sub */
3287 NULL, /* next */
3288 0, /* static_pass_number */
3289 TV_TREE_SRA, /* tv_id */
3290 PROP_cfg | PROP_ssa, /* properties_required */
3291 0, /* properties_provided */
3292 0, /* properties_destroyed */
3293 0, /* todo_flags_start */
3294 TODO_update_ssa
3295 | TODO_ggc_collect
3296 | TODO_verify_ssa /* todo_flags_finish */
3297 }
3298 };
3299
3300 struct gimple_opt_pass pass_sra =
3301 {
3302 {
3303 GIMPLE_PASS,
3304 "sra", /* name */
3305 gate_intra_sra, /* gate */
3306 late_intra_sra, /* execute */
3307 NULL, /* sub */
3308 NULL, /* next */
3309 0, /* static_pass_number */
3310 TV_TREE_SRA, /* tv_id */
3311 PROP_cfg | PROP_ssa, /* properties_required */
3312 0, /* properties_provided */
3313 0, /* properties_destroyed */
3314 TODO_update_address_taken, /* todo_flags_start */
3315 TODO_update_ssa
3316 | TODO_ggc_collect
3317 | TODO_verify_ssa /* todo_flags_finish */
3318 }
3319 };
3320
3321
3322 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3323 parameter. */
3324
3325 static bool
3326 is_unused_scalar_param (tree parm)
3327 {
3328 tree name;
3329 return (is_gimple_reg (parm)
3330 && (!(name = gimple_default_def (cfun, parm))
3331 || has_zero_uses (name)));
3332 }
3333
3334 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3335 examine whether there are any direct or otherwise infeasible ones. If so,
3336 return true, otherwise return false. PARM must be a gimple register with a
3337 non-NULL default definition. */
3338
3339 static bool
3340 ptr_parm_has_direct_uses (tree parm)
3341 {
3342 imm_use_iterator ui;
3343 gimple stmt;
3344 tree name = gimple_default_def (cfun, parm);
3345 bool ret = false;
3346
3347 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3348 {
3349 int uses_ok = 0;
3350 use_operand_p use_p;
3351
3352 if (is_gimple_debug (stmt))
3353 continue;
3354
3355 /* Valid uses include dereferences on the lhs and the rhs. */
3356 if (gimple_has_lhs (stmt))
3357 {
3358 tree lhs = gimple_get_lhs (stmt);
3359 while (handled_component_p (lhs))
3360 lhs = TREE_OPERAND (lhs, 0);
3361 if (TREE_CODE (lhs) == MEM_REF
3362 && TREE_OPERAND (lhs, 0) == name
3363 && integer_zerop (TREE_OPERAND (lhs, 1))
3364 && types_compatible_p (TREE_TYPE (lhs),
3365 TREE_TYPE (TREE_TYPE (name)))
3366 && !TREE_THIS_VOLATILE (lhs))
3367 uses_ok++;
3368 }
3369 if (gimple_assign_single_p (stmt))
3370 {
3371 tree rhs = gimple_assign_rhs1 (stmt);
3372 while (handled_component_p (rhs))
3373 rhs = TREE_OPERAND (rhs, 0);
3374 if (TREE_CODE (rhs) == MEM_REF
3375 && TREE_OPERAND (rhs, 0) == name
3376 && integer_zerop (TREE_OPERAND (rhs, 1))
3377 && types_compatible_p (TREE_TYPE (rhs),
3378 TREE_TYPE (TREE_TYPE (name)))
3379 && !TREE_THIS_VOLATILE (rhs))
3380 uses_ok++;
3381 }
3382 else if (is_gimple_call (stmt))
3383 {
3384 unsigned i;
3385 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3386 {
3387 tree arg = gimple_call_arg (stmt, i);
3388 while (handled_component_p (arg))
3389 arg = TREE_OPERAND (arg, 0);
3390 if (TREE_CODE (arg) == MEM_REF
3391 && TREE_OPERAND (arg, 0) == name
3392 && integer_zerop (TREE_OPERAND (arg, 1))
3393 && types_compatible_p (TREE_TYPE (arg),
3394 TREE_TYPE (TREE_TYPE (name)))
3395 && !TREE_THIS_VOLATILE (arg))
3396 uses_ok++;
3397 }
3398 }
3399
3400 /* If the number of valid uses does not match the number of
3401 uses in this stmt there is an unhandled use. */
3402 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3403 --uses_ok;
3404
3405 if (uses_ok != 0)
3406 ret = true;
3407
3408 if (ret)
3409 BREAK_FROM_IMM_USE_STMT (ui);
3410 }
3411
3412 return ret;
3413 }
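
/* Editorial examples of the distinction (function names are made up):

     int deref_only (int *p) { return *p; }
     int escaping (int *p) { record (p); return *p; }

   In deref_only every use of p is a dereference, so the function above
   returns false; in escaping the call argument p is a direct use and the
   function returns true, disqualifying p from IPA-SRA.  */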
3414
3415 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3416 them in candidate_bitmap. Note that these do not necessarily include
3417    parameters which are unused and thus can be removed.  Return true iff any
3418 such candidate has been found. */
3419
3420 static bool
3421 find_param_candidates (void)
3422 {
3423 tree parm;
3424 int count = 0;
3425 bool ret = false;
3426 const char *msg;
3427
3428 for (parm = DECL_ARGUMENTS (current_function_decl);
3429 parm;
3430 parm = DECL_CHAIN (parm))
3431 {
3432 tree type = TREE_TYPE (parm);
3433
3434 count++;
3435
3436 if (TREE_THIS_VOLATILE (parm)
3437 || TREE_ADDRESSABLE (parm)
3438 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3439 continue;
3440
3441 if (is_unused_scalar_param (parm))
3442 {
3443 ret = true;
3444 continue;
3445 }
3446
3447 if (POINTER_TYPE_P (type))
3448 {
3449 type = TREE_TYPE (type);
3450
3451 if (TREE_CODE (type) == FUNCTION_TYPE
3452 || TYPE_VOLATILE (type)
3453 || (TREE_CODE (type) == ARRAY_TYPE
3454 && TYPE_NONALIASED_COMPONENT (type))
3455 || !is_gimple_reg (parm)
3456 || is_va_list_type (type)
3457 || ptr_parm_has_direct_uses (parm))
3458 continue;
3459 }
3460 else if (!AGGREGATE_TYPE_P (type))
3461 continue;
3462
3463 if (!COMPLETE_TYPE_P (type)
3464 || !host_integerp (TYPE_SIZE (type), 1)
3465 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3466 || (AGGREGATE_TYPE_P (type)
3467 && type_internals_preclude_sra_p (type, &msg)))
3468 continue;
3469
3470 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3471 ret = true;
3472 if (dump_file && (dump_flags & TDF_DETAILS))
3473 {
3474 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3475 print_generic_expr (dump_file, parm, 0);
3476 fprintf (dump_file, "\n");
3477 }
3478 }
3479
3480 func_param_count = count;
3481 return ret;
3482 }
3483
3484 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3485 maybe_modified. */
3486
3487 static bool
3488 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3489 void *data)
3490 {
3491 struct access *repr = (struct access *) data;
3492
3493 repr->grp_maybe_modified = 1;
3494 return true;
3495 }
3496
3497 /* Analyze what representatives (in linked lists accessible from
3498 REPRESENTATIVES) can be modified by side effects of statements in the
3499 current function. */
3500
3501 static void
3502 analyze_modified_params (VEC (access_p, heap) *representatives)
3503 {
3504 int i;
3505
3506 for (i = 0; i < func_param_count; i++)
3507 {
3508 struct access *repr;
3509
3510 for (repr = VEC_index (access_p, representatives, i);
3511 repr;
3512 repr = repr->next_grp)
3513 {
3514 struct access *access;
3515 bitmap visited;
3516 ao_ref ar;
3517
3518 if (no_accesses_p (repr))
3519 continue;
3520 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3521 || repr->grp_maybe_modified)
3522 continue;
3523
3524 ao_ref_init (&ar, repr->expr);
3525 visited = BITMAP_ALLOC (NULL);
3526 for (access = repr; access; access = access->next_sibling)
3527 {
3528 /* All accesses are read ones, otherwise grp_maybe_modified would
3529 be trivially set. */
3530 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3531 mark_maybe_modified, repr, &visited);
3532 if (repr->grp_maybe_modified)
3533 break;
3534 }
3535 BITMAP_FREE (visited);
3536 }
3537 }
3538 }
3539
3540 /* Propagate distances in bb_dereferences in the opposite direction to the
3541    control flow edges, in each step storing the maximum of the current value
3542    and the minimum of all successors.  These steps are repeated until the table
3543    stabilizes.  Note that BBs which might terminate the function (according to
3544    the final_bbs bitmap) are never updated in this way.  */
3545
3546 static void
3547 propagate_dereference_distances (void)
3548 {
3549 VEC (basic_block, heap) *queue;
3550 basic_block bb;
3551
3552 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3553 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3554 FOR_EACH_BB (bb)
3555 {
3556 VEC_quick_push (basic_block, queue, bb);
3557 bb->aux = bb;
3558 }
3559
3560 while (!VEC_empty (basic_block, queue))
3561 {
3562 edge_iterator ei;
3563 edge e;
3564 bool change = false;
3565 int i;
3566
3567 bb = VEC_pop (basic_block, queue);
3568 bb->aux = NULL;
3569
3570 if (bitmap_bit_p (final_bbs, bb->index))
3571 continue;
3572
3573 for (i = 0; i < func_param_count; i++)
3574 {
3575 int idx = bb->index * func_param_count + i;
3576 bool first = true;
3577 HOST_WIDE_INT inh = 0;
3578
3579 FOR_EACH_EDGE (e, ei, bb->succs)
3580 {
3581 int succ_idx = e->dest->index * func_param_count + i;
3582
3583 	      if (e->dest == EXIT_BLOCK_PTR)
3584 continue;
3585
3586 if (first)
3587 {
3588 first = false;
3589 inh = bb_dereferences [succ_idx];
3590 }
3591 else if (bb_dereferences [succ_idx] < inh)
3592 inh = bb_dereferences [succ_idx];
3593 }
3594
3595 if (!first && bb_dereferences[idx] < inh)
3596 {
3597 bb_dereferences[idx] = inh;
3598 change = true;
3599 }
3600 }
3601
3602 if (change && !bitmap_bit_p (final_bbs, bb->index))
3603 FOR_EACH_EDGE (e, ei, bb->preds)
3604 {
3605 if (e->src->aux)
3606 continue;
3607
3608 e->src->aux = e->src;
3609 VEC_quick_push (basic_block, queue, e->src);
3610 }
3611 }
3612
3613 VEC_free (basic_block, heap, queue);
3614 }
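
/* The update performed above, written as a recurrence (editorial note): for
   every BB not in final_bbs and every parameter I,

     dist[BB][I] = MAX (dist[BB][I], MIN over successors S of dist[S][I])

   applied repeatedly through the work queue until a fixed point is
   reached.  */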
3615
3616 /* Dump a dereferences TABLE with heading STR to file F. */
3617
3618 static void
3619 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3620 {
3621 basic_block bb;
3622
3623   fprintf (f, "%s", str);
3624 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3625 {
3626 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3627 if (bb != EXIT_BLOCK_PTR)
3628 {
3629 int i;
3630 for (i = 0; i < func_param_count; i++)
3631 {
3632 int idx = bb->index * func_param_count + i;
3633 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3634 }
3635 }
3636 fprintf (f, "\n");
3637 }
3638   fprintf (f, "\n");
3639 }
3640
3641 /* Determine which (parts of) parameters passed by reference and not
3642    assigned to are not certainly dereferenced in this function, and for which
3643    the dereferencing thus cannot be safely moved to the caller without potentially
3644 introducing a segfault. Mark such REPRESENTATIVES as
3645 grp_not_necessarilly_dereferenced.
3646
3647 The dereferenced maximum "distance," i.e. the offset + size of the accessed
3648 part is calculated rather than simple booleans are calculated for each
3649 pointer parameter to handle cases when only a fraction of the whole
3650 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3651 an example).
3652
3653 The maximum dereference distances for each pointer parameter and BB are
3654 already stored in bb_dereference. This routine simply propagates these
3655 values upwards by propagate_dereference_distances and then compares the
3656 distances of individual parameters in the ENTRY BB to the equivalent
3657 distances of each representative of a (fraction of a) parameter. */
3658
3659 static void
3660 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3661 {
3662 int i;
3663
3664 if (dump_file && (dump_flags & TDF_DETAILS))
3665 dump_dereferences_table (dump_file,
3666 "Dereference table before propagation:\n",
3667 bb_dereferences);
3668
3669 propagate_dereference_distances ();
3670
3671 if (dump_file && (dump_flags & TDF_DETAILS))
3672 dump_dereferences_table (dump_file,
3673 "Dereference table after propagation:\n",
3674 bb_dereferences);
3675
3676 for (i = 0; i < func_param_count; i++)
3677 {
3678 struct access *repr = VEC_index (access_p, representatives, i);
3679 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3680
3681 if (!repr || no_accesses_p (repr))
3682 continue;
3683
3684 do
3685 {
3686 if ((repr->offset + repr->size) > bb_dereferences[idx])
3687 repr->grp_not_necessarilly_dereferenced = 1;
3688 repr = repr->next_grp;
3689 }
3690 while (repr);
3691 }
3692 }
3693
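/* A hypothetical example in the spirit of ipa-sra-2.c: if callers may
   allocate only the first member of

     struct big { int first; int rest[100]; };

   while the function dereferences just P->first unconditionally, then the
   certain dereference distance at the entry BB covers only FIRST, and any
   representative access reaching beyond it is marked
   grp_not_necessarilly_dereferenced so that its dereference is not moved
   into the callers. */
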
3694 /* Return the representative access for the parameter declaration PARM if it is
3695 a scalar passed by reference which is not written to and the pointer value
3696 is not used directly. Thus, if it is legal to dereference it in the caller
3697 and we can rule out modifications through aliases, such a parameter should
3698 be turned into one passed by value. Return NULL otherwise. */
3699
3700 static struct access *
3701 unmodified_by_ref_scalar_representative (tree parm)
3702 {
3703 int i, access_count;
3704 struct access *repr;
3705 VEC (access_p, heap) *access_vec;
3706
3707 access_vec = get_base_access_vector (parm);
3708 gcc_assert (access_vec);
3709 repr = VEC_index (access_p, access_vec, 0);
3710 if (repr->write)
3711 return NULL;
3712 repr->group_representative = repr;
3713
3714 access_count = VEC_length (access_p, access_vec);
3715 for (i = 1; i < access_count; i++)
3716 {
3717 struct access *access = VEC_index (access_p, access_vec, i);
3718 if (access->write)
3719 return NULL;
3720 access->group_representative = repr;
3721 access->next_sibling = repr->next_sibling;
3722 repr->next_sibling = access;
3723 }
3724
3725 repr->grp_read = 1;
3726 repr->grp_scalar_ptr = 1;
3727 return repr;
3728 }
3729
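/* The transformation this enables, on a hypothetical example:

     static int foo (int *p) { return *p + 1; }

   may become, in the "isra" clone,

     static int foo.isra.0 (int p_val) { return p_val + 1; }

   with every caller passing the value of *P instead of P itself -- which is
   why the dereference must also be proven safe in the caller and unaliased,
   see analyze_caller_dereference_legality and analyze_modified_params. */
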
3730 /* Return true iff this access precludes IPA-SRA of the parameter it is
3731 associated with. */
3732
3733 static bool
3734 access_precludes_ipa_sra_p (struct access *access)
3735 {
3736 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3737 is incompatible assign in a call statement (and possibly even in asm
3738 statements). This can be relaxed by using a new temporary but only for
3739 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3740 intraprocedural SRA we deal with this by keeping the old aggregate around,
3741 something we cannot do in IPA-SRA.) */
3742 if (access->write
3743 && (is_gimple_call (access->stmt)
3744 || gimple_code (access->stmt) == GIMPLE_ASM))
3745 return true;
3746
3747 if (STRICT_ALIGNMENT
3748 && tree_non_aligned_mem_p (access->expr, TYPE_ALIGN (access->type)))
3749 return true;
3750
3751 return false;
3752 }
3753
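/* For instance (a hypothetical case of the first test above), in

     void foo (struct S s) { s.i = bar (); use (&s); }

   the write to S.I is the LHS of a GIMPLE_CALL; rewriting it to a new
   scalar parameter could create a type-incompatible assignment directly in
   the call statement, so such an access precludes IPA-SRA. */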
3754
3755 /* Sort collected accesses for parameter PARM, identify representatives for
3756 each accessed region and link them together. Return NULL if there are
3757 different but overlapping accesses, the special pointer value meaning there
3758 are no accesses for this parameter if that is the case, and the first
3759 representative otherwise. Set *RO_GRP if there is a group consisting only
3760 of read (i.e. no write) accesses. */
3761
3762 static struct access *
3763 splice_param_accesses (tree parm, bool *ro_grp)
3764 {
3765 int i, j, access_count, group_count;
3766 int agg_size, total_size = 0;
3767 struct access *access, *res, **prev_acc_ptr = &res;
3768 VEC (access_p, heap) *access_vec;
3769
3770 access_vec = get_base_access_vector (parm);
3771 if (!access_vec)
3772 return &no_accesses_representant;
3773 access_count = VEC_length (access_p, access_vec);
3774
3775 VEC_qsort (access_p, access_vec, compare_access_positions);
3776
3777 i = 0;
3778 total_size = 0;
3779 group_count = 0;
3780 while (i < access_count)
3781 {
3782 bool modification;
3783 tree a1_alias_type;
3784 access = VEC_index (access_p, access_vec, i);
3785 modification = access->write;
3786 if (access_precludes_ipa_sra_p (access))
3787 return NULL;
3788 a1_alias_type = reference_alias_ptr_type (access->expr);
3789
3790 /* Access is about to become group representative unless we find some
3791 nasty overlap which would preclude us from breaking this parameter
3792 apart. */
3793
3794 j = i + 1;
3795 while (j < access_count)
3796 {
3797 struct access *ac2 = VEC_index (access_p, access_vec, j);
3798 if (ac2->offset != access->offset)
3799 {
3800 /* All or nothing law for parameters. */
3801 if (access->offset + access->size > ac2->offset)
3802 return NULL;
3803 else
3804 break;
3805 }
3806 else if (ac2->size != access->size)
3807 return NULL;
3808
3809 if (access_precludes_ipa_sra_p (ac2)
3810 || (ac2->type != access->type
3811 && (TREE_ADDRESSABLE (ac2->type)
3812 || TREE_ADDRESSABLE (access->type)))
3813 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3814 return NULL;
3815
3816 modification |= ac2->write;
3817 ac2->group_representative = access;
3818 ac2->next_sibling = access->next_sibling;
3819 access->next_sibling = ac2;
3820 j++;
3821 }
3822
3823 group_count++;
3824 access->grp_maybe_modified = modification;
3825 if (!modification)
3826 *ro_grp = true;
3827 *prev_acc_ptr = access;
3828 prev_acc_ptr = &access->next_grp;
3829 total_size += access->size;
3830 i = j;
3831 }
3832
3833 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3834 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3835 else
3836 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3837 if (total_size >= agg_size)
3838 return NULL;
3839
3840 gcc_assert (group_count > 0);
3841 return res;
3842 }
3843
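/* Example of the "all or nothing" rule enforced above (hypothetical; sizes
   assume a common LP64 target): for a parameter of type

     union u { long l; struct { int lo; int hi; } s; };

   an access to U.L (offset 0, size 64) and an access to U.S.HI (offset 32,
   size 32) overlap only partially, so the function returns NULL and the
   parameter is left untouched. Accesses at the same offset must also agree
   in size to be spliced into a single group. */
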
3844 /* Decide whether the parameter with representative accesses given by REPR
3845 should be reduced into components; return their number, or zero if not. */
3846
3847 static int
3848 decide_one_param_reduction (struct access *repr)
3849 {
3850 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3851 bool by_ref;
3852 tree parm;
3853
3854 parm = repr->base;
3855 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3856 gcc_assert (cur_parm_size > 0);
3857
3858 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3859 {
3860 by_ref = true;
3861 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3862 }
3863 else
3864 {
3865 by_ref = false;
3866 agg_size = cur_parm_size;
3867 }
3868
3869 if (dump_file)
3870 {
3871 struct access *acc;
3872 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3873 print_generic_expr (dump_file, parm, 0);
3874 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3875 for (acc = repr; acc; acc = acc->next_grp)
3876 dump_access (dump_file, acc, true);
3877 }
3878
3879 total_size = 0;
3880 new_param_count = 0;
3881
3882 for (; repr; repr = repr->next_grp)
3883 {
3884 gcc_assert (parm == repr->base);
3885
3886 /* Taking the address of a non-addressable field is verboten. */
3887 if (by_ref && repr->non_addressable)
3888 return 0;
3889
3890 if (!by_ref || (!repr->grp_maybe_modified
3891 && !repr->grp_not_necessarilly_dereferenced))
3892 total_size += repr->size;
3893 else
3894 total_size += cur_parm_size;
3895
3896 new_param_count++;
3897 }
3898
3899 gcc_assert (new_param_count > 0);
3900
3901 if (optimize_function_for_size_p (cfun))
3902 parm_size_limit = cur_parm_size;
3903 else
3904 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3905 * cur_parm_size);
3906
3907 if (total_size < agg_size
3908 && total_size <= parm_size_limit)
3909 {
3910 if (dump_file)
3911 fprintf (dump_file, " ....will be split into %i components\n",
3912 new_param_count);
3913 return new_param_count;
3914 }
3915 else
3916 return 0;
3917 }
3918
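/* A worked instance of the final size test (hypothetical, assuming 64-bit
   pointers and the default --param ipa-sra-ptr-growth-factor=2): for

     struct S { int a, b, c, d; };
     int foo (struct S *p) { return p->a + p->b; }

   agg_size is 128 bits, cur_parm_size is 64, so parm_size_limit is
   2 * 64 = 128, and total_size is 32 + 32 = 64 for the two read-only,
   certainly dereferenced groups. Since 64 < 128 and 64 <= 128, the
   parameter is split into two components. */
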
3919 /* The order of the following enum values is important; we need to do extra
3920 work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3921 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3922 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3923
3924 /* Identify representatives of all accesses to all candidate parameters for
3925 IPA-SRA. Return result based on what representatives have been found. */
3926
3927 static enum ipa_splicing_result
3928 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3929 {
3930 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3931 tree parm;
3932 struct access *repr;
3933
3934 *representatives = VEC_alloc (access_p, heap, func_param_count);
3935
3936 for (parm = DECL_ARGUMENTS (current_function_decl);
3937 parm;
3938 parm = DECL_CHAIN (parm))
3939 {
3940 if (is_unused_scalar_param (parm))
3941 {
3942 VEC_quick_push (access_p, *representatives,
3943 &no_accesses_representant);
3944 if (result == NO_GOOD_ACCESS)
3945 result = UNUSED_PARAMS;
3946 }
3947 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3948 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3949 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3950 {
3951 repr = unmodified_by_ref_scalar_representative (parm);
3952 VEC_quick_push (access_p, *representatives, repr);
3953 if (repr)
3954 result = UNMODIF_BY_REF_ACCESSES;
3955 }
3956 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3957 {
3958 bool ro_grp = false;
3959 repr = splice_param_accesses (parm, &ro_grp);
3960 VEC_quick_push (access_p, *representatives, repr);
3961
3962 if (repr && !no_accesses_p (repr))
3963 {
3964 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3965 {
3966 if (ro_grp)
3967 result = UNMODIF_BY_REF_ACCESSES;
3968 else if (result < MODIF_BY_REF_ACCESSES)
3969 result = MODIF_BY_REF_ACCESSES;
3970 }
3971 else if (result < BY_VAL_ACCESSES)
3972 result = BY_VAL_ACCESSES;
3973 }
3974 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3975 result = UNUSED_PARAMS;
3976 }
3977 else
3978 VEC_quick_push (access_p, *representatives, NULL);
3979 }
3980
3981 if (result == NO_GOOD_ACCESS)
3982 {
3983 VEC_free (access_p, heap, *representatives);
3984 *representatives = NULL;
3985 return NO_GOOD_ACCESS;
3986 }
3987
3988 return result;
3989 }
3990
3991 /* Return the index of BASE in PARMS. Abort if it is not found. */
3992
3993 static inline int
3994 get_param_index (tree base, VEC(tree, heap) *parms)
3995 {
3996 int i, len;
3997
3998 len = VEC_length (tree, parms);
3999 for (i = 0; i < len; i++)
4000 if (VEC_index (tree, parms, i) == base)
4001 return i;
4002 gcc_unreachable ();
4003 }
4004
4005 /* Convert the decisions made at the representative level into compact
4006 parameter adjustments. REPRESENTATIVES are pointers to the first
4007 representative of the accesses of each parameter; ADJUSTMENTS_COUNT is the
4008 expected final number of adjustments. */
4009
4010 static ipa_parm_adjustment_vec
4011 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
4012 int adjustments_count)
4013 {
4014 VEC (tree, heap) *parms;
4015 ipa_parm_adjustment_vec adjustments;
4016 tree parm;
4017 int i;
4018
4019 gcc_assert (adjustments_count > 0);
4020 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4021 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
4022 parm = DECL_ARGUMENTS (current_function_decl);
4023 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4024 {
4025 struct access *repr = VEC_index (access_p, representatives, i);
4026
4027 if (!repr || no_accesses_p (repr))
4028 {
4029 struct ipa_parm_adjustment *adj;
4030
4031 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4032 memset (adj, 0, sizeof (*adj));
4033 adj->base_index = get_param_index (parm, parms);
4034 adj->base = parm;
4035 if (!repr)
4036 adj->copy_param = 1;
4037 else
4038 adj->remove_param = 1;
4039 }
4040 else
4041 {
4042 struct ipa_parm_adjustment *adj;
4043 int index = get_param_index (parm, parms);
4044
4045 for (; repr; repr = repr->next_grp)
4046 {
4047 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4048 memset (adj, 0, sizeof (*adj));
4049 gcc_assert (repr->base == parm);
4050 adj->base_index = index;
4051 adj->base = repr->base;
4052 adj->type = repr->type;
4053 adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
4054 adj->offset = repr->offset;
4055 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4056 && (repr->grp_maybe_modified
4057 || repr->grp_not_necessarilly_dereferenced));
4058
4059 }
4060 }
4061 }
4062 VEC_free (tree, heap, parms);
4063 return adjustments;
4064 }
4065
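/* For example (hypothetical), if the first parameter of

     int foo (struct S *p, int q)

   is split into two components at bit offsets 0 and 32 while Q is kept, the
   resulting vector contains three adjustments:

     { base_index 0, offset 0 }, { base_index 0, offset 32 },
     { base_index 1, copy_param 1 }

   i.e. one entry per resulting formal parameter, each referring back to the
   original parameter it was derived from. */
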
4066 /* Analyze the collected accesses and produce a plan of what to do with the
4067 parameters in the form of an adjustment vector, NULL meaning no changes. */
4068
4069 static ipa_parm_adjustment_vec
4070 analyze_all_param_acesses (void)
4071 {
4072 enum ipa_splicing_result repr_state;
4073 bool proceed = false;
4074 int i, adjustments_count = 0;
4075 VEC (access_p, heap) *representatives;
4076 ipa_parm_adjustment_vec adjustments;
4077
4078 repr_state = splice_all_param_accesses (&representatives);
4079 if (repr_state == NO_GOOD_ACCESS)
4080 return NULL;
4081
4082 /* If there are any parameters passed by reference which are not modified
4083 directly, we need to check whether they can be modified indirectly. */
4084 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4085 {
4086 analyze_caller_dereference_legality (representatives);
4087 analyze_modified_params (representatives);
4088 }
4089
4090 for (i = 0; i < func_param_count; i++)
4091 {
4092 struct access *repr = VEC_index (access_p, representatives, i);
4093
4094 if (repr && !no_accesses_p (repr))
4095 {
4096 if (repr->grp_scalar_ptr)
4097 {
4098 adjustments_count++;
4099 if (repr->grp_not_necessarilly_dereferenced
4100 || repr->grp_maybe_modified)
4101 VEC_replace (access_p, representatives, i, NULL);
4102 else
4103 {
4104 proceed = true;
4105 sra_stats.scalar_by_ref_to_by_val++;
4106 }
4107 }
4108 else
4109 {
4110 int new_components = decide_one_param_reduction (repr);
4111
4112 if (new_components == 0)
4113 {
4114 VEC_replace (access_p, representatives, i, NULL);
4115 adjustments_count++;
4116 }
4117 else
4118 {
4119 adjustments_count += new_components;
4120 sra_stats.aggregate_params_reduced++;
4121 sra_stats.param_reductions_created += new_components;
4122 proceed = true;
4123 }
4124 }
4125 }
4126 else
4127 {
4128 if (no_accesses_p (repr))
4129 {
4130 proceed = true;
4131 sra_stats.deleted_unused_parameters++;
4132 }
4133 adjustments_count++;
4134 }
4135 }
4136
4137 if (!proceed && dump_file)
4138 fprintf (dump_file, "NOT proceeding to change params.\n");
4139
4140 if (proceed)
4141 adjustments = turn_representatives_into_adjustments (representatives,
4142 adjustments_count);
4143 else
4144 adjustments = NULL;
4145
4146 VEC_free (access_p, heap, representatives);
4147 return adjustments;
4148 }
4149
4150 /* If a parameter replacement identified by ADJ does not yet exist in the form
4151 of a declaration, create and record it; otherwise return the previously
4152 created one. */
4153
4154 static tree
4155 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4156 {
4157 tree repl;
4158 if (!adj->new_ssa_base)
4159 {
4160 char *pretty_name = make_fancy_name (adj->base);
4161
4162 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4163 DECL_NAME (repl) = get_identifier (pretty_name);
4164 obstack_free (&name_obstack, pretty_name);
4165
4166 add_referenced_var (repl);
4167 adj->new_ssa_base = repl;
4168 }
4169 else
4170 repl = adj->new_ssa_base;
4171 return repl;
4172 }
4173
4174 /* Find the first adjustment for a particular parameter BASE in a vector of
4175 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4176 adjustment. */
4177
4178 static struct ipa_parm_adjustment *
4179 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4180 {
4181 int i, len;
4182
4183 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4184 for (i = 0; i < len; i++)
4185 {
4186 struct ipa_parm_adjustment *adj;
4187
4188 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4189 if (!adj->copy_param && adj->base == base)
4190 return adj;
4191 }
4192
4193 return NULL;
4194 }
4195
4196 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4197 removed because its value is not used, replace the SSA_NAME with one
4198 relating to a created VAR_DECL, together with all of its uses, and return
4199 true. ADJUSTMENTS is a pointer to an adjustments vector. */
4200
4201 static bool
4202 replace_removed_params_ssa_names (gimple stmt,
4203 ipa_parm_adjustment_vec adjustments)
4204 {
4205 struct ipa_parm_adjustment *adj;
4206 tree lhs, decl, repl, name;
4207
4208 if (gimple_code (stmt) == GIMPLE_PHI)
4209 lhs = gimple_phi_result (stmt);
4210 else if (is_gimple_assign (stmt))
4211 lhs = gimple_assign_lhs (stmt);
4212 else if (is_gimple_call (stmt))
4213 lhs = gimple_call_lhs (stmt);
4214 else
4215 gcc_unreachable ();
4216
4217 if (TREE_CODE (lhs) != SSA_NAME)
4218 return false;
4219 decl = SSA_NAME_VAR (lhs);
4220 if (TREE_CODE (decl) != PARM_DECL)
4221 return false;
4222
4223 adj = get_adjustment_for_base (adjustments, decl);
4224 if (!adj)
4225 return false;
4226
4227 repl = get_replaced_param_substitute (adj);
4228 name = make_ssa_name (repl, stmt);
4229
4230 if (dump_file)
4231 {
4232 fprintf (dump_file, "replacing an SSA name of a removed param ");
4233 print_generic_expr (dump_file, lhs, 0);
4234 fprintf (dump_file, " with ");
4235 print_generic_expr (dump_file, name, 0);
4236 fprintf (dump_file, "\n");
4237 }
4238
4239 if (is_gimple_assign (stmt))
4240 gimple_assign_set_lhs (stmt, name);
4241 else if (is_gimple_call (stmt))
4242 gimple_call_set_lhs (stmt, name);
4243 else
4244 gimple_phi_set_result (stmt, name);
4245
4246 replace_uses_by (lhs, name);
4247 release_ssa_name (lhs);
4248 return true;
4249 }
4250
4251 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4252 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4253 specifies whether the function should care about type incompatibility
4254 between the current and new expressions. If it is false, the function
4255 will leave incompatibility issues to the caller. Return true iff the
4256 expression was modified. */
4257
4258 static bool
4259 sra_ipa_modify_expr (tree *expr, bool convert,
4260 ipa_parm_adjustment_vec adjustments)
4261 {
4262 int i, len;
4263 struct ipa_parm_adjustment *adj, *cand = NULL;
4264 HOST_WIDE_INT offset, size, max_size;
4265 tree base, src;
4266
4267 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4268
4269 if (TREE_CODE (*expr) == BIT_FIELD_REF
4270 || TREE_CODE (*expr) == IMAGPART_EXPR
4271 || TREE_CODE (*expr) == REALPART_EXPR)
4272 {
4273 expr = &TREE_OPERAND (*expr, 0);
4274 convert = true;
4275 }
4276
4277 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4278 if (!base || size == -1 || max_size == -1)
4279 return false;
4280
4281 if (TREE_CODE (base) == MEM_REF)
4282 {
4283 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4284 base = TREE_OPERAND (base, 0);
4285 }
4286
4287 base = get_ssa_base_param (base);
4288 if (!base || TREE_CODE (base) != PARM_DECL)
4289 return false;
4290
4291 for (i = 0; i < len; i++)
4292 {
4293 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4294
4295 if (adj->base == base
4296 && (adj->offset == offset || adj->remove_param))
4297 {
4298 cand = adj;
4299 break;
4300 }
4301 }
4302 if (!cand || cand->copy_param || cand->remove_param)
4303 return false;
4304
4305 if (cand->by_ref)
4306 src = build_simple_mem_ref (cand->reduction);
4307 else
4308 src = cand->reduction;
4309
4310 if (dump_file && (dump_flags & TDF_DETAILS))
4311 {
4312 fprintf (dump_file, "About to replace expr ");
4313 print_generic_expr (dump_file, *expr, 0);
4314 fprintf (dump_file, " with ");
4315 print_generic_expr (dump_file, src, 0);
4316 fprintf (dump_file, "\n");
4317 }
4318
4319 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4320 {
4321 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4322 *expr = vce;
4323 }
4324 else
4325 *expr = src;
4326 return true;
4327 }
4328
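/* E.g. (hypothetical), given an adjustment for by-value parameter P at
   offset 0 whose reduction is the new scalar P_A, an expression P.X at that
   position is replaced with P_A directly, or with
   VIEW_CONVERT_EXPR <type of P.X> (P_A) when CONVERT is set and the types
   are not compatible. For a by-ref adjustment the replacement is a MEM_REF
   of the new parameter instead. */
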
4329 /* If the statement pointed to by STMT_PTR contains any expressions that need
4330 to be replaced with a different one as noted by ADJUSTMENTS, do so. Handle
4331 any potential type incompatibilities (GSI is used to accommodate conversion
4332 statements and must point to the statement). Return true iff the statement
4333 was modified. */
4334
4335 static bool
4336 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4337 ipa_parm_adjustment_vec adjustments)
4338 {
4339 gimple stmt = *stmt_ptr;
4340 tree *lhs_p, *rhs_p;
4341 bool any;
4342
4343 if (!gimple_assign_single_p (stmt))
4344 return false;
4345
4346 rhs_p = gimple_assign_rhs1_ptr (stmt);
4347 lhs_p = gimple_assign_lhs_ptr (stmt);
4348
4349 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4350 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4351 if (any)
4352 {
4353 tree new_rhs = NULL_TREE;
4354
4355 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4356 {
4357 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4358 {
4359 /* V_C_Es of constructors can cause trouble (PR 42714). */
4360 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4361 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4362 else
4363 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4364 }
4365 else
4366 new_rhs = fold_build1_loc (gimple_location (stmt),
4367 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4368 *rhs_p);
4369 }
4370 else if (REFERENCE_CLASS_P (*rhs_p)
4371 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4372 && !is_gimple_reg (*lhs_p))
4373 /* This can happen when an assignment in between two single field
4374 structures is turned into an assignment in between two pointers to
4375 scalars (PR 42237). */
4376 new_rhs = *rhs_p;
4377
4378 if (new_rhs)
4379 {
4380 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4381 true, GSI_SAME_STMT);
4382
4383 gimple_assign_set_rhs_from_tree (gsi, tmp);
4384 }
4385
4386 return true;
4387 }
4388
4389 return false;
4390 }
4391
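/* A hypothetical illustration of the second special case above (PR 42237):
   when both sides of

     struct onefield { double d; } *a, *b;
     *a = *b;

   are reduced to their single scalar component, the statement becomes an
   assignment between two scalars accessed through pointers; since the LHS
   is still a memory reference, the RHS must first be loaded into a
   temporary, which force_gimple_operand_gsi arranges below. */
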
4392 /* Traverse the function body and perform all modifications as described in
4393 ADJUSTMENTS. Return true iff the CFG has been changed. */
4394
4395 static bool
4396 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4397 {
4398 bool cfg_changed = false;
4399 basic_block bb;
4400
4401 FOR_EACH_BB (bb)
4402 {
4403 gimple_stmt_iterator gsi;
4404
4405 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4406 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4407
4408 gsi = gsi_start_bb (bb);
4409 while (!gsi_end_p (gsi))
4410 {
4411 gimple stmt = gsi_stmt (gsi);
4412 bool modified = false;
4413 tree *t;
4414 unsigned i;
4415
4416 switch (gimple_code (stmt))
4417 {
4418 case GIMPLE_RETURN:
4419 t = gimple_return_retval_ptr (stmt);
4420 if (*t != NULL_TREE)
4421 modified |= sra_ipa_modify_expr (t, true, adjustments);
4422 break;
4423
4424 case GIMPLE_ASSIGN:
4425 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4426 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4427 break;
4428
4429 case GIMPLE_CALL:
4430 /* Operands must be processed before the lhs. */
4431 for (i = 0; i < gimple_call_num_args (stmt); i++)
4432 {
4433 t = gimple_call_arg_ptr (stmt, i);
4434 modified |= sra_ipa_modify_expr (t, true, adjustments);
4435 }
4436
4437 if (gimple_call_lhs (stmt))
4438 {
4439 t = gimple_call_lhs_ptr (stmt);
4440 modified |= sra_ipa_modify_expr (t, false, adjustments);
4441 modified |= replace_removed_params_ssa_names (stmt,
4442 adjustments);
4443 }
4444 break;
4445
4446 case GIMPLE_ASM:
4447 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4448 {
4449 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4450 modified |= sra_ipa_modify_expr (t, true, adjustments);
4451 }
4452 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4453 {
4454 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4455 modified |= sra_ipa_modify_expr (t, false, adjustments);
4456 }
4457 break;
4458
4459 default:
4460 break;
4461 }
4462
4463 if (modified)
4464 {
4465 update_stmt (stmt);
4466 if (maybe_clean_eh_stmt (stmt)
4467 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4468 cfg_changed = true;
4469 }
4470 gsi_next (&gsi);
4471 }
4472 }
4473
4474 return cfg_changed;
4475 }
4476
4477 /* Call gimple_debug_bind_reset_value on all debug statements describing
4478 gimple register parameters that are being removed or replaced. */
4479
4480 static void
4481 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4482 {
4483 int i, len;
4484 gimple_stmt_iterator *gsip = NULL, gsi;
4485
4486 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4487 {
4488 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4489 gsip = &gsi;
4490 }
4491 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4492 for (i = 0; i < len; i++)
4493 {
4494 struct ipa_parm_adjustment *adj;
4495 imm_use_iterator ui;
4496 gimple stmt, def_temp;
4497 tree name, vexpr, copy = NULL_TREE;
4498 use_operand_p use_p;
4499
4500 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4501 if (adj->copy_param || !is_gimple_reg (adj->base))
4502 continue;
4503 name = gimple_default_def (cfun, adj->base);
4504 vexpr = NULL;
4505 if (name)
4506 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4507 {
4508 /* All other users must have been removed by
4509 ipa_sra_modify_function_body. */
4510 gcc_assert (is_gimple_debug (stmt));
4511 if (vexpr == NULL && gsip != NULL)
4512 {
4513 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4514 vexpr = make_node (DEBUG_EXPR_DECL);
4515 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4516 NULL);
4517 DECL_ARTIFICIAL (vexpr) = 1;
4518 TREE_TYPE (vexpr) = TREE_TYPE (name);
4519 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4520 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4521 }
4522 if (vexpr)
4523 {
4524 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4525 SET_USE (use_p, vexpr);
4526 }
4527 else
4528 gimple_debug_bind_reset_value (stmt);
4529 update_stmt (stmt);
4530 }
4531 /* Create a VAR_DECL for debug info purposes. */
4532 if (!DECL_IGNORED_P (adj->base))
4533 {
4534 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4535 VAR_DECL, DECL_NAME (adj->base),
4536 TREE_TYPE (adj->base));
4537 if (DECL_PT_UID_SET_P (adj->base))
4538 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4539 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4540 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4541 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4542 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4543 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4544 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4545 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4546 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4547 SET_DECL_RTL (copy, 0);
4548 TREE_USED (copy) = 1;
4549 DECL_CONTEXT (copy) = current_function_decl;
4550 add_referenced_var (copy);
4551 add_local_decl (cfun, copy);
4552 DECL_CHAIN (copy) =
4553 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4554 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4555 }
4556 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4557 {
4558 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4559 if (vexpr)
4560 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4561 else
4562 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4563 NULL);
4564 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4565 }
4566 }
4567 }
4568
4569 /* Return true iff some caller passes fewer actual arguments than there are
4570 formal parameters in the current function. */
4571
4572 static bool
4573 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4574 void *data ATTRIBUTE_UNUSED)
4575 {
4576 struct cgraph_edge *cs;
4577 for (cs = node->callers; cs; cs = cs->next_caller)
4578 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4579 return true;
4580
4581 return false;
4582 }
4583
4584 /* Convert all callers of NODE. */
4585
4586 static bool
4587 convert_callers_for_node (struct cgraph_node *node,
4588 void *data)
4589 {
4590 ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec)data;
4591 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4592 struct cgraph_edge *cs;
4593
4594 for (cs = node->callers; cs; cs = cs->next_caller)
4595 {
4596 current_function_decl = cs->caller->decl;
4597 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4598
4599 if (dump_file)
4600 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4601 cs->caller->uid, cs->callee->uid,
4602 cgraph_node_name (cs->caller),
4603 cgraph_node_name (cs->callee));
4604
4605 ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4606
4607 pop_cfun ();
4608 }
4609
4610 for (cs = node->callers; cs; cs = cs->next_caller)
4611 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4612 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4613 compute_inline_parameters (cs->caller, true);
4614 BITMAP_FREE (recomputed_callers);
4615
4616 return true;
4617 }
4618
4619 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4620
4621 static void
4622 convert_callers (struct cgraph_node *node, tree old_decl,
4623 ipa_parm_adjustment_vec adjustments)
4624 {
4625 tree old_cur_fndecl = current_function_decl;
4626 basic_block this_block;
4627
4628 cgraph_for_node_and_aliases (node, convert_callers_for_node,
4629 adjustments, false);
4630
4631 current_function_decl = old_cur_fndecl;
4632
4633 if (!encountered_recursive_call)
4634 return;
4635
4636 FOR_EACH_BB (this_block)
4637 {
4638 gimple_stmt_iterator gsi;
4639
4640 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4641 {
4642 gimple stmt = gsi_stmt (gsi);
4643 tree call_fndecl;
4644 if (gimple_code (stmt) != GIMPLE_CALL)
4645 continue;
4646 call_fndecl = gimple_call_fndecl (stmt);
4647 if (call_fndecl == old_decl)
4648 {
4649 if (dump_file)
4650 fprintf (dump_file, "Adjusting recursive call\n");
4651 gimple_call_set_fndecl (stmt, node->decl);
4652 ipa_modify_call_arguments (NULL, stmt, adjustments);
4653 }
4654 }
4655 }
4656
4657 return;
4658 }
4659
4660 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4661 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4662
4663 static bool
4664 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4665 {
4666 struct cgraph_node *new_node;
4667 bool cfg_changed;
4668 VEC (cgraph_edge_p, heap) * redirect_callers = collect_callers_of_node (node);
4669
4670 rebuild_cgraph_edges ();
4671 free_dominance_info (CDI_DOMINATORS);
4672 pop_cfun ();
4673 current_function_decl = NULL_TREE;
4674
4675 new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
4676 NULL, NULL, "isra");
4677 current_function_decl = new_node->decl;
4678 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4679
4680 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4681 cfg_changed = ipa_sra_modify_function_body (adjustments);
4682 sra_ipa_reset_debug_stmts (adjustments);
4683 convert_callers (new_node, node->decl, adjustments);
4684 cgraph_make_node_local (new_node);
4685 return cfg_changed;
4686 }
4687
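/* Put together, the effect on a hypothetical program is:

     static int foo (struct S *p) { return p->a; }
     ... foo (&s); ...

   becomes

     static int foo.isra.0 (int a) { return a; }
     ... foo.isra.0 (s.a); ...

   where the clone is created by cgraph_function_versioning above and the
   callers are redirected by convert_callers. */
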
4688 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4689 its attributes; return true otherwise. NODE is the cgraph node of the
4690 current function. */
4691
4692 static bool
4693 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4694 {
4695 if (!cgraph_node_can_be_local_p (node))
4696 {
4697 if (dump_file)
4698 fprintf (dump_file, "Function not local to this compilation unit.\n");
4699 return false;
4700 }
4701
4702 if (!node->local.can_change_signature)
4703 {
4704 if (dump_file)
4705 fprintf (dump_file, "Function can not change signature.\n");
4706 return false;
4707 }
4708
4709 if (!tree_versionable_function_p (node->decl))
4710 {
4711 if (dump_file)
4712 fprintf (dump_file, "Function is not versionable.\n");
4713 return false;
4714 }
4715
4716 if (DECL_VIRTUAL_P (current_function_decl))
4717 {
4718 if (dump_file)
4719 fprintf (dump_file, "Function is a virtual method.\n");
4720 return false;
4721 }
4722
4723 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4724 && inline_summary(node)->size >= MAX_INLINE_INSNS_AUTO)
4725 {
4726 if (dump_file)
4727 fprintf (dump_file, "Function too big to be made truly local.\n");
4728 return false;
4729 }
4730
4731 if (!node->callers)
4732 {
4733 if (dump_file)
4734 fprintf (dump_file,
4735 "Function has no callers in this compilation unit.\n");
4736 return false;
4737 }
4738
4739 if (cfun->stdarg)
4740 {
4741 if (dump_file)
4742 fprintf (dump_file, "Function uses stdarg. \n");
4743 return false;
4744 }
4745
4746 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
4747 return false;
4748
4749 return true;
4750 }
4751
4752 /* Perform early interprocedural SRA. */
4753
4754 static unsigned int
4755 ipa_early_sra (void)
4756 {
4757 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4758 ipa_parm_adjustment_vec adjustments;
4759 int ret = 0;
4760
4761 if (!ipa_sra_preliminary_function_checks (node))
4762 return 0;
4763
4764 sra_initialize ();
4765 sra_mode = SRA_MODE_EARLY_IPA;
4766
4767 if (!find_param_candidates ())
4768 {
4769 if (dump_file)
4770 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4771 goto simple_out;
4772 }
4773
4774 if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4775 NULL, true))
4776 {
4777 if (dump_file)
4778 fprintf (dump_file, "There are callers with insufficient number of "
4779 "arguments.\n");
4780 goto simple_out;
4781 }
4782
4783 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4784 func_param_count
4785 * last_basic_block_for_function (cfun));
4786 final_bbs = BITMAP_ALLOC (NULL);
4787
4788 scan_function ();
4789 if (encountered_apply_args)
4790 {
4791 if (dump_file)
4792 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4793 goto out;
4794 }
4795
4796 if (encountered_unchangable_recursive_call)
4797 {
4798 if (dump_file)
4799 fprintf (dump_file, "Function calls itself with insufficient "
4800 "number of arguments.\n");
4801 goto out;
4802 }
4803
4804 adjustments = analyze_all_param_acesses ();
4805 if (!adjustments)
4806 goto out;
4807 if (dump_file)
4808 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4809
4810 if (modify_function (node, adjustments))
4811 ret = TODO_update_ssa | TODO_cleanup_cfg;
4812 else
4813 ret = TODO_update_ssa;
4814 VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4815
4816 statistics_counter_event (cfun, "Unused parameters deleted",
4817 sra_stats.deleted_unused_parameters);
4818 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4819 sra_stats.scalar_by_ref_to_by_val);
4820 statistics_counter_event (cfun, "Aggregate parameters broken up",
4821 sra_stats.aggregate_params_reduced);
4822 statistics_counter_event (cfun, "Aggregate parameter components created",
4823 sra_stats.param_reductions_created);
4824
4825 out:
4826 BITMAP_FREE (final_bbs);
4827 free (bb_dereferences);
4828 simple_out:
4829 sra_deinitialize ();
4830 return ret;
4831 }
4832
4833 /* Return true iff early IPA-SRA shall be performed. */
4834 static bool
4835 ipa_early_sra_gate (void)
4836 {
4837 return flag_ipa_sra && dbg_cnt (eipa_sra);
4838 }
4839
4840 struct gimple_opt_pass pass_early_ipa_sra =
4841 {
4842 {
4843 GIMPLE_PASS,
4844 "eipa_sra", /* name */
4845 ipa_early_sra_gate, /* gate */
4846 ipa_early_sra, /* execute */
4847 NULL, /* sub */
4848 NULL, /* next */
4849 0, /* static_pass_number */
4850 TV_IPA_SRA, /* tv_id */
4851 0, /* properties_required */
4852 0, /* properties_provided */
4853 0, /* properties_destroyed */
4854 0, /* todo_flags_start */
4855 TODO_dump_cgraph /* todo_flags_finish */
4856 }
4857 };