gcc/tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
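
/* An illustrative sketch (editorial example, not part of the original
   sources): given

     struct S { int a; float b; } s;

     s.a = 123;
     foo (s.a);

   and assuming nothing forces "s" to live in memory, intra-SRA creates a
   scalar replacement for the "s.a" component (a new int variable, called
   s$a here purely for illustration) and rewrites the statements to

     s$a = 123;
     foo (s$a);

   thereby exposing the component to the scalar optimizers.  */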
73
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "gimple.h"
81 #include "cgraph.h"
82 #include "tree-flow.h"
83 #include "tree-pass.h"
84 #include "ipa-prop.h"
85 #include "statistics.h"
86 #include "params.h"
87 #include "target.h"
88 #include "flags.h"
89 #include "dbgcnt.h"
90 #include "tree-inline.h"
91 #include "gimple-pretty-print.h"
92 #include "ipa-inline.h"
93
94 /* Enumeration of all aggregate reductions we can do. */
95 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
96 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
97 SRA_MODE_INTRA }; /* late intraprocedural SRA */
98
99 /* Global variable describing which aggregate reduction we are performing at
100 the moment. */
101 static enum sra_mode sra_mode;
102
103 struct assign_link;
104
105 /* ACCESS represents each access to an aggregate variable (as a whole or a
106 part). It can also represent a group of accesses that refer to exactly the
107 same fragment of an aggregate (i.e. those that have exactly the same offset
108 and size). Such representatives for a single aggregate, once determined,
109 are linked in a linked list and have the group fields set.
110
111 Moreover, when doing intraprocedural SRA, a tree is built from those
112 representatives (by the means of first_child and next_sibling pointers), in
113 which all items in a subtree are "within" the root, i.e. their offset is
114 greater or equal to offset of the root and offset+size is smaller or equal
115 to offset+size of the root. Children of an access are sorted by offset.
116
117 Note that accesses to parts of vector and complex number types are always
118 represented by an access to the whole vector or complex number. It is the
119 duty of the modifying functions to replace them appropriately. */
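
/* Illustrative example (editorial): for

     struct In { int x; int y; };
     struct S { struct In in; int z; } s;

   on a target where int is 32 bits wide, the representative covering all of
   "s" (offset 0, size 96) would be the root of the tree, with children for
   "s.in" (offset 0, size 64) and "s.z" (offset 64, size 32), and with
   grandchildren for "s.in.x" (offset 0, size 32) and "s.in.y" (offset 32,
   size 32).  The exact offsets and sizes depend on the target's layout.  */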
120
121 struct access
122 {
123 /* Values returned by `get_ref_base_and_extent' for each component reference.
124 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
125 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
126 HOST_WIDE_INT offset;
127 HOST_WIDE_INT size;
128 tree base;
129
130 /* Expression. It is context dependent so do not use it to create new
131 expressions to access the original aggregate. See PR 42154 for a
132 testcase. */
133 tree expr;
134 /* Type. */
135 tree type;
136
137 /* The statement this access belongs to. */
138 gimple stmt;
139
140 /* Next group representative for this aggregate. */
141 struct access *next_grp;
142
143 /* Pointer to the group representative. Pointer to itself if the struct is
144 the representative. */
145 struct access *group_representative;
146
147 /* If this access has any children (in terms of the definition above), this
148 points to the first one. */
149 struct access *first_child;
150
151 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
152 described above. In IPA-SRA this is a pointer to the next access
153 belonging to the same group (having the same representative). */
154 struct access *next_sibling;
155
156 /* Pointers to the first and last element in the linked list of assign
157 links. */
158 struct assign_link *first_link, *last_link;
159
160 /* Pointer to the next access in the work queue. */
161 struct access *next_queued;
162
163 /* Replacement variable for this access "region." Never to be accessed
164 directly, always only by the means of get_access_replacement() and only
165 when grp_to_be_replaced flag is set. */
166 tree replacement_decl;
167
168 /* Is this particular access a write access? */
169 unsigned write : 1;
170
171 /* Is this access an access to a non-addressable field? */
172 unsigned non_addressable : 1;
173
174 /* Is this access currently in the work queue? */
175 unsigned grp_queued : 1;
176
177 /* Does this group contain a write access? This flag is propagated down the
178 access tree. */
179 unsigned grp_write : 1;
180
181 /* Does this group contain a read access? This flag is propagated down the
182 access tree. */
183 unsigned grp_read : 1;
184
185 /* Does this group contain a read access that comes from an assignment
186 statement? This flag is propagated down the access tree. */
187 unsigned grp_assignment_read : 1;
188
189 /* Does this group contain a write access that comes from an assignment
190 statement? This flag is propagated down the access tree. */
191 unsigned grp_assignment_write : 1;
192
193 /* Does this group contain a read access through a scalar type? This flag is
194 not propagated in the access tree in any direction. */
195 unsigned grp_scalar_read : 1;
196
197 /* Does this group contain a write access through a scalar type? This flag
198 is not propagated in the access tree in any direction. */
199 unsigned grp_scalar_write : 1;
200
201 /* Is this access an artificial one created to scalarize some record
202 entirely? */
203 unsigned grp_total_scalarization : 1;
204
205 /* Other passes of the analysis use this bit to make function
206 analyze_access_subtree create scalar replacements for this group if
207 possible. */
208 unsigned grp_hint : 1;
209
210 /* Is the subtree rooted in this access fully covered by scalar
211 replacements? */
212 unsigned grp_covered : 1;
213
214 /* If set to true, this access and all below it in an access tree must not be
215 scalarized. */
216 unsigned grp_unscalarizable_region : 1;
217
218 /* Whether data have been written to parts of the aggregate covered by this
219 access which are not to be scalarized. This flag is propagated up in the
220 access tree. */
221 unsigned grp_unscalarized_data : 1;
222
223 /* Does this access and/or group contain a write access through a
224 BIT_FIELD_REF? */
225 unsigned grp_partial_lhs : 1;
226
227 /* Set when a scalar replacement should be created for this variable. */
228 unsigned grp_to_be_replaced : 1;
229
230 /* Set when we want a replacement for the sole purpose of having it in
231 generated debug statements. */
232 unsigned grp_to_be_debug_replaced : 1;
233
234 /* Should TREE_NO_WARNING of a replacement be set? */
235 unsigned grp_no_warning : 1;
236
237 /* Is it possible that the group refers to data which might be (directly or
238 otherwise) modified? */
239 unsigned grp_maybe_modified : 1;
240
241 /* Set when this is a representative of a pointer to scalar (i.e. by
242 reference) parameter which we consider for turning into a plain scalar
243 (i.e. a by value parameter). */
244 unsigned grp_scalar_ptr : 1;
245
246 /* Set when we discover that this pointer is not safe to dereference in the
247 caller. */
248 unsigned grp_not_necessarilly_dereferenced : 1;
249 };
250
251 typedef struct access *access_p;
252
253
254 /* Alloc pool for allocating access structures. */
255 static alloc_pool access_pool;
256
257 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
258 are used to propagate subaccesses from rhs to lhs as long as they don't
259 conflict with what is already there. */
260 struct assign_link
261 {
262 struct access *lacc, *racc;
263 struct assign_link *next;
264 };
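
/* Illustrative example (editorial): for a statement "a = b;" where both "a"
   and "b" are candidate aggregates of compatible type and size, one
   assign_link is created with lacc describing the access to "a" and racc
   describing the access to "b", and it is attached to the list of links of
   the rhs access so that subaccesses of "b" can later be propagated to
   "a".  */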
265
266 /* Alloc pool for allocating assign link structures. */
267 static alloc_pool link_pool;
268
269 /* Base (tree) -> Vector (vec<access_p> *) map. */
270 static struct pointer_map_t *base_access_vec;
271
272 /* Set of candidates. */
273 static bitmap candidate_bitmap;
274 static htab_t candidates;
275
276 /* For a candidate UID, return the candidate's decl. */
277
278 static inline tree
279 candidate (unsigned uid)
280 {
281 struct tree_decl_minimal t;
282 t.uid = uid;
283 return (tree) htab_find_with_hash (candidates, &t, uid);
284 }
285
286 /* Bitmap of candidates which we should try to entirely scalarize away and
287 those which cannot be (because they are, and need to be, used as a whole). */
288 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
289
290 /* Obstack for creation of fancy names. */
291 static struct obstack name_obstack;
292
293 /* Head of a linked list of accesses that need to have their subaccesses
294 propagated to their assignment counterparts. */
295 static struct access *work_queue_head;
296
297 /* Number of parameters of the analyzed function when doing early ipa SRA. */
298 static int func_param_count;
299
300 /* scan_function sets the following to true if it encounters a call to
301 __builtin_apply_args. */
302 static bool encountered_apply_args;
303
304 /* Set by scan_function when it finds a recursive call. */
305 static bool encountered_recursive_call;
306
307 /* Set by scan_function when it finds a recursive call with fewer actual
308 arguments than formal parameters. */
309 static bool encountered_unchangable_recursive_call;
310
311 /* This is a table in which for each basic block and parameter there is a
312 distance (offset + size) in that parameter which is dereferenced and
313 accessed in that BB. */
314 static HOST_WIDE_INT *bb_dereferences;
315 /* Bitmap of BBs that can cause the function to "stop" progressing by
316 returning, throwing externally, looping infinitely or calling a function
317 which might abort, etc. */
318 static bitmap final_bbs;
319
320 /* Representative of no accesses at all. */
321 static struct access no_accesses_representant;
322
323 /* Predicate to test the special value. */
324
325 static inline bool
326 no_accesses_p (struct access *access)
327 {
328 return access == &no_accesses_representant;
329 }
330
331 /* Dump contents of ACCESS to file F in a human friendly way (dump_access itself
332 is defined below, after the sra_stats structure). If GRP is true, representative
333 fields are dumped, otherwise those which only describe the individual access are. */
334
335 static struct
336 {
337 /* Number of processed aggregates is readily available in
338 analyze_all_variable_accesses and so is not stored here. */
339
340 /* Number of created scalar replacements. */
341 int replacements;
342
343 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
344 expression. */
345 int exprs;
346
347 /* Number of statements created by generate_subtree_copies. */
348 int subtree_copies;
349
350 /* Number of statements created by load_assign_lhs_subreplacements. */
351 int subreplacements;
352
353 /* Number of times sra_modify_assign has deleted a statement. */
354 int deleted;
355
356 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
357 RHS separately due to type conversions or nonexistent matching
358 references. */
359 int separate_lhs_rhs_handling;
360
361 /* Number of parameters that were removed because they were unused. */
362 int deleted_unused_parameters;
363
364 /* Number of scalars passed as parameters by reference that have been
365 converted to be passed by value. */
366 int scalar_by_ref_to_by_val;
367
368 /* Number of aggregate parameters that were replaced by one or more of their
369 components. */
370 int aggregate_params_reduced;
371
372 /* Number of components created when splitting aggregate parameters. */
373 int param_reductions_created;
374 } sra_stats;
375
376 static void
377 dump_access (FILE *f, struct access *access, bool grp)
378 {
379 fprintf (f, "access { ");
380 fprintf (f, "base = (%d)'", DECL_UID (access->base));
381 print_generic_expr (f, access->base, 0);
382 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
383 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
384 fprintf (f, ", expr = ");
385 print_generic_expr (f, access->expr, 0);
386 fprintf (f, ", type = ");
387 print_generic_expr (f, access->type, 0);
388 if (grp)
389 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
390 "grp_assignment_write = %d, grp_scalar_read = %d, "
391 "grp_scalar_write = %d, grp_total_scalarization = %d, "
392 "grp_hint = %d, grp_covered = %d, "
393 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
394 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
395 "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
396 "grp_not_necessarilly_dereferenced = %d\n",
397 access->grp_read, access->grp_write, access->grp_assignment_read,
398 access->grp_assignment_write, access->grp_scalar_read,
399 access->grp_scalar_write, access->grp_total_scalarization,
400 access->grp_hint, access->grp_covered,
401 access->grp_unscalarizable_region, access->grp_unscalarized_data,
402 access->grp_partial_lhs, access->grp_to_be_replaced,
403 access->grp_to_be_debug_replaced, access->grp_maybe_modified,
404 access->grp_not_necessarilly_dereferenced);
405 else
406 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
407 "grp_partial_lhs = %d\n",
408 access->write, access->grp_total_scalarization,
409 access->grp_partial_lhs);
410 }
411
412 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
413
414 static void
415 dump_access_tree_1 (FILE *f, struct access *access, int level)
416 {
417 do
418 {
419 int i;
420
421 for (i = 0; i < level; i++)
422 fputs ("* ", dump_file);
423
424 dump_access (f, access, true);
425
426 if (access->first_child)
427 dump_access_tree_1 (f, access->first_child, level + 1);
428
429 access = access->next_sibling;
430 }
431 while (access);
432 }
433
434 /* Dump all access trees for a variable, given the pointer to the first root in
435 ACCESS. */
436
437 static void
438 dump_access_tree (FILE *f, struct access *access)
439 {
440 for (; access; access = access->next_grp)
441 dump_access_tree_1 (f, access, 0);
442 }
443
444 /* Return true iff ACC is non-NULL and has subaccesses. */
445
446 static inline bool
447 access_has_children_p (struct access *acc)
448 {
449 return acc && acc->first_child;
450 }
451
452 /* Return true iff ACC is (partly) covered by at least one replacement. */
453
454 static bool
455 access_has_replacements_p (struct access *acc)
456 {
457 struct access *child;
458 if (acc->grp_to_be_replaced)
459 return true;
460 for (child = acc->first_child; child; child = child->next_sibling)
461 if (access_has_replacements_p (child))
462 return true;
463 return false;
464 }
465
466 /* Return a vector of pointers to accesses for the variable given in BASE or
467 NULL if there is none. */
468
469 static vec<access_p> *
470 get_base_access_vector (tree base)
471 {
472 void **slot;
473
474 slot = pointer_map_contains (base_access_vec, base);
475 if (!slot)
476 return NULL;
477 else
478 return *(vec<access_p> **) slot;
479 }
480
481 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
482 in ACCESS. Return NULL if it cannot be found. */
483
484 static struct access *
485 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
486 HOST_WIDE_INT size)
487 {
488 while (access && (access->offset != offset || access->size != size))
489 {
490 struct access *child = access->first_child;
491
492 while (child && (child->offset + child->size <= offset))
493 child = child->next_sibling;
494 access = child;
495 }
496
497 return access;
498 }
499
500 /* Return the first group representative for DECL or NULL if none exists. */
501
502 static struct access *
503 get_first_repr_for_decl (tree base)
504 {
505 vec<access_p> *access_vec;
506
507 access_vec = get_base_access_vector (base);
508 if (!access_vec)
509 return NULL;
510
511 return (*access_vec)[0];
512 }
513
514 /* Find an access representative for the variable BASE and given OFFSET and
515 SIZE. Requires that access trees have already been built. Return NULL if
516 it cannot be found. */
517
518 static struct access *
519 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
520 HOST_WIDE_INT size)
521 {
522 struct access *access;
523
524 access = get_first_repr_for_decl (base);
525 while (access && (access->offset + access->size <= offset))
526 access = access->next_grp;
527 if (!access)
528 return NULL;
529
530 return find_access_in_subtree (access, offset, size);
531 }
532
533 /* Add LINK to the linked list of assign links of RACC. */
534 static void
535 add_link_to_rhs (struct access *racc, struct assign_link *link)
536 {
537 gcc_assert (link->racc == racc);
538
539 if (!racc->first_link)
540 {
541 gcc_assert (!racc->last_link);
542 racc->first_link = link;
543 }
544 else
545 racc->last_link->next = link;
546
547 racc->last_link = link;
548 link->next = NULL;
549 }
550
551 /* Move all link structures from the linked list of OLD_RACC to the linked list
552 in NEW_RACC. */
553 static void
554 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
555 {
556 if (!old_racc->first_link)
557 {
558 gcc_assert (!old_racc->last_link);
559 return;
560 }
561
562 if (new_racc->first_link)
563 {
564 gcc_assert (!new_racc->last_link->next);
565 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
566
567 new_racc->last_link->next = old_racc->first_link;
568 new_racc->last_link = old_racc->last_link;
569 }
570 else
571 {
572 gcc_assert (!new_racc->last_link);
573
574 new_racc->first_link = old_racc->first_link;
575 new_racc->last_link = old_racc->last_link;
576 }
577 old_racc->first_link = old_racc->last_link = NULL;
578 }
579
580 /* Add ACCESS to the work queue (which is actually a stack). */
581
582 static void
583 add_access_to_work_queue (struct access *access)
584 {
585 if (!access->grp_queued)
586 {
587 gcc_assert (!access->next_queued);
588 access->next_queued = work_queue_head;
589 access->grp_queued = 1;
590 work_queue_head = access;
591 }
592 }
593
594 /* Pop an access from the work queue, and return it, assuming there is one. */
595
596 static struct access *
597 pop_access_from_work_queue (void)
598 {
599 struct access *access = work_queue_head;
600
601 work_queue_head = access->next_queued;
602 access->next_queued = NULL;
603 access->grp_queued = 0;
604 return access;
605 }
606
607
608 /* Allocate necessary structures. */
609
610 static void
611 sra_initialize (void)
612 {
613 candidate_bitmap = BITMAP_ALLOC (NULL);
614 candidates = htab_create (vec_safe_length (cfun->local_decls) / 2,
615 uid_decl_map_hash, uid_decl_map_eq, NULL);
616 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
617 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
618 gcc_obstack_init (&name_obstack);
619 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
620 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
621 base_access_vec = pointer_map_create ();
622 memset (&sra_stats, 0, sizeof (sra_stats));
623 encountered_apply_args = false;
624 encountered_recursive_call = false;
625 encountered_unchangable_recursive_call = false;
626 }
627
628 /* Hook fed to pointer_map_traverse, deallocate stored vectors. */
629
630 static bool
631 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
632 void *data ATTRIBUTE_UNUSED)
633 {
634 vec<access_p> *access_vec = (vec<access_p> *) *value;
635 vec_free (access_vec);
636 return true;
637 }
638
639 /* Deallocate all general structures. */
640
641 static void
642 sra_deinitialize (void)
643 {
644 BITMAP_FREE (candidate_bitmap);
645 htab_delete (candidates);
646 BITMAP_FREE (should_scalarize_away_bitmap);
647 BITMAP_FREE (cannot_scalarize_away_bitmap);
648 free_alloc_pool (access_pool);
649 free_alloc_pool (link_pool);
650 obstack_free (&name_obstack, NULL);
651
652 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
653 pointer_map_destroy (base_access_vec);
654 }
655
656 /* Remove DECL from candidates for SRA and write REASON to the dump file if
657 there is one. */
658 static void
659 disqualify_candidate (tree decl, const char *reason)
660 {
661 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
662 htab_clear_slot (candidates,
663 htab_find_slot_with_hash (candidates, decl,
664 DECL_UID (decl), NO_INSERT));
665
666 if (dump_file && (dump_flags & TDF_DETAILS))
667 {
668 fprintf (dump_file, "! Disqualifying ");
669 print_generic_expr (dump_file, decl, 0);
670 fprintf (dump_file, " - %s\n", reason);
671 }
672 }
673
674 /* Return true iff the type contains a field or an element which does not allow
675 scalarization. */
676
677 static bool
678 type_internals_preclude_sra_p (tree type, const char **msg)
679 {
680 tree fld;
681 tree et;
682
683 switch (TREE_CODE (type))
684 {
685 case RECORD_TYPE:
686 case UNION_TYPE:
687 case QUAL_UNION_TYPE:
688 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
689 if (TREE_CODE (fld) == FIELD_DECL)
690 {
691 tree ft = TREE_TYPE (fld);
692
693 if (TREE_THIS_VOLATILE (fld))
694 {
695 *msg = "volatile structure field";
696 return true;
697 }
698 if (!DECL_FIELD_OFFSET (fld))
699 {
700 *msg = "no structure field offset";
701 return true;
702 }
703 if (!DECL_SIZE (fld))
704 {
705 *msg = "zero structure field size";
706 return true;
707 }
708 if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
709 {
710 *msg = "structure field offset not fixed";
711 return true;
712 }
713 if (!host_integerp (DECL_SIZE (fld), 1))
714 {
715 *msg = "structure field size not fixed";
716 return true;
717 }
718 if (!host_integerp (bit_position (fld), 0))
719 {
720 *msg = "structure field size too big";
721 return true;
722 }
723 if (AGGREGATE_TYPE_P (ft)
724 && int_bit_position (fld) % BITS_PER_UNIT != 0)
725 {
726 *msg = "structure field is bit field";
727 return true;
728 }
729
730 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
731 return true;
732 }
733
734 return false;
735
736 case ARRAY_TYPE:
737 et = TREE_TYPE (type);
738
739 if (TYPE_VOLATILE (et))
740 {
741 *msg = "element type is volatile";
742 return true;
743 }
744
745 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
746 return true;
747
748 return false;
749
750 default:
751 return false;
752 }
753 }
754
755 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
756 base variable if it is. Return T if it is not an SSA_NAME. */
757
758 static tree
759 get_ssa_base_param (tree t)
760 {
761 if (TREE_CODE (t) == SSA_NAME)
762 {
763 if (SSA_NAME_IS_DEFAULT_DEF (t))
764 return SSA_NAME_VAR (t);
765 else
766 return NULL_TREE;
767 }
768 return t;
769 }
770
771 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
772 belongs to, unless the BB has already been marked as potentially
773 final. */
774
775 static void
776 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
777 {
778 basic_block bb = gimple_bb (stmt);
779 int idx, parm_index = 0;
780 tree parm;
781
782 if (bitmap_bit_p (final_bbs, bb->index))
783 return;
784
785 for (parm = DECL_ARGUMENTS (current_function_decl);
786 parm && parm != base;
787 parm = DECL_CHAIN (parm))
788 parm_index++;
789
790 gcc_assert (parm_index < func_param_count);
791
792 idx = bb->index * func_param_count + parm_index;
793 if (bb_dereferences[idx] < dist)
794 bb_dereferences[idx] = dist;
795 }
796
797 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
798 the three fields. Also add it to the vector of accesses corresponding to
799 the base. Finally, return the new access. */
800
801 static struct access *
802 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
803 {
804 vec<access_p> *v;
805 struct access *access;
806 void **slot;
807
808 access = (struct access *) pool_alloc (access_pool);
809 memset (access, 0, sizeof (struct access));
810 access->base = base;
811 access->offset = offset;
812 access->size = size;
813
814 slot = pointer_map_contains (base_access_vec, base);
815 if (slot)
816 v = (vec<access_p> *) *slot;
817 else
818 vec_alloc (v, 32);
819
820 v->safe_push (access);
821
822 *((vec<access_p> **)
823 pointer_map_insert (base_access_vec, base)) = v;
824
825 return access;
826 }
827
828 /* Create and insert access for EXPR. Return created access, or NULL if it is
829 not possible. */
830
831 static struct access *
832 create_access (tree expr, gimple stmt, bool write)
833 {
834 struct access *access;
835 HOST_WIDE_INT offset, size, max_size;
836 tree base = expr;
837 bool ptr, unscalarizable_region = false;
838
839 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
840
841 if (sra_mode == SRA_MODE_EARLY_IPA
842 && TREE_CODE (base) == MEM_REF)
843 {
844 base = get_ssa_base_param (TREE_OPERAND (base, 0));
845 if (!base)
846 return NULL;
847 ptr = true;
848 }
849 else
850 ptr = false;
851
852 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
853 return NULL;
854
855 if (sra_mode == SRA_MODE_EARLY_IPA)
856 {
857 if (size < 0 || size != max_size)
858 {
859 disqualify_candidate (base, "Encountered a variable sized access.");
860 return NULL;
861 }
862 if (TREE_CODE (expr) == COMPONENT_REF
863 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
864 {
865 disqualify_candidate (base, "Encountered a bit-field access.");
866 return NULL;
867 }
868 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
869
870 if (ptr)
871 mark_parm_dereference (base, offset + size, stmt);
872 }
873 else
874 {
875 if (size != max_size)
876 {
877 size = max_size;
878 unscalarizable_region = true;
879 }
880 if (size < 0)
881 {
882 disqualify_candidate (base, "Encountered an unconstrained access.");
883 return NULL;
884 }
885 }
886
887 access = create_access_1 (base, offset, size);
888 access->expr = expr;
889 access->type = TREE_TYPE (expr);
890 access->write = write;
891 access->grp_unscalarizable_region = unscalarizable_region;
892 access->stmt = stmt;
893
894 if (TREE_CODE (expr) == COMPONENT_REF
895 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
896 access->non_addressable = 1;
897
898 return access;
899 }
900
901
902 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
903 register types or (recursively) records with only these two kinds of fields.
904 It also returns false if any of these records contains a bit-field. */
905
906 static bool
907 type_consists_of_records_p (tree type)
908 {
909 tree fld;
910
911 if (TREE_CODE (type) != RECORD_TYPE)
912 return false;
913
914 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
915 if (TREE_CODE (fld) == FIELD_DECL)
916 {
917 tree ft = TREE_TYPE (fld);
918
919 if (DECL_BIT_FIELD (fld))
920 return false;
921
922 if (!is_gimple_reg_type (ft)
923 && !type_consists_of_records_p (ft))
924 return false;
925 }
926
927 return true;
928 }
929
930 /* Create total_scalarization accesses for all scalar type fields in DECL, which
931 must be of a RECORD_TYPE conforming to type_consists_of_records_p. BASE
932 must be the top-most VAR_DECL representing the variable, OFFSET must be the
933 offset of DECL within BASE. REF must be the memory reference expression for
934 the given decl. */
935
936 static void
937 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
938 tree ref)
939 {
940 tree fld, decl_type = TREE_TYPE (decl);
941
942 for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
943 if (TREE_CODE (fld) == FIELD_DECL)
944 {
945 HOST_WIDE_INT pos = offset + int_bit_position (fld);
946 tree ft = TREE_TYPE (fld);
947 tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
948 NULL_TREE);
949
950 if (is_gimple_reg_type (ft))
951 {
952 struct access *access;
953 HOST_WIDE_INT size;
954
955 size = tree_low_cst (DECL_SIZE (fld), 1);
956 access = create_access_1 (base, pos, size);
957 access->expr = nref;
958 access->type = ft;
959 access->grp_total_scalarization = 1;
960 /* Accesses for intraprocedural SRA can have their stmt NULL. */
961 }
962 else
963 completely_scalarize_record (base, fld, pos, nref);
964 }
965 }
966
967 /* Create total_scalarization accesses for all scalar type fields in VAR and
968 for VAR as a whole. VAR must be of a RECORD_TYPE conforming to
969 type_consists_of_records_p. */
970
971 static void
972 completely_scalarize_var (tree var)
973 {
974 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
975 struct access *access;
976
977 access = create_access_1 (var, 0, size);
978 access->expr = var;
979 access->type = TREE_TYPE (var);
980 access->grp_total_scalarization = 1;
981
982 completely_scalarize_record (var, var, 0, var);
983 }
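
/* Illustrative example (editorial): for

     struct S { int a; struct { int b; int c; } in; } s;

   completely_scalarize_var creates one access covering the whole of "s" and,
   through completely_scalarize_record, one access for each scalar leaf,
   i.e. "s.a", "s.in.b" and "s.in.c", all with grp_total_scalarization set,
   recursing through the nested record.  */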
984
985 /* Search the given tree for a declaration by skipping handled components and
986 exclude it from the candidates. */
987
988 static void
989 disqualify_base_of_expr (tree t, const char *reason)
990 {
991 t = get_base_address (t);
992 if (sra_mode == SRA_MODE_EARLY_IPA
993 && TREE_CODE (t) == MEM_REF)
994 t = get_ssa_base_param (TREE_OPERAND (t, 0));
995
996 if (t && DECL_P (t))
997 disqualify_candidate (t, reason);
998 }
999
1000 /* Scan expression EXPR and create access structures for all accesses to
1001 candidates for scalarization. Return the created access or NULL if none is
1002 created. */
1003
1004 static struct access *
1005 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
1006 {
1007 struct access *ret = NULL;
1008 bool partial_ref;
1009
1010 if (TREE_CODE (expr) == BIT_FIELD_REF
1011 || TREE_CODE (expr) == IMAGPART_EXPR
1012 || TREE_CODE (expr) == REALPART_EXPR)
1013 {
1014 expr = TREE_OPERAND (expr, 0);
1015 partial_ref = true;
1016 }
1017 else
1018 partial_ref = false;
1019
1020 /* We need to dive through V_C_Es in order to get the size of its parameter
1021 and not the result type. Ada produces such statements. We are also
1022 capable of handling the topmost V_C_E but not any of those buried in other
1023 handled components. */
1024 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1025 expr = TREE_OPERAND (expr, 0);
1026
1027 if (contains_view_convert_expr_p (expr))
1028 {
1029 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1030 "component.");
1031 return NULL;
1032 }
1033
1034 switch (TREE_CODE (expr))
1035 {
1036 case MEM_REF:
1037 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1038 && sra_mode != SRA_MODE_EARLY_IPA)
1039 return NULL;
1040 /* fall through */
1041 case VAR_DECL:
1042 case PARM_DECL:
1043 case RESULT_DECL:
1044 case COMPONENT_REF:
1045 case ARRAY_REF:
1046 case ARRAY_RANGE_REF:
1047 ret = create_access (expr, stmt, write);
1048 break;
1049
1050 default:
1051 break;
1052 }
1053
1054 if (write && partial_ref && ret)
1055 ret->grp_partial_lhs = 1;
1056
1057 return ret;
1058 }
1059
1060 /* Scan expression EXPR and create access structures for all accesses to
1061 candidates for scalarization. Return true if any access has been inserted.
1062 STMT must be the statement from which the expression is taken, WRITE must be
1063 true if the expression is a store and false otherwise. */
1064
1065 static bool
1066 build_access_from_expr (tree expr, gimple stmt, bool write)
1067 {
1068 struct access *access;
1069
1070 access = build_access_from_expr_1 (expr, stmt, write);
1071 if (access)
1072 {
1073 /* This means the aggregate is accessed as a whole in a way other than an
1074 assign statement and thus cannot be removed even if we had a scalar
1075 replacement for everything. */
1076 if (cannot_scalarize_away_bitmap)
1077 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1078 return true;
1079 }
1080 return false;
1081 }
1082
1083 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
1084 modes in which it matters, return true iff they have been disqualified. RHS
1085 may be NULL, in that case ignore it. If we scalarize an aggregate in
1086 intra-SRA we may need to add statements after each statement. This is not
1087 possible if a statement unconditionally has to end the basic block. */
1088 static bool
1089 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
1090 {
1091 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1092 && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
1093 {
1094 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1095 if (rhs)
1096 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1097 return true;
1098 }
1099 return false;
1100 }
1101
1102 /* Scan expressions occurring in STMT, create access structures for all accesses
1103 to candidates for scalarization and remove those candidates which occur in
1104 statements or expressions that prevent them from being split apart. Return
1105 true if any access has been inserted. */
1106
1107 static bool
1108 build_accesses_from_assign (gimple stmt)
1109 {
1110 tree lhs, rhs;
1111 struct access *lacc, *racc;
1112
1113 if (!gimple_assign_single_p (stmt)
1114 /* Scope clobbers don't influence scalarization. */
1115 || gimple_clobber_p (stmt))
1116 return false;
1117
1118 lhs = gimple_assign_lhs (stmt);
1119 rhs = gimple_assign_rhs1 (stmt);
1120
1121 if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1122 return false;
1123
1124 racc = build_access_from_expr_1 (rhs, stmt, false);
1125 lacc = build_access_from_expr_1 (lhs, stmt, true);
1126
1127 if (lacc)
1128 lacc->grp_assignment_write = 1;
1129
1130 if (racc)
1131 {
1132 racc->grp_assignment_read = 1;
1133 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1134 && !is_gimple_reg_type (racc->type))
1135 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1136 }
1137
1138 if (lacc && racc
1139 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1140 && !lacc->grp_unscalarizable_region
1141 && !racc->grp_unscalarizable_region
1142 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1143 && lacc->size == racc->size
1144 && useless_type_conversion_p (lacc->type, racc->type))
1145 {
1146 struct assign_link *link;
1147
1148 link = (struct assign_link *) pool_alloc (link_pool);
1149 memset (link, 0, sizeof (struct assign_link));
1150
1151 link->lacc = lacc;
1152 link->racc = racc;
1153
1154 add_link_to_rhs (racc, link);
1155 }
1156
1157 return lacc || racc;
1158 }
1159
1160 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1161 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1162
1163 static bool
1164 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1165 void *data ATTRIBUTE_UNUSED)
1166 {
1167 op = get_base_address (op);
1168 if (op
1169 && DECL_P (op))
1170 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1171
1172 return false;
1173 }
1174
1175 /* Return true iff callsite CALL has at least as many actual arguments as there
1176 are formal parameters of the function currently processed by IPA-SRA. */
1177
1178 static inline bool
1179 callsite_has_enough_arguments_p (gimple call)
1180 {
1181 return gimple_call_num_args (call) >= (unsigned) func_param_count;
1182 }
1183
1184 /* Scan function and look for interesting expressions and create access
1185 structures for them. Return true iff any access is created. */
1186
1187 static bool
1188 scan_function (void)
1189 {
1190 basic_block bb;
1191 bool ret = false;
1192
1193 FOR_EACH_BB (bb)
1194 {
1195 gimple_stmt_iterator gsi;
1196 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1197 {
1198 gimple stmt = gsi_stmt (gsi);
1199 tree t;
1200 unsigned i;
1201
1202 if (final_bbs && stmt_can_throw_external (stmt))
1203 bitmap_set_bit (final_bbs, bb->index);
1204 switch (gimple_code (stmt))
1205 {
1206 case GIMPLE_RETURN:
1207 t = gimple_return_retval (stmt);
1208 if (t != NULL_TREE)
1209 ret |= build_access_from_expr (t, stmt, false);
1210 if (final_bbs)
1211 bitmap_set_bit (final_bbs, bb->index);
1212 break;
1213
1214 case GIMPLE_ASSIGN:
1215 ret |= build_accesses_from_assign (stmt);
1216 break;
1217
1218 case GIMPLE_CALL:
1219 for (i = 0; i < gimple_call_num_args (stmt); i++)
1220 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1221 stmt, false);
1222
1223 if (sra_mode == SRA_MODE_EARLY_IPA)
1224 {
1225 tree dest = gimple_call_fndecl (stmt);
1226 int flags = gimple_call_flags (stmt);
1227
1228 if (dest)
1229 {
1230 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1231 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1232 encountered_apply_args = true;
1233 if (cgraph_get_node (dest)
1234 == cgraph_get_node (current_function_decl))
1235 {
1236 encountered_recursive_call = true;
1237 if (!callsite_has_enough_arguments_p (stmt))
1238 encountered_unchangable_recursive_call = true;
1239 }
1240 }
1241
1242 if (final_bbs
1243 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1244 bitmap_set_bit (final_bbs, bb->index);
1245 }
1246
1247 t = gimple_call_lhs (stmt);
1248 if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1249 ret |= build_access_from_expr (t, stmt, true);
1250 break;
1251
1252 case GIMPLE_ASM:
1253 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1254 asm_visit_addr);
1255 if (final_bbs)
1256 bitmap_set_bit (final_bbs, bb->index);
1257
1258 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1259 {
1260 t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1261 ret |= build_access_from_expr (t, stmt, false);
1262 }
1263 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1264 {
1265 t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1266 ret |= build_access_from_expr (t, stmt, true);
1267 }
1268 break;
1269
1270 default:
1271 break;
1272 }
1273 }
1274 }
1275
1276 return ret;
1277 }
1278
1279 /* Helper of QSORT function. There are pointers to accesses in the array. An
1280 access is considered smaller than another if it has a smaller offset or if the
1281 offsets are the same but its size is bigger. */
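
/* Illustrative example (editorial): accesses with (offset, size) pairs
   (32, 32), (0, 64) and (0, 32) are sorted as (0, 64), (0, 32), (32, 32):
   smaller offsets come first, and at equal offsets the bigger access comes
   first, so an access that contains others starting at the same offset
   precedes them.  */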
1282
1283 static int
1284 compare_access_positions (const void *a, const void *b)
1285 {
1286 const access_p *fp1 = (const access_p *) a;
1287 const access_p *fp2 = (const access_p *) b;
1288 const access_p f1 = *fp1;
1289 const access_p f2 = *fp2;
1290
1291 if (f1->offset != f2->offset)
1292 return f1->offset < f2->offset ? -1 : 1;
1293
1294 if (f1->size == f2->size)
1295 {
1296 if (f1->type == f2->type)
1297 return 0;
1298 /* Put any non-aggregate type before any aggregate type. */
1299 else if (!is_gimple_reg_type (f1->type)
1300 && is_gimple_reg_type (f2->type))
1301 return 1;
1302 else if (is_gimple_reg_type (f1->type)
1303 && !is_gimple_reg_type (f2->type))
1304 return -1;
1305 /* Put any complex or vector type before any other scalar type. */
1306 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1307 && TREE_CODE (f1->type) != VECTOR_TYPE
1308 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1309 || TREE_CODE (f2->type) == VECTOR_TYPE))
1310 return 1;
1311 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1312 || TREE_CODE (f1->type) == VECTOR_TYPE)
1313 && TREE_CODE (f2->type) != COMPLEX_TYPE
1314 && TREE_CODE (f2->type) != VECTOR_TYPE)
1315 return -1;
1316 /* Put the integral type with the bigger precision first. */
1317 else if (INTEGRAL_TYPE_P (f1->type)
1318 && INTEGRAL_TYPE_P (f2->type))
1319 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1320 /* Put any integral type with non-full precision last. */
1321 else if (INTEGRAL_TYPE_P (f1->type)
1322 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1323 != TYPE_PRECISION (f1->type)))
1324 return 1;
1325 else if (INTEGRAL_TYPE_P (f2->type)
1326 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1327 != TYPE_PRECISION (f2->type)))
1328 return -1;
1329 /* Stabilize the sort. */
1330 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1331 }
1332
1333 /* We want the bigger accesses first, thus the opposite operator in the next
1334 line: */
1335 return f1->size > f2->size ? -1 : 1;
1336 }
1337
1338
1339 /* Append a name of the declaration to the name obstack. A helper function for
1340 make_fancy_name. */
1341
1342 static void
1343 make_fancy_decl_name (tree decl)
1344 {
1345 char buffer[32];
1346
1347 tree name = DECL_NAME (decl);
1348 if (name)
1349 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1350 IDENTIFIER_LENGTH (name));
1351 else
1352 {
1353 sprintf (buffer, "D%u", DECL_UID (decl));
1354 obstack_grow (&name_obstack, buffer, strlen (buffer));
1355 }
1356 }
1357
1358 /* Helper for make_fancy_name. */
1359
1360 static void
1361 make_fancy_name_1 (tree expr)
1362 {
1363 char buffer[32];
1364 tree index;
1365
1366 if (DECL_P (expr))
1367 {
1368 make_fancy_decl_name (expr);
1369 return;
1370 }
1371
1372 switch (TREE_CODE (expr))
1373 {
1374 case COMPONENT_REF:
1375 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1376 obstack_1grow (&name_obstack, '$');
1377 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1378 break;
1379
1380 case ARRAY_REF:
1381 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1382 obstack_1grow (&name_obstack, '$');
1383 /* Arrays with only one element may not have a constant as their
1384 index. */
1385 index = TREE_OPERAND (expr, 1);
1386 if (TREE_CODE (index) != INTEGER_CST)
1387 break;
1388 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1389 obstack_grow (&name_obstack, buffer, strlen (buffer));
1390 break;
1391
1392 case ADDR_EXPR:
1393 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1394 break;
1395
1396 case MEM_REF:
1397 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1398 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1399 {
1400 obstack_1grow (&name_obstack, '$');
1401 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1402 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1403 obstack_grow (&name_obstack, buffer, strlen (buffer));
1404 }
1405 break;
1406
1407 case BIT_FIELD_REF:
1408 case REALPART_EXPR:
1409 case IMAGPART_EXPR:
1410 gcc_unreachable (); /* we treat these as scalars. */
1411 break;
1412 default:
1413 break;
1414 }
1415 }
1416
1417 /* Create a human readable name for a replacement variable based on EXPR. */
1418
1419 static char *
1420 make_fancy_name (tree expr)
1421 {
1422 make_fancy_name_1 (expr);
1423 obstack_1grow (&name_obstack, '\0');
1424 return XOBFINISH (&name_obstack, char *);
1425 }
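
/* Illustrative example (editorial): for an access expression such as
   a.b[2].c, make_fancy_name builds the string "a$b$2$c" on the name obstack,
   with '$' separating the components and constant array indices printed in
   decimal; the result is later used to give the scalar replacement a human
   readable name.  */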
1426
1427 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1428 EXP_TYPE at the given OFFSET. If BASE is something for which
1429 get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
1430 to insert new statements either before or below the current one as specified
1431 by INSERT_AFTER. This function is not capable of handling bitfields.
1432
1433 BASE must be either a declaration or a memory reference that has correct
1434 alignment information embedded in it (e.g. a pre-existing one in SRA). */
1435
1436 tree
1437 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1438 tree exp_type, gimple_stmt_iterator *gsi,
1439 bool insert_after)
1440 {
1441 tree prev_base = base;
1442 tree off;
1443 HOST_WIDE_INT base_offset;
1444 unsigned HOST_WIDE_INT misalign;
1445 unsigned int align;
1446
1447 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1448 get_object_alignment_1 (base, &align, &misalign);
1449 base = get_addr_base_and_unit_offset (base, &base_offset);
1450
1451 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1452 offset such as array[var_index]. */
1453 if (!base)
1454 {
1455 gimple stmt;
1456 tree tmp, addr;
1457
1458 gcc_checking_assert (gsi);
1459 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1460 addr = build_fold_addr_expr (unshare_expr (prev_base));
1461 STRIP_USELESS_TYPE_CONVERSION (addr);
1462 stmt = gimple_build_assign (tmp, addr);
1463 gimple_set_location (stmt, loc);
1464 if (insert_after)
1465 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1466 else
1467 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1468
1469 off = build_int_cst (reference_alias_ptr_type (prev_base),
1470 offset / BITS_PER_UNIT);
1471 base = tmp;
1472 }
1473 else if (TREE_CODE (base) == MEM_REF)
1474 {
1475 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1476 base_offset + offset / BITS_PER_UNIT);
1477 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1478 base = unshare_expr (TREE_OPERAND (base, 0));
1479 }
1480 else
1481 {
1482 off = build_int_cst (reference_alias_ptr_type (base),
1483 base_offset + offset / BITS_PER_UNIT);
1484 base = build_fold_addr_expr (unshare_expr (base));
1485 }
1486
1487 misalign = (misalign + offset) & (align - 1);
1488 if (misalign != 0)
1489 align = (misalign & -misalign);
1490 if (align < TYPE_ALIGN (exp_type))
1491 exp_type = build_aligned_type (exp_type, align);
1492
1493 return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1494 }
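
/* Illustrative example (editorial, under the assumption that BASE is a plain
   declaration "struct S s" and OFFSET is 32 bits): the function above would
   return a MEM_REF which the GIMPLE dumps would print roughly as

     MEM[(int *)&s + 4B]

   i.e. the first operand is the address of the base and the second operand
   is a pointer constant carrying the byte offset and the alias type.  */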
1495
1496 /* Construct a memory reference to a part of an aggregate BASE at the given
1497 OFFSET and of the same type as MODEL. In case this is a reference to a
1498 bit-field, the function will replicate the last component_ref of model's
1499 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1500 build_ref_for_offset. */
1501
1502 static tree
1503 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1504 struct access *model, gimple_stmt_iterator *gsi,
1505 bool insert_after)
1506 {
1507 if (TREE_CODE (model->expr) == COMPONENT_REF
1508 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1509 {
1510 /* This access represents a bit-field. */
1511 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1512
1513 offset -= int_bit_position (fld);
1514 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1515 t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
1516 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1517 NULL_TREE);
1518 }
1519 else
1520 return build_ref_for_offset (loc, base, offset, model->type,
1521 gsi, insert_after);
1522 }
1523
1524 /* Attempt to build a memory reference that we could put into a gimple
1525 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1526 create statements and returns NULL instead. This function also ignores
1527 alignment issues and so its results should never end up in non-debug
1528 statements. */
1529
1530 static tree
1531 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1532 struct access *model)
1533 {
1534 HOST_WIDE_INT base_offset;
1535 tree off;
1536
1537 if (TREE_CODE (model->expr) == COMPONENT_REF
1538 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1539 return NULL_TREE;
1540
1541 base = get_addr_base_and_unit_offset (base, &base_offset);
1542 if (!base)
1543 return NULL_TREE;
1544 if (TREE_CODE (base) == MEM_REF)
1545 {
1546 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1547 base_offset + offset / BITS_PER_UNIT);
1548 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1549 base = unshare_expr (TREE_OPERAND (base, 0));
1550 }
1551 else
1552 {
1553 off = build_int_cst (reference_alias_ptr_type (base),
1554 base_offset + offset / BITS_PER_UNIT);
1555 base = build_fold_addr_expr (unshare_expr (base));
1556 }
1557
1558 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1559 }
1560
1561 /* Construct a memory reference consisting of component_refs and array_refs to
1562 a part of an aggregate *RES (which is of type TYPE). The requested part
1563 should have type EXP_TYPE and be at the given OFFSET. This function might not
1564 succeed; it returns true when it does, and only then does *RES point to something
1565 meaningful. This function should be used only to build expressions that we
1566 might need to present to the user (e.g. in warnings). In all other situations,
1567 build_ref_for_model or build_ref_for_offset should be used instead. */
1568
1569 static bool
1570 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1571 tree exp_type)
1572 {
1573 while (1)
1574 {
1575 tree fld;
1576 tree tr_size, index, minidx;
1577 HOST_WIDE_INT el_size;
1578
1579 if (offset == 0 && exp_type
1580 && types_compatible_p (exp_type, type))
1581 return true;
1582
1583 switch (TREE_CODE (type))
1584 {
1585 case UNION_TYPE:
1586 case QUAL_UNION_TYPE:
1587 case RECORD_TYPE:
1588 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1589 {
1590 HOST_WIDE_INT pos, size;
1591 tree tr_pos, expr, *expr_ptr;
1592
1593 if (TREE_CODE (fld) != FIELD_DECL)
1594 continue;
1595
1596 tr_pos = bit_position (fld);
1597 if (!tr_pos || !host_integerp (tr_pos, 1))
1598 continue;
1599 pos = TREE_INT_CST_LOW (tr_pos);
1600 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1601 tr_size = DECL_SIZE (fld);
1602 if (!tr_size || !host_integerp (tr_size, 1))
1603 continue;
1604 size = TREE_INT_CST_LOW (tr_size);
1605 if (size == 0)
1606 {
1607 if (pos != offset)
1608 continue;
1609 }
1610 else if (pos > offset || (pos + size) <= offset)
1611 continue;
1612
1613 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1614 NULL_TREE);
1615 expr_ptr = &expr;
1616 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1617 offset - pos, exp_type))
1618 {
1619 *res = expr;
1620 return true;
1621 }
1622 }
1623 return false;
1624
1625 case ARRAY_TYPE:
1626 tr_size = TYPE_SIZE (TREE_TYPE (type));
1627 if (!tr_size || !host_integerp (tr_size, 1))
1628 return false;
1629 el_size = tree_low_cst (tr_size, 1);
1630
1631 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1632 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1633 return false;
1634 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1635 if (!integer_zerop (minidx))
1636 index = int_const_binop (PLUS_EXPR, index, minidx);
1637 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1638 NULL_TREE, NULL_TREE);
1639 offset = offset % el_size;
1640 type = TREE_TYPE (type);
1641 break;
1642
1643 default:
1644 if (offset != 0)
1645 return false;
1646
1647 if (exp_type)
1648 return false;
1649 else
1650 return true;
1651 }
1652 }
1653 }
1654
1655 /* Return true iff TYPE is stdarg va_list type. */
1656
1657 static inline bool
1658 is_va_list_type (tree type)
1659 {
1660 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1661 }
1662
1663 /* Print message to dump file why a variable was rejected. */
1664
1665 static void
1666 reject (tree var, const char *msg)
1667 {
1668 if (dump_file && (dump_flags & TDF_DETAILS))
1669 {
1670 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1671 print_generic_expr (dump_file, var, 0);
1672 fprintf (dump_file, "\n");
1673 }
1674 }
1675
1676 /* Return true if VAR is a candidate for SRA. */
1677
1678 static bool
1679 maybe_add_sra_candidate (tree var)
1680 {
1681 tree type = TREE_TYPE (var);
1682 const char *msg;
1683 void **slot;
1684
1685 if (!AGGREGATE_TYPE_P (type))
1686 {
1687 reject (var, "not aggregate");
1688 return false;
1689 }
1690 if (needs_to_live_in_memory (var))
1691 {
1692 reject (var, "needs to live in memory");
1693 return false;
1694 }
1695 if (TREE_THIS_VOLATILE (var))
1696 {
1697 reject (var, "is volatile");
1698 return false;
1699 }
1700 if (!COMPLETE_TYPE_P (type))
1701 {
1702 reject (var, "has incomplete type");
1703 return false;
1704 }
1705 if (!host_integerp (TYPE_SIZE (type), 1))
1706 {
1707 reject (var, "type size not fixed");
1708 return false;
1709 }
1710 if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
1711 {
1712 reject (var, "type size is zero");
1713 return false;
1714 }
1715 if (type_internals_preclude_sra_p (type, &msg))
1716 {
1717 reject (var, msg);
1718 return false;
1719 }
1720 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1721 we also want to schedule it rather late. Thus we ignore it in
1722 the early pass. */
1723 (sra_mode == SRA_MODE_EARLY_INTRA
1724 && is_va_list_type (type)))
1725 {
1726 reject (var, "is va_list");
1727 return false;
1728 }
1729
1730 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1731 slot = htab_find_slot_with_hash (candidates, var, DECL_UID (var), INSERT);
1732 *slot = (void *) var;
1733
1734 if (dump_file && (dump_flags & TDF_DETAILS))
1735 {
1736 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1737 print_generic_expr (dump_file, var, 0);
1738 fprintf (dump_file, "\n");
1739 }
1740
1741 return true;
1742 }
1743
1744 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1745 those declarations whose type is suitable for scalarization. */
1746
1747 static bool
1748 find_var_candidates (void)
1749 {
1750 tree var, parm;
1751 unsigned int i;
1752 bool ret = false;
1753
1754 for (parm = DECL_ARGUMENTS (current_function_decl);
1755 parm;
1756 parm = DECL_CHAIN (parm))
1757 ret |= maybe_add_sra_candidate (parm);
1758
1759 FOR_EACH_LOCAL_DECL (cfun, i, var)
1760 {
1761 if (TREE_CODE (var) != VAR_DECL)
1762 continue;
1763
1764 ret |= maybe_add_sra_candidate (var);
1765 }
1766
1767 return ret;
1768 }
1769
1770 /* Sort all accesses for the given variable, check for partial overlaps and
1771 return NULL if there are any. If there are none, pick a representative for
1772 each combination of offset and size and create a linked list out of them.
1773 Return the pointer to the first representative and make sure it is the first
1774 one in the vector of accesses. */
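
/* Illustrative example (editorial): accesses at (offset, size) of (0, 64),
   (0, 32) and (32, 32) nest properly and are acceptable, whereas (0, 64)
   followed by (32, 64) overlap only partially, neither lying entirely within
   the other, so the function returns NULL and the variable is not
   scalarized.  */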
1775
1776 static struct access *
1777 sort_and_splice_var_accesses (tree var)
1778 {
1779 int i, j, access_count;
1780 struct access *res, **prev_acc_ptr = &res;
1781 vec<access_p> *access_vec;
1782 bool first = true;
1783 HOST_WIDE_INT low = -1, high = 0;
1784
1785 access_vec = get_base_access_vector (var);
1786 if (!access_vec)
1787 return NULL;
1788 access_count = access_vec->length ();
1789
1790 /* Sort by <OFFSET, SIZE>. */
1791 access_vec->qsort (compare_access_positions);
1792
1793 i = 0;
1794 while (i < access_count)
1795 {
1796 struct access *access = (*access_vec)[i];
1797 bool grp_write = access->write;
1798 bool grp_read = !access->write;
1799 bool grp_scalar_write = access->write
1800 && is_gimple_reg_type (access->type);
1801 bool grp_scalar_read = !access->write
1802 && is_gimple_reg_type (access->type);
1803 bool grp_assignment_read = access->grp_assignment_read;
1804 bool grp_assignment_write = access->grp_assignment_write;
1805 bool multiple_scalar_reads = false;
1806 bool total_scalarization = access->grp_total_scalarization;
1807 bool grp_partial_lhs = access->grp_partial_lhs;
1808 bool first_scalar = is_gimple_reg_type (access->type);
1809 bool unscalarizable_region = access->grp_unscalarizable_region;
1810
1811 if (first || access->offset >= high)
1812 {
1813 first = false;
1814 low = access->offset;
1815 high = access->offset + access->size;
1816 }
1817 else if (access->offset > low && access->offset + access->size > high)
1818 return NULL;
1819 else
1820 gcc_assert (access->offset >= low
1821 && access->offset + access->size <= high);
1822
1823 j = i + 1;
1824 while (j < access_count)
1825 {
1826 struct access *ac2 = (*access_vec)[j];
1827 if (ac2->offset != access->offset || ac2->size != access->size)
1828 break;
1829 if (ac2->write)
1830 {
1831 grp_write = true;
1832 grp_scalar_write = (grp_scalar_write
1833 || is_gimple_reg_type (ac2->type));
1834 }
1835 else
1836 {
1837 grp_read = true;
1838 if (is_gimple_reg_type (ac2->type))
1839 {
1840 if (grp_scalar_read)
1841 multiple_scalar_reads = true;
1842 else
1843 grp_scalar_read = true;
1844 }
1845 }
1846 grp_assignment_read |= ac2->grp_assignment_read;
1847 grp_assignment_write |= ac2->grp_assignment_write;
1848 grp_partial_lhs |= ac2->grp_partial_lhs;
1849 unscalarizable_region |= ac2->grp_unscalarizable_region;
1850 total_scalarization |= ac2->grp_total_scalarization;
1851 relink_to_new_repr (access, ac2);
1852
1853 /* If there are both aggregate-type and scalar-type accesses with
1854 this combination of size and offset, the comparison function
1855 should have put the scalars first. */
1856 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1857 ac2->group_representative = access;
1858 j++;
1859 }
1860
1861 i = j;
1862
1863 access->group_representative = access;
1864 access->grp_write = grp_write;
1865 access->grp_read = grp_read;
1866 access->grp_scalar_read = grp_scalar_read;
1867 access->grp_scalar_write = grp_scalar_write;
1868 access->grp_assignment_read = grp_assignment_read;
1869 access->grp_assignment_write = grp_assignment_write;
1870 access->grp_hint = multiple_scalar_reads || total_scalarization;
1871 access->grp_total_scalarization = total_scalarization;
1872 access->grp_partial_lhs = grp_partial_lhs;
1873 access->grp_unscalarizable_region = unscalarizable_region;
1874 if (access->first_link)
1875 add_access_to_work_queue (access);
1876
1877 *prev_acc_ptr = access;
1878 prev_acc_ptr = &access->next_grp;
1879 }
1880
1881 gcc_assert (res == (*access_vec)[0]);
1882 return res;
1883 }
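
/* A minimal sketch of the grouping (assuming a 32-bit int and a hypothetical
   candidate declared as "struct { int a; int b; } s;"): if the function
   reads s.a twice and writes s.b once, the sorted vector holds two
   <offset 0, size 32> reads and one <offset 32, size 32> write.  The two
   reads are spliced into a single group whose representative has
   grp_scalar_read set and, because there is more than one scalar read,
   also grp_hint; the write forms a second group of its own.  */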
1884
1885 /* Create a variable for the given ACCESS which determines the type, name and a
1886 few other properties. Return the variable declaration and store it also to
1887 ACCESS->replacement_decl. */
1888
1889 static tree
1890 create_access_replacement (struct access *access)
1891 {
1892 tree repl;
1893
1894 if (access->grp_to_be_debug_replaced)
1895 {
1896 repl = create_tmp_var_raw (access->type, NULL);
1897 DECL_CONTEXT (repl) = current_function_decl;
1898 }
1899 else
1900 repl = create_tmp_var (access->type, "SR");
1901 if (TREE_CODE (access->type) == COMPLEX_TYPE
1902 || TREE_CODE (access->type) == VECTOR_TYPE)
1903 {
1904 if (!access->grp_partial_lhs)
1905 DECL_GIMPLE_REG_P (repl) = 1;
1906 }
1907 else if (access->grp_partial_lhs
1908 && is_gimple_reg_type (access->type))
1909 TREE_ADDRESSABLE (repl) = 1;
1910
1911 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1912 DECL_ARTIFICIAL (repl) = 1;
1913 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1914
1915 if (DECL_NAME (access->base)
1916 && !DECL_IGNORED_P (access->base)
1917 && !DECL_ARTIFICIAL (access->base))
1918 {
1919 char *pretty_name = make_fancy_name (access->expr);
1920 tree debug_expr = unshare_expr (access->expr), d;
1921 bool fail = false;
1922
1923 DECL_NAME (repl) = get_identifier (pretty_name);
1924 obstack_free (&name_obstack, pretty_name);
1925
1926 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1927 as DECL_DEBUG_EXPR isn't considered when looking for still
1928 used SSA_NAMEs and thus they could be freed. All debug info
1929 generation cares about is whether something is constant or variable
1930 and that get_ref_base_and_extent works properly on the
1931 expression. It cannot handle accesses at a non-constant offset
1932 though, so just give up in those cases. */
1933 for (d = debug_expr;
1934 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
1935 d = TREE_OPERAND (d, 0))
1936 switch (TREE_CODE (d))
1937 {
1938 case ARRAY_REF:
1939 case ARRAY_RANGE_REF:
1940 if (TREE_OPERAND (d, 1)
1941 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
1942 fail = true;
1943 if (TREE_OPERAND (d, 3)
1944 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
1945 fail = true;
1946 /* FALLTHRU */
1947 case COMPONENT_REF:
1948 if (TREE_OPERAND (d, 2)
1949 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
1950 fail = true;
1951 break;
1952 case MEM_REF:
1953 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
1954 fail = true;
1955 else
1956 d = TREE_OPERAND (d, 0);
1957 break;
1958 default:
1959 break;
1960 }
1961 if (!fail)
1962 {
1963 SET_DECL_DEBUG_EXPR (repl, debug_expr);
1964 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1965 }
1966 if (access->grp_no_warning)
1967 TREE_NO_WARNING (repl) = 1;
1968 else
1969 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1970 }
1971 else
1972 TREE_NO_WARNING (repl) = 1;
1973
1974 if (dump_file)
1975 {
1976 if (access->grp_to_be_debug_replaced)
1977 {
1978 fprintf (dump_file, "Created a debug-only replacement for ");
1979 print_generic_expr (dump_file, access->base, 0);
1980 fprintf (dump_file, " offset: %u, size: %u\n",
1981 (unsigned) access->offset, (unsigned) access->size);
1982 }
1983 else
1984 {
1985 fprintf (dump_file, "Created a replacement for ");
1986 print_generic_expr (dump_file, access->base, 0);
1987 fprintf (dump_file, " offset: %u, size: %u: ",
1988 (unsigned) access->offset, (unsigned) access->size);
1989 print_generic_expr (dump_file, repl, 0);
1990 fprintf (dump_file, "\n");
1991 }
1992 }
1993 sra_stats.replacements++;
1994
1995 return repl;
1996 }
1997
1998 /* Return ACCESS scalar replacement, create it if it does not exist yet. */
1999
2000 static inline tree
2001 get_access_replacement (struct access *access)
2002 {
2003 if (!access->replacement_decl)
2004 access->replacement_decl = create_access_replacement (access);
2005 return access->replacement_decl;
2006 }
2007
2008
2009 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2010 linked list along the way. Stop when *ACCESS is NULL or the access it points
2011 to is not "within" the root. Return false iff some accesses partially
2012 overlap. */
2013
2014 static bool
2015 build_access_subtree (struct access **access)
2016 {
2017 struct access *root = *access, *last_child = NULL;
2018 HOST_WIDE_INT limit = root->offset + root->size;
2019
2020 *access = (*access)->next_grp;
2021 while (*access && (*access)->offset + (*access)->size <= limit)
2022 {
2023 if (!last_child)
2024 root->first_child = *access;
2025 else
2026 last_child->next_sibling = *access;
2027 last_child = *access;
2028
2029 if (!build_access_subtree (access))
2030 return false;
2031 }
2032
2033 if (*access && (*access)->offset < limit)
2034 return false;
2035
2036 return true;
2037 }
2038
2039 /* Build a tree of access representatives, ACCESS is the pointer to the first
2040 one, others are linked in a list by the next_grp field. Return false iff
2041 some accesses partially overlap. */
2042
2043 static bool
2044 build_access_trees (struct access *access)
2045 {
2046 while (access)
2047 {
2048 struct access *root = access;
2049
2050 if (!build_access_subtree (&access))
2051 return false;
2052 root->next_grp = access;
2053 }
2054 return true;
2055 }
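
/* For illustration (hypothetical candidate, assuming 32-bit int and no
   padding), a variable of type

     struct outer { struct { int x; int y; } in; int z; };

   that is accessed as a whole, as .in, as .in.x and as .z would yield the
   access tree

     <offset 0, size 96>        whole variable
       <offset 0, size 64>      .in
         <offset 0, size 32>    .in.x
       <offset 64, size 32>     .z

   with siblings linked through next_sibling and roots of separate
   candidates chained through next_grp.  */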
2056
2057 /* Return true if EXPR contains some ARRAY_REFs into a variably bounded
2058 array. */
2059
2060 static bool
2061 expr_with_var_bounded_array_refs_p (tree expr)
2062 {
2063 while (handled_component_p (expr))
2064 {
2065 if (TREE_CODE (expr) == ARRAY_REF
2066 && !host_integerp (array_ref_low_bound (expr), 0))
2067 return true;
2068 expr = TREE_OPERAND (expr, 0);
2069 }
2070 return false;
2071 }
2072
2073 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2074 they seem beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2075 sorts of access flags appropriately along the way, notably propagate
2076 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2077 grp_total_scalarization from PARENT down to ROOT.
2078
2079 Creating a replacement for a scalar access is considered beneficial if its
2080 grp_hint is set (this means we are either attempting total scalarization or
2081 there is more than one direct read access) or according to the following
2082 table:
2083
2084 Access written to through a scalar type (once or more times)
2085 |
2086 |    Written to in an assignment statement
2087 |    |
2088 |    |    Access read as scalar _once_
2089 |    |    |
2090 |    |    |    Read in an assignment statement
2091 |    |    |    |
2092 |    |    |    |    Scalarize    Comment
2093 -----------------------------------------------------------------------------
2094 0    0    0    0                 No access for the scalar
2095 0    0    0    1                 No access for the scalar
2096 0    0    1    0    No           Single read - won't help
2097 0    0    1    1    No           The same case
2098 0    1    0    0                 No access for the scalar
2099 0    1    0    1                 No access for the scalar
2100 0    1    1    0    Yes          s = *g; return s.i;
2101 0    1    1    1    Yes          The same case as above
2102 1    0    0    0    No           Won't help
2103 1    0    0    1    Yes          s.i = 1; *g = s;
2104 1    0    1    0    Yes          s.i = 5; g = s.i;
2105 1    0    1    1    Yes          The same case as above
2106 1    1    0    0    No           Won't help.
2107 1    1    0    1    Yes          s.i = 1; *g = s;
2108 1    1    1    0    Yes          s = *g; return s.i;
2109 1    1    1    1    Yes          Any of the above yeses */
2110
2111 static bool
2112 analyze_access_subtree (struct access *root, struct access *parent,
2113 bool allow_replacements)
2114 {
2115 struct access *child;
2116 HOST_WIDE_INT limit = root->offset + root->size;
2117 HOST_WIDE_INT covered_to = root->offset;
2118 bool scalar = is_gimple_reg_type (root->type);
2119 bool hole = false, sth_created = false;
2120
2121 if (parent)
2122 {
2123 if (parent->grp_read)
2124 root->grp_read = 1;
2125 if (parent->grp_assignment_read)
2126 root->grp_assignment_read = 1;
2127 if (parent->grp_write)
2128 root->grp_write = 1;
2129 if (parent->grp_assignment_write)
2130 root->grp_assignment_write = 1;
2131 if (parent->grp_total_scalarization)
2132 root->grp_total_scalarization = 1;
2133 }
2134
2135 if (root->grp_unscalarizable_region)
2136 allow_replacements = false;
2137
2138 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2139 allow_replacements = false;
2140
2141 for (child = root->first_child; child; child = child->next_sibling)
2142 {
2143 hole |= covered_to < child->offset;
2144 sth_created |= analyze_access_subtree (child, root,
2145 allow_replacements && !scalar);
2146
2147 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2148 root->grp_total_scalarization &= child->grp_total_scalarization;
2149 if (child->grp_covered)
2150 covered_to += child->size;
2151 else
2152 hole = true;
2153 }
2154
2155 if (allow_replacements && scalar && !root->first_child
2156 && (root->grp_hint
2157 || ((root->grp_scalar_read || root->grp_assignment_read)
2158 && (root->grp_scalar_write || root->grp_assignment_write))))
2159 {
2160 bool new_integer_type;
2161 /* Always create access replacements that cover the whole access.
2162 For integral types this means the precision has to match.
2163 Avoid assumptions based on the integral type kind, too. */
2164 if (INTEGRAL_TYPE_P (root->type)
2165 && (TREE_CODE (root->type) != INTEGER_TYPE
2166 || TYPE_PRECISION (root->type) != root->size)
2167 /* But leave bitfield accesses alone. */
2168 && (TREE_CODE (root->expr) != COMPONENT_REF
2169 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2170 {
2171 tree rt = root->type;
2172 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2173 && (root->size % BITS_PER_UNIT) == 0);
2174 root->type = build_nonstandard_integer_type (root->size,
2175 TYPE_UNSIGNED (rt));
2176 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2177 root->base, root->offset,
2178 root->type, NULL, false);
2179 new_integer_type = true;
2180 }
2181 else
2182 new_integer_type = false;
2183
2184 if (dump_file && (dump_flags & TDF_DETAILS))
2185 {
2186 fprintf (dump_file, "Marking ");
2187 print_generic_expr (dump_file, root->base, 0);
2188 fprintf (dump_file, " offset: %u, size: %u ",
2189 (unsigned) root->offset, (unsigned) root->size);
2190 fprintf (dump_file, " to be replaced%s.\n",
2191 new_integer_type ? " with an integer": "");
2192 }
2193
2194 root->grp_to_be_replaced = 1;
2195 sth_created = true;
2196 hole = false;
2197 }
2198 else
2199 {
2200 if (allow_replacements
2201 && scalar && !root->first_child
2202 && (root->grp_scalar_write || root->grp_assignment_write))
2203 {
2204 gcc_checking_assert (!root->grp_scalar_read
2205 && !root->grp_assignment_read);
2206 sth_created = true;
2207 if (MAY_HAVE_DEBUG_STMTS)
2208 {
2209 root->grp_to_be_debug_replaced = 1;
2210 if (dump_file && (dump_flags & TDF_DETAILS))
2211 {
2212 fprintf (dump_file, "Marking ");
2213 print_generic_expr (dump_file, root->base, 0);
2214 fprintf (dump_file, " offset: %u, size: %u ",
2215 (unsigned) root->offset, (unsigned) root->size);
2216 fprintf (dump_file, " to be replaced with debug "
2217 "statements.\n");
2218 }
2219 }
2220 }
2221
2222 if (covered_to < limit)
2223 hole = true;
2224 if (scalar)
2225 root->grp_total_scalarization = 0;
2226 }
2227
2228 if (!hole || root->grp_total_scalarization)
2229 root->grp_covered = 1;
2230 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2231 root->grp_unscalarized_data = 1; /* not covered and written to */
2232 return sth_created;
2233 }
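
/* To make the table above more concrete, consider the hypothetical
   fragment

     struct S { int i; int j; } s;
     ...
     s = *g;
     return s.i;

   The child access for s.i is written in an assignment (via the whole-S
   store) and read as a scalar once, i.e. row "0 1 1 0" of the table, so it
   gets a replacement.  The root access for S is not fully covered by its
   children (s.j has no access), so it has a hole; because it is also
   written to, grp_unscalarized_data is set and the original aggregate
   store has to stay around to fill in the unscalarized part.  */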
2234
2235 /* Analyze all access trees linked by next_grp by the means of
2236 analyze_access_subtree. */
2237 static bool
2238 analyze_access_trees (struct access *access)
2239 {
2240 bool ret = false;
2241
2242 while (access)
2243 {
2244 if (analyze_access_subtree (access, NULL, true))
2245 ret = true;
2246 access = access->next_grp;
2247 }
2248
2249 return ret;
2250 }
2251
2252 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with size
2253 SIZE would conflict with an already existing one. If exactly such a child
2254 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2255
2256 static bool
2257 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2258 HOST_WIDE_INT size, struct access **exact_match)
2259 {
2260 struct access *child;
2261
2262 for (child = lacc->first_child; child; child = child->next_sibling)
2263 {
2264 if (child->offset == norm_offset && child->size == size)
2265 {
2266 *exact_match = child;
2267 return true;
2268 }
2269
2270 if (child->offset < norm_offset + size
2271 && child->offset + child->size > norm_offset)
2272 return true;
2273 }
2274
2275 return false;
2276 }
2277
2278 /* Create a new child access of PARENT, with all properties just like MODEL
2279 except for its offset and with its grp_write true and grp_read false.
2280 Return the new access or NULL if it cannot be created. Note that this access
2281 is created long after all splicing and sorting, it's not located in any
2282 access vector and is automatically a representative of its group. */
2283
2284 static struct access *
2285 create_artificial_child_access (struct access *parent, struct access *model,
2286 HOST_WIDE_INT new_offset)
2287 {
2288 struct access *access;
2289 struct access **child;
2290 tree expr = parent->base;
2291
2292 gcc_assert (!model->grp_unscalarizable_region);
2293
2294 access = (struct access *) pool_alloc (access_pool);
2295 memset (access, 0, sizeof (struct access));
2296 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2297 model->type))
2298 {
2299 access->grp_no_warning = true;
2300 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2301 new_offset, model, NULL, false);
2302 }
2303
2304 access->base = parent->base;
2305 access->expr = expr;
2306 access->offset = new_offset;
2307 access->size = model->size;
2308 access->type = model->type;
2309 access->grp_write = true;
2310 access->grp_read = false;
2311
2312 child = &parent->first_child;
2313 while (*child && (*child)->offset < new_offset)
2314 child = &(*child)->next_sibling;
2315
2316 access->next_sibling = *child;
2317 *child = access;
2318
2319 return access;
2320 }
2321
2322
2323 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2324 true if any new subaccess was created. Additionally, if RACC is a scalar
2325 access but LACC is not, change the type of the latter, if possible. */
2326
2327 static bool
2328 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2329 {
2330 struct access *rchild;
2331 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2332 bool ret = false;
2333
2334 if (is_gimple_reg_type (lacc->type)
2335 || lacc->grp_unscalarizable_region
2336 || racc->grp_unscalarizable_region)
2337 return false;
2338
2339 if (is_gimple_reg_type (racc->type))
2340 {
2341 if (!lacc->first_child && !racc->first_child)
2342 {
2343 tree t = lacc->base;
2344
2345 lacc->type = racc->type;
2346 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2347 lacc->offset, racc->type))
2348 lacc->expr = t;
2349 else
2350 {
2351 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2352 lacc->base, lacc->offset,
2353 racc, NULL, false);
2354 lacc->grp_no_warning = true;
2355 }
2356 }
2357 return false;
2358 }
2359
2360 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2361 {
2362 struct access *new_acc = NULL;
2363 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2364
2365 if (rchild->grp_unscalarizable_region)
2366 continue;
2367
2368 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2369 &new_acc))
2370 {
2371 if (new_acc)
2372 {
2373 rchild->grp_hint = 1;
2374 new_acc->grp_hint |= new_acc->grp_read;
2375 if (rchild->first_child)
2376 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2377 }
2378 continue;
2379 }
2380
2381 rchild->grp_hint = 1;
2382 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2383 if (new_acc)
2384 {
2385 ret = true;
2386 if (racc->first_child)
2387 propagate_subaccesses_across_link (new_acc, rchild);
2388 }
2389 }
2390
2391 return ret;
2392 }
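
/* A rough example of the propagation: for two candidate aggregates of the
   same struct type and the (hypothetical) statements

     a = b;
     ... = b.x;

   the access representing b.x is a subaccess of the RHS of the assignment,
   so an artificial child access at the corresponding offset is created
   under A.  Later on, this allows the aggregate copy to be realized as
   copies between the individual scalar replacements.  */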
2393
2394 /* Propagate all subaccesses across assignment links. */
2395
2396 static void
2397 propagate_all_subaccesses (void)
2398 {
2399 while (work_queue_head)
2400 {
2401 struct access *racc = pop_access_from_work_queue ();
2402 struct assign_link *link;
2403
2404 gcc_assert (racc->first_link);
2405
2406 for (link = racc->first_link; link; link = link->next)
2407 {
2408 struct access *lacc = link->lacc;
2409
2410 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2411 continue;
2412 lacc = lacc->group_representative;
2413 if (propagate_subaccesses_across_link (lacc, racc)
2414 && lacc->first_link)
2415 add_access_to_work_queue (lacc);
2416 }
2417 }
2418 }
2419
2420 /* Go through all accesses collected throughout the (intraprocedural) analysis
2421 stage, exclude overlapping ones, identify representatives and build trees
2422 out of them, making decisions about scalarization on the way. Return true
2423 iff there are any to-be-scalarized variables after this stage. */
2424
2425 static bool
2426 analyze_all_variable_accesses (void)
2427 {
2428 int res = 0;
2429 bitmap tmp = BITMAP_ALLOC (NULL);
2430 bitmap_iterator bi;
2431 unsigned i, max_total_scalarization_size;
2432
2433 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2434 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2435
2436 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2437 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2438 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2439 {
2440 tree var = candidate (i);
2441
2442 if (TREE_CODE (var) == VAR_DECL
2443 && type_consists_of_records_p (TREE_TYPE (var)))
2444 {
2445 if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2446 <= max_total_scalarization_size)
2447 {
2448 completely_scalarize_var (var);
2449 if (dump_file && (dump_flags & TDF_DETAILS))
2450 {
2451 fprintf (dump_file, "Will attempt to totally scalarize ");
2452 print_generic_expr (dump_file, var, 0);
2453 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2454 }
2455 }
2456 else if (dump_file && (dump_flags & TDF_DETAILS))
2457 {
2458 fprintf (dump_file, "Too big to totally scalarize: ");
2459 print_generic_expr (dump_file, var, 0);
2460 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2461 }
2462 }
2463 }
2464
2465 bitmap_copy (tmp, candidate_bitmap);
2466 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2467 {
2468 tree var = candidate (i);
2469 struct access *access;
2470
2471 access = sort_and_splice_var_accesses (var);
2472 if (!access || !build_access_trees (access))
2473 disqualify_candidate (var,
2474 "No or inhibitingly overlapping accesses.");
2475 }
2476
2477 propagate_all_subaccesses ();
2478
2479 bitmap_copy (tmp, candidate_bitmap);
2480 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2481 {
2482 tree var = candidate (i);
2483 struct access *access = get_first_repr_for_decl (var);
2484
2485 if (analyze_access_trees (access))
2486 {
2487 res++;
2488 if (dump_file && (dump_flags & TDF_DETAILS))
2489 {
2490 fprintf (dump_file, "\nAccess trees for ");
2491 print_generic_expr (dump_file, var, 0);
2492 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2493 dump_access_tree (dump_file, access);
2494 fprintf (dump_file, "\n");
2495 }
2496 }
2497 else
2498 disqualify_candidate (var, "No scalar replacements to be created.");
2499 }
2500
2501 BITMAP_FREE (tmp);
2502
2503 if (res)
2504 {
2505 statistics_counter_event (cfun, "Scalarized aggregates", res);
2506 return true;
2507 }
2508 else
2509 return false;
2510 }
2511
2512 /* Generate statements copying scalar replacements of accesses within a subtree
2513 into or out of AGG. ACCESS, all its children, siblings and their children
2514 are to be processed. AGG is an aggregate type expression (can be a
2515 declaration but does not have to be, it can for example also be a mem_ref or
2516 a series of handled components). TOP_OFFSET is the offset of the processed
2517 subtree which has to be subtracted from offsets of individual accesses to
2518 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2519 replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
2520 otherwise copy all. GSI is a statement iterator used to place the new
2521 statements. WRITE should be true when the statements should write from AGG
2522 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2523 statements will be added after the current statement in GSI, they will be
2524 added before the statement otherwise. */
2525
2526 static void
2527 generate_subtree_copies (struct access *access, tree agg,
2528 HOST_WIDE_INT top_offset,
2529 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2530 gimple_stmt_iterator *gsi, bool write,
2531 bool insert_after, location_t loc)
2532 {
2533 do
2534 {
2535 if (chunk_size && access->offset >= start_offset + chunk_size)
2536 return;
2537
2538 if (access->grp_to_be_replaced
2539 && (chunk_size == 0
2540 || access->offset + access->size > start_offset))
2541 {
2542 tree expr, repl = get_access_replacement (access);
2543 gimple stmt;
2544
2545 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2546 access, gsi, insert_after);
2547
2548 if (write)
2549 {
2550 if (access->grp_partial_lhs)
2551 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2552 !insert_after,
2553 insert_after ? GSI_NEW_STMT
2554 : GSI_SAME_STMT);
2555 stmt = gimple_build_assign (repl, expr);
2556 }
2557 else
2558 {
2559 TREE_NO_WARNING (repl) = 1;
2560 if (access->grp_partial_lhs)
2561 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2562 !insert_after,
2563 insert_after ? GSI_NEW_STMT
2564 : GSI_SAME_STMT);
2565 stmt = gimple_build_assign (expr, repl);
2566 }
2567 gimple_set_location (stmt, loc);
2568
2569 if (insert_after)
2570 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2571 else
2572 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2573 update_stmt (stmt);
2574 sra_stats.subtree_copies++;
2575 }
2576 else if (write
2577 && access->grp_to_be_debug_replaced
2578 && (chunk_size == 0
2579 || access->offset + access->size > start_offset))
2580 {
2581 gimple ds;
2582 tree drhs = build_debug_ref_for_model (loc, agg,
2583 access->offset - top_offset,
2584 access);
2585 ds = gimple_build_debug_bind (get_access_replacement (access),
2586 drhs, gsi_stmt (*gsi));
2587 if (insert_after)
2588 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2589 else
2590 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2591 }
2592
2593 if (access->first_child)
2594 generate_subtree_copies (access->first_child, agg, top_offset,
2595 start_offset, chunk_size, gsi,
2596 write, insert_after, loc);
2597
2598 access = access->next_sibling;
2599 }
2600 while (access);
2601 }
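
/* An illustrative sketch of the generated statements (replacement names
   such as SR.1 are made up): for a candidate S with to-be-replaced
   children s.x and s.y, a call with WRITE set to true emits roughly

     SR.1 = s.x;
     SR.2 = s.y;

   i.e. the replacements are refreshed from the aggregate, while WRITE
   being false emits the reverse assignments s.x = SR.1; s.y = SR.2;
   flushing the replacements back into the aggregate.  */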
2602
2603 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2604 root of the subtree to be processed. GSI is the statement iterator used
2605 for inserting statements which are added after the current statement if
2606 INSERT_AFTER is true or before it otherwise. */
2607
2608 static void
2609 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2610 bool insert_after, location_t loc)
2611
2612 {
2613 struct access *child;
2614
2615 if (access->grp_to_be_replaced)
2616 {
2617 gimple stmt;
2618
2619 stmt = gimple_build_assign (get_access_replacement (access),
2620 build_zero_cst (access->type));
2621 if (insert_after)
2622 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2623 else
2624 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2625 update_stmt (stmt);
2626 gimple_set_location (stmt, loc);
2627 }
2628 else if (access->grp_to_be_debug_replaced)
2629 {
2630 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2631 build_zero_cst (access->type),
2632 gsi_stmt (*gsi));
2633 if (insert_after)
2634 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2635 else
2636 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2637 }
2638
2639 for (child = access->first_child; child; child = child->next_sibling)
2640 init_subtree_with_zero (child, gsi, insert_after, loc);
2641 }
2642
2643 /* Search for an access representative for the given expression EXPR and
2644 return it or NULL if it cannot be found. */
2645
2646 static struct access *
2647 get_access_for_expr (tree expr)
2648 {
2649 HOST_WIDE_INT offset, size, max_size;
2650 tree base;
2651
2652 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2653 a different size than the size of its argument and we need the latter
2654 one. */
2655 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2656 expr = TREE_OPERAND (expr, 0);
2657
2658 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2659 if (max_size == -1 || !DECL_P (base))
2660 return NULL;
2661
2662 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2663 return NULL;
2664
2665 return get_var_base_offset_size_access (base, offset, max_size);
2666 }
2667
2668 /* Replace the expression EXPR with a scalar replacement if there is one and
2669 generate other statements to do type conversion or subtree copying if
2670 necessary. GSI is used to place newly created statements, WRITE is true if
2671 the expression is being written to (it is on a LHS of a statement or output
2672 in an assembly statement). */
2673
2674 static bool
2675 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2676 {
2677 location_t loc;
2678 struct access *access;
2679 tree type, bfr;
2680
2681 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2682 {
2683 bfr = *expr;
2684 expr = &TREE_OPERAND (*expr, 0);
2685 }
2686 else
2687 bfr = NULL_TREE;
2688
2689 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2690 expr = &TREE_OPERAND (*expr, 0);
2691 access = get_access_for_expr (*expr);
2692 if (!access)
2693 return false;
2694 type = TREE_TYPE (*expr);
2695
2696 loc = gimple_location (gsi_stmt (*gsi));
2697 if (access->grp_to_be_replaced)
2698 {
2699 tree repl = get_access_replacement (access);
2700 /* If we replace a non-register typed access simply use the original
2701 access expression to extract the scalar component afterwards.
2702 This happens if scalarizing a function return value or parameter
2703 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2704 gcc.c-torture/compile/20011217-1.c.
2705
2706 We also want to use this when accessing a complex or vector which can
2707 be accessed as a different type too, potentially creating a need for
2708 type conversion (see PR42196) and when scalarized unions are involved
2709 in assembler statements (see PR42398). */
2710 if (!useless_type_conversion_p (type, access->type))
2711 {
2712 tree ref;
2713
2714 ref = build_ref_for_model (loc, access->base, access->offset, access,
2715 NULL, false);
2716
2717 if (write)
2718 {
2719 gimple stmt;
2720
2721 if (access->grp_partial_lhs)
2722 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2723 false, GSI_NEW_STMT);
2724 stmt = gimple_build_assign (repl, ref);
2725 gimple_set_location (stmt, loc);
2726 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2727 }
2728 else
2729 {
2730 gimple stmt;
2731
2732 if (access->grp_partial_lhs)
2733 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2734 true, GSI_SAME_STMT);
2735 stmt = gimple_build_assign (ref, repl);
2736 gimple_set_location (stmt, loc);
2737 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2738 }
2739 }
2740 else
2741 *expr = repl;
2742 sra_stats.exprs++;
2743 }
2744 else if (write && access->grp_to_be_debug_replaced)
2745 {
2746 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2747 NULL_TREE,
2748 gsi_stmt (*gsi));
2749 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2750 }
2751
2752 if (access->first_child)
2753 {
2754 HOST_WIDE_INT start_offset, chunk_size;
2755 if (bfr
2756 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2757 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2758 {
2759 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2760 start_offset = access->offset
2761 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2762 }
2763 else
2764 start_offset = chunk_size = 0;
2765
2766 generate_subtree_copies (access->first_child, access->base, 0,
2767 start_offset, chunk_size, gsi, write, write,
2768 loc);
2769 }
2770 return true;
2771 }
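
/* A small hypothetical example: if s.x has a scalar replacement SR.1 of a
   compatible type, a use of it as a call argument,

     foo (s.x);

   is simply rewritten to "foo (SR.1);".  If the types do not match usefully
   (for instance when a complex or vector is accessed as a different type,
   see the comment above), a separate load or store through a reference
   built by build_ref_for_model is emitted next to the statement
   instead.  */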
2772
2773 /* Where scalar replacements of the RHS have been written to when a replacement
2774 of the LHS of an assignment cannot be directly loaded from a replacement of
2775 the RHS. */
2776 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2777 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2778 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2779
2780 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2781 base aggregate if there are unscalarized data or directly to the LHS of the
2782 statement that is pointed to by GSI otherwise. */
2783
2784 static enum unscalarized_data_handling
2785 handle_unscalarized_data_in_subtree (struct access *top_racc,
2786 gimple_stmt_iterator *gsi)
2787 {
2788 if (top_racc->grp_unscalarized_data)
2789 {
2790 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2791 gsi, false, false,
2792 gimple_location (gsi_stmt (*gsi)));
2793 return SRA_UDH_RIGHT;
2794 }
2795 else
2796 {
2797 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2798 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2799 0, 0, gsi, false, false,
2800 gimple_location (gsi_stmt (*gsi)));
2801 return SRA_UDH_LEFT;
2802 }
2803 }
2804
2805
2806 /* Try to generate statements to load all sub-replacements in an access subtree
2807 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2808 If that is not possible, refresh the TOP_RACC base aggregate and load the
2809 accesses from it. LEFT_OFFSET is the offset of the whole left-hand side
2810 subtree being copied. NEW_GSI is a statement iterator for insertions after the
2811 original assignment, OLD_GSI is used to insert statements before the
2812 assignment. *REFRESHED keeps the information whether we have needed to
2813 refresh replacements of the LHS and from which side of the assignment this
2814 takes place. */
2815
2816 static void
2817 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2818 HOST_WIDE_INT left_offset,
2819 gimple_stmt_iterator *old_gsi,
2820 gimple_stmt_iterator *new_gsi,
2821 enum unscalarized_data_handling *refreshed)
2822 {
2823 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2824 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2825 {
2826 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2827
2828 if (lacc->grp_to_be_replaced)
2829 {
2830 struct access *racc;
2831 gimple stmt;
2832 tree rhs;
2833
2834 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2835 if (racc && racc->grp_to_be_replaced)
2836 {
2837 rhs = get_access_replacement (racc);
2838 if (!useless_type_conversion_p (lacc->type, racc->type))
2839 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2840
2841 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2842 rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2843 true, GSI_SAME_STMT);
2844 }
2845 else
2846 {
2847 /* No suitable access on the right hand side, need to load from
2848 the aggregate. See if we have to update it first... */
2849 if (*refreshed == SRA_UDH_NONE)
2850 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2851 old_gsi);
2852
2853 if (*refreshed == SRA_UDH_LEFT)
2854 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2855 new_gsi, true);
2856 else
2857 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2858 new_gsi, true);
2859 if (lacc->grp_partial_lhs)
2860 rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2861 false, GSI_NEW_STMT);
2862 }
2863
2864 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2865 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2866 gimple_set_location (stmt, loc);
2867 update_stmt (stmt);
2868 sra_stats.subreplacements++;
2869 }
2870 else
2871 {
2872 if (*refreshed == SRA_UDH_NONE
2873 && lacc->grp_read && !lacc->grp_covered)
2874 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2875 old_gsi);
2876 if (lacc && lacc->grp_to_be_debug_replaced)
2877 {
2878 gimple ds;
2879 tree drhs;
2880 struct access *racc = find_access_in_subtree (top_racc, offset,
2881 lacc->size);
2882
2883 if (racc && racc->grp_to_be_replaced)
2884 drhs = get_access_replacement (racc);
2885 else if (*refreshed == SRA_UDH_LEFT)
2886 drhs = build_debug_ref_for_model (loc, lacc->base, lacc->offset,
2887 lacc);
2888 else if (*refreshed == SRA_UDH_RIGHT)
2889 drhs = build_debug_ref_for_model (loc, top_racc->base, offset,
2890 lacc);
2891 else
2892 drhs = NULL_TREE;
2893 ds = gimple_build_debug_bind (get_access_replacement (lacc),
2894 drhs, gsi_stmt (*old_gsi));
2895 gsi_insert_after (new_gsi, ds, GSI_NEW_STMT);
2896 }
2897 }
2898
2899 if (lacc->first_child)
2900 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2901 old_gsi, new_gsi, refreshed);
2902 }
2903 }
2904
2905 /* Result code for SRA assignment modification. */
2906 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2907 SRA_AM_MODIFIED, /* stmt changed but not
2908 removed */
2909 SRA_AM_REMOVED }; /* stmt eliminated */
2910
2911 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2912 to the assignment and GSI is the statement iterator pointing at it. Returns
2913 the same values as sra_modify_assign. */
2914
2915 static enum assignment_mod_result
2916 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2917 {
2918 tree lhs = gimple_assign_lhs (*stmt);
2919 struct access *acc;
2920 location_t loc;
2921
2922 acc = get_access_for_expr (lhs);
2923 if (!acc)
2924 return SRA_AM_NONE;
2925
2926 if (gimple_clobber_p (*stmt))
2927 {
2928 /* Remove clobbers of fully scalarized variables, otherwise
2929 do nothing. */
2930 if (acc->grp_covered)
2931 {
2932 unlink_stmt_vdef (*stmt);
2933 gsi_remove (gsi, true);
2934 release_defs (*stmt);
2935 return SRA_AM_REMOVED;
2936 }
2937 else
2938 return SRA_AM_NONE;
2939 }
2940
2941 loc = gimple_location (*stmt);
2942 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2943 {
2944 /* I have never seen this code path trigger but if it can happen the
2945 following should handle it gracefully. */
2946 if (access_has_children_p (acc))
2947 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2948 true, true, loc);
2949 return SRA_AM_MODIFIED;
2950 }
2951
2952 if (acc->grp_covered)
2953 {
2954 init_subtree_with_zero (acc, gsi, false, loc);
2955 unlink_stmt_vdef (*stmt);
2956 gsi_remove (gsi, true);
2957 release_defs (*stmt);
2958 return SRA_AM_REMOVED;
2959 }
2960 else
2961 {
2962 init_subtree_with_zero (acc, gsi, true, loc);
2963 return SRA_AM_MODIFIED;
2964 }
2965 }
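
/* For instance (a hypothetical GIMPLE-level fragment), a zeroing
   constructor store to a fully scalarized variable,

     s = {};

   is replaced by zero-assignments to the individual replacements, roughly

     SR.1 = 0;
     SR.2 = 0.0;

   and the original statement is removed; if S is only partially covered by
   replacements, the zero-assignments are added but the original store is
   kept.  */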
2966
2967 /* Create and return a new suitable default definition SSA_NAME for RACC which
2968 is an access describing an uninitialized part of an aggregate that is being
2969 loaded. */
2970
2971 static tree
2972 get_repl_default_def_ssa_name (struct access *racc)
2973 {
2974 return get_or_create_ssa_default_def (cfun, get_access_replacement (racc));
2975 }
2976
2977 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2978 somewhere in it. */
2979
2980 static inline bool
2981 contains_bitfld_comp_ref_p (const_tree ref)
2982 {
2983 while (handled_component_p (ref))
2984 {
2985 if (TREE_CODE (ref) == COMPONENT_REF
2986 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2987 return true;
2988 ref = TREE_OPERAND (ref, 0);
2989 }
2990
2991 return false;
2992 }
2993
2994 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2995 bit-field field declaration somewhere in it. */
2996
2997 static inline bool
2998 contains_vce_or_bfcref_p (const_tree ref)
2999 {
3000 while (handled_component_p (ref))
3001 {
3002 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3003 || (TREE_CODE (ref) == COMPONENT_REF
3004 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3005 return true;
3006 ref = TREE_OPERAND (ref, 0);
3007 }
3008
3009 return false;
3010 }
3011
3012 /* Examine both sides of the assignment statement pointed to by STMT, replace
3013 them with a scalar replacement if there is one and generate copying of
3014 replacements if scalarized aggregates have been used in the assignment. GSI
3015 is used to hold generated statements for type conversions and subtree
3016 copying. */
3017
3018 static enum assignment_mod_result
3019 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3020 {
3021 struct access *lacc, *racc;
3022 tree lhs, rhs;
3023 bool modify_this_stmt = false;
3024 bool force_gimple_rhs = false;
3025 location_t loc;
3026 gimple_stmt_iterator orig_gsi = *gsi;
3027
3028 if (!gimple_assign_single_p (*stmt))
3029 return SRA_AM_NONE;
3030 lhs = gimple_assign_lhs (*stmt);
3031 rhs = gimple_assign_rhs1 (*stmt);
3032
3033 if (TREE_CODE (rhs) == CONSTRUCTOR)
3034 return sra_modify_constructor_assign (stmt, gsi);
3035
3036 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3037 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3038 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3039 {
3040 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
3041 gsi, false);
3042 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
3043 gsi, true);
3044 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3045 }
3046
3047 lacc = get_access_for_expr (lhs);
3048 racc = get_access_for_expr (rhs);
3049 if (!lacc && !racc)
3050 return SRA_AM_NONE;
3051
3052 loc = gimple_location (*stmt);
3053 if (lacc && lacc->grp_to_be_replaced)
3054 {
3055 lhs = get_access_replacement (lacc);
3056 gimple_assign_set_lhs (*stmt, lhs);
3057 modify_this_stmt = true;
3058 if (lacc->grp_partial_lhs)
3059 force_gimple_rhs = true;
3060 sra_stats.exprs++;
3061 }
3062
3063 if (racc && racc->grp_to_be_replaced)
3064 {
3065 rhs = get_access_replacement (racc);
3066 modify_this_stmt = true;
3067 if (racc->grp_partial_lhs)
3068 force_gimple_rhs = true;
3069 sra_stats.exprs++;
3070 }
3071 else if (racc
3072 && !racc->grp_unscalarized_data
3073 && TREE_CODE (lhs) == SSA_NAME
3074 && !access_has_replacements_p (racc))
3075 {
3076 rhs = get_repl_default_def_ssa_name (racc);
3077 modify_this_stmt = true;
3078 sra_stats.exprs++;
3079 }
3080
3081 if (modify_this_stmt)
3082 {
3083 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3084 {
3085 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3086 ??? This should move to fold_stmt which we simply should
3087 call after building a VIEW_CONVERT_EXPR here. */
3088 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3089 && !contains_bitfld_comp_ref_p (lhs))
3090 {
3091 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3092 gimple_assign_set_lhs (*stmt, lhs);
3093 }
3094 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3095 && !contains_vce_or_bfcref_p (rhs))
3096 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3097
3098 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3099 {
3100 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3101 rhs);
3102 if (is_gimple_reg_type (TREE_TYPE (lhs))
3103 && TREE_CODE (lhs) != SSA_NAME)
3104 force_gimple_rhs = true;
3105 }
3106 }
3107 }
3108
3109 if (lacc && lacc->grp_to_be_debug_replaced)
3110 {
3111 gimple ds = gimple_build_debug_bind (get_access_replacement (lacc),
3112 unshare_expr (rhs), *stmt);
3113 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3114 }
3115
3116 /* From this point on, the function deals with assignments between
3117 aggregates when at least one has scalar reductions of some of its
3118 components. There are three possible scenarios: 1) both the LHS and RHS have
3119 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3120
3121 In the first case, we would like to load the LHS components from RHS
3122 components whenever possible. If that is not possible, we would like to
3123 read it directly from the RHS (after updating it by storing in it its own
3124 components). If there are some necessary unscalarized data in the LHS,
3125 those will be loaded by the original assignment too. If neither of these
3126 cases happen, the original statement can be removed. Most of this is done
3127 by load_assign_lhs_subreplacements.
3128
3129 In the second case, we would like to store all RHS scalarized components
3130 directly into LHS and if they cover the aggregate completely, remove the
3131 statement too. In the third case, we want the LHS components to be loaded
3132 directly from the RHS (DSE will remove the original statement if it
3133 becomes redundant).
3134
3135 This is a bit complex but manageable when types match and when unions do
3136 not cause confusion in a way that we cannot really load a component of LHS
3137 from the RHS or vice versa (the access representing this level can have
3138 subaccesses that are accessible only through a different union field at a
3139 higher level - different from the one used in the examined expression).
3140 Unions are fun.
3141
3142 Therefore, I specially handle a fourth case, happening when there is a
3143 specific type cast or it is impossible to locate a scalarized subaccess on
3144 the other side of the expression. If that happens, I simply "refresh" the
3145 RHS by storing its scalarized components into it, leave the original statement
3146 there to do the copying and then load the scalar replacements of the LHS.
3147 This is what the first branch does. */
3148
3149 if (modify_this_stmt
3150 || gimple_has_volatile_ops (*stmt)
3151 || contains_vce_or_bfcref_p (rhs)
3152 || contains_vce_or_bfcref_p (lhs))
3153 {
3154 if (access_has_children_p (racc))
3155 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3156 gsi, false, false, loc);
3157 if (access_has_children_p (lacc))
3158 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3159 gsi, true, true, loc);
3160 sra_stats.separate_lhs_rhs_handling++;
3161
3162 /* This gimplification must be done after generate_subtree_copies,
3163 lest we insert the subtree copies in the middle of the gimplified
3164 sequence. */
3165 if (force_gimple_rhs)
3166 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3167 true, GSI_SAME_STMT);
3168 if (gimple_assign_rhs1 (*stmt) != rhs)
3169 {
3170 modify_this_stmt = true;
3171 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3172 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3173 }
3174
3175 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3176 }
3177 else
3178 {
3179 if (access_has_children_p (lacc)
3180 && access_has_children_p (racc)
3181 /* When an access represents an unscalarizable region, it usually
3182 represents accesses with variable offset and thus must not be used
3183 to generate new memory accesses. */
3184 && !lacc->grp_unscalarizable_region
3185 && !racc->grp_unscalarizable_region)
3186 {
3187 gimple_stmt_iterator orig_gsi = *gsi;
3188 enum unscalarized_data_handling refreshed;
3189
3190 if (lacc->grp_read && !lacc->grp_covered)
3191 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3192 else
3193 refreshed = SRA_UDH_NONE;
3194
3195 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3196 &orig_gsi, gsi, &refreshed);
3197 if (refreshed != SRA_UDH_RIGHT)
3198 {
3199 gsi_next (gsi);
3200 unlink_stmt_vdef (*stmt);
3201 gsi_remove (&orig_gsi, true);
3202 release_defs (*stmt);
3203 sra_stats.deleted++;
3204 return SRA_AM_REMOVED;
3205 }
3206 }
3207 else
3208 {
3209 if (access_has_children_p (racc)
3210 && !racc->grp_unscalarized_data)
3211 {
3212 if (dump_file)
3213 {
3214 fprintf (dump_file, "Removing load: ");
3215 print_gimple_stmt (dump_file, *stmt, 0, 0);
3216 }
3217 generate_subtree_copies (racc->first_child, lhs,
3218 racc->offset, 0, 0, gsi,
3219 false, false, loc);
3220 gcc_assert (*stmt == gsi_stmt (*gsi));
3221 unlink_stmt_vdef (*stmt);
3222 gsi_remove (gsi, true);
3223 release_defs (*stmt);
3224 sra_stats.deleted++;
3225 return SRA_AM_REMOVED;
3226 }
3227 /* Restore the aggregate RHS from its components so the
3228 prevailing aggregate copy does the right thing. */
3229 if (access_has_children_p (racc))
3230 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3231 gsi, false, false, loc);
3232 /* Re-load the components of the aggregate copy destination.
3233 But use the RHS aggregate to load from to expose more
3234 optimization opportunities. */
3235 if (access_has_children_p (lacc))
3236 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3237 0, 0, gsi, true, true, loc);
3238 }
3239
3240 return SRA_AM_NONE;
3241 }
3242 }
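
/* Putting the above together on a small hypothetical example where D is
   not an SRA candidate but S is fully scalarized with made-up replacements
   SR.1 and SR.2, the sequence

     s.x = 1;
     s.y = 2;
     d = s;

   becomes roughly

     SR.1 = 1;
     SR.2 = 2;
     d.x = SR.1;
     d.y = SR.2;

   and the original aggregate copy is deleted because S has no
   unscalarized data.  */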
3243
3244 /* Traverse the function body and make all modifications as decided in
3245 analyze_all_variable_accesses. Return true iff the CFG has been
3246 changed. */
3247
3248 static bool
3249 sra_modify_function_body (void)
3250 {
3251 bool cfg_changed = false;
3252 basic_block bb;
3253
3254 FOR_EACH_BB (bb)
3255 {
3256 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3257 while (!gsi_end_p (gsi))
3258 {
3259 gimple stmt = gsi_stmt (gsi);
3260 enum assignment_mod_result assign_result;
3261 bool modified = false, deleted = false;
3262 tree *t;
3263 unsigned i;
3264
3265 switch (gimple_code (stmt))
3266 {
3267 case GIMPLE_RETURN:
3268 t = gimple_return_retval_ptr (stmt);
3269 if (*t != NULL_TREE)
3270 modified |= sra_modify_expr (t, &gsi, false);
3271 break;
3272
3273 case GIMPLE_ASSIGN:
3274 assign_result = sra_modify_assign (&stmt, &gsi);
3275 modified |= assign_result == SRA_AM_MODIFIED;
3276 deleted = assign_result == SRA_AM_REMOVED;
3277 break;
3278
3279 case GIMPLE_CALL:
3280 /* Operands must be processed before the lhs. */
3281 for (i = 0; i < gimple_call_num_args (stmt); i++)
3282 {
3283 t = gimple_call_arg_ptr (stmt, i);
3284 modified |= sra_modify_expr (t, &gsi, false);
3285 }
3286
3287 if (gimple_call_lhs (stmt))
3288 {
3289 t = gimple_call_lhs_ptr (stmt);
3290 modified |= sra_modify_expr (t, &gsi, true);
3291 }
3292 break;
3293
3294 case GIMPLE_ASM:
3295 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3296 {
3297 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3298 modified |= sra_modify_expr (t, &gsi, false);
3299 }
3300 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3301 {
3302 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3303 modified |= sra_modify_expr (t, &gsi, true);
3304 }
3305 break;
3306
3307 default:
3308 break;
3309 }
3310
3311 if (modified)
3312 {
3313 update_stmt (stmt);
3314 if (maybe_clean_eh_stmt (stmt)
3315 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3316 cfg_changed = true;
3317 }
3318 if (!deleted)
3319 gsi_next (&gsi);
3320 }
3321 }
3322
3323 return cfg_changed;
3324 }
3325
3326 /* Generate statements initializing scalar replacements of parts of function
3327 parameters. */
3328
3329 static void
3330 initialize_parameter_reductions (void)
3331 {
3332 gimple_stmt_iterator gsi;
3333 gimple_seq seq = NULL;
3334 tree parm;
3335
3336 gsi = gsi_start (seq);
3337 for (parm = DECL_ARGUMENTS (current_function_decl);
3338 parm;
3339 parm = DECL_CHAIN (parm))
3340 {
3341 vec<access_p> *access_vec;
3342 struct access *access;
3343
3344 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3345 continue;
3346 access_vec = get_base_access_vector (parm);
3347 if (!access_vec)
3348 continue;
3349
3350 for (access = (*access_vec)[0];
3351 access;
3352 access = access->next_grp)
3353 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3354 EXPR_LOCATION (parm));
3355 }
3356
3357 seq = gsi_seq (gsi);
3358 if (seq)
3359 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3360 }
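
/* Illustratively, for a scalarized aggregate parameter P with replacements
   (again using made-up names) the sequence inserted on the edge out of the
   entry block looks roughly like

     SR.3 = p.x;
     SR.4 = p.y;

   so that the scalar replacements start out holding the values the caller
   passed in.  */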
3361
3362 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3363 it reveals there are components of some aggregates to be scalarized, it runs
3364 the required transformations. */
3365 static unsigned int
3366 perform_intra_sra (void)
3367 {
3368 int ret = 0;
3369 sra_initialize ();
3370
3371 if (!find_var_candidates ())
3372 goto out;
3373
3374 if (!scan_function ())
3375 goto out;
3376
3377 if (!analyze_all_variable_accesses ())
3378 goto out;
3379
3380 if (sra_modify_function_body ())
3381 ret = TODO_update_ssa | TODO_cleanup_cfg;
3382 else
3383 ret = TODO_update_ssa;
3384 initialize_parameter_reductions ();
3385
3386 statistics_counter_event (cfun, "Scalar replacements created",
3387 sra_stats.replacements);
3388 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3389 statistics_counter_event (cfun, "Subtree copy stmts",
3390 sra_stats.subtree_copies);
3391 statistics_counter_event (cfun, "Subreplacement stmts",
3392 sra_stats.subreplacements);
3393 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3394 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3395 sra_stats.separate_lhs_rhs_handling);
3396
3397 out:
3398 sra_deinitialize ();
3399 return ret;
3400 }
3401
3402 /* Perform early intraprocedural SRA. */
3403 static unsigned int
3404 early_intra_sra (void)
3405 {
3406 sra_mode = SRA_MODE_EARLY_INTRA;
3407 return perform_intra_sra ();
3408 }
3409
3410 /* Perform "late" intraprocedural SRA. */
3411 static unsigned int
3412 late_intra_sra (void)
3413 {
3414 sra_mode = SRA_MODE_INTRA;
3415 return perform_intra_sra ();
3416 }
3417
3418
3419 static bool
3420 gate_intra_sra (void)
3421 {
3422 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3423 }
3424
3425
3426 struct gimple_opt_pass pass_sra_early =
3427 {
3428 {
3429 GIMPLE_PASS,
3430 "esra", /* name */
3431 OPTGROUP_NONE, /* optinfo_flags */
3432 gate_intra_sra, /* gate */
3433 early_intra_sra, /* execute */
3434 NULL, /* sub */
3435 NULL, /* next */
3436 0, /* static_pass_number */
3437 TV_TREE_SRA, /* tv_id */
3438 PROP_cfg | PROP_ssa, /* properties_required */
3439 0, /* properties_provided */
3440 0, /* properties_destroyed */
3441 0, /* todo_flags_start */
3442 TODO_update_ssa
3443 | TODO_ggc_collect
3444 | TODO_verify_ssa /* todo_flags_finish */
3445 }
3446 };
3447
3448 struct gimple_opt_pass pass_sra =
3449 {
3450 {
3451 GIMPLE_PASS,
3452 "sra", /* name */
3453 OPTGROUP_NONE, /* optinfo_flags */
3454 gate_intra_sra, /* gate */
3455 late_intra_sra, /* execute */
3456 NULL, /* sub */
3457 NULL, /* next */
3458 0, /* static_pass_number */
3459 TV_TREE_SRA, /* tv_id */
3460 PROP_cfg | PROP_ssa, /* properties_required */
3461 0, /* properties_provided */
3462 0, /* properties_destroyed */
3463 TODO_update_address_taken, /* todo_flags_start */
3464 TODO_update_ssa
3465 | TODO_ggc_collect
3466 | TODO_verify_ssa /* todo_flags_finish */
3467 }
3468 };
3469
3470
3471 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3472 parameter. */
3473
3474 static bool
3475 is_unused_scalar_param (tree parm)
3476 {
3477 tree name;
3478 return (is_gimple_reg (parm)
3479 && (!(name = ssa_default_def (cfun, parm))
3480 || has_zero_uses (name)));
3481 }
3482
3483 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3484 examine whether there are any direct or otherwise infeasible ones. If so,
3485 return true, otherwise return false. PARM must be a gimple register with a
3486 non-NULL default definition. */
3487
3488 static bool
3489 ptr_parm_has_direct_uses (tree parm)
3490 {
3491 imm_use_iterator ui;
3492 gimple stmt;
3493 tree name = ssa_default_def (cfun, parm);
3494 bool ret = false;
3495
3496 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3497 {
3498 int uses_ok = 0;
3499 use_operand_p use_p;
3500
3501 if (is_gimple_debug (stmt))
3502 continue;
3503
3504 /* Valid uses include dereferences on the lhs and the rhs. */
3505 if (gimple_has_lhs (stmt))
3506 {
3507 tree lhs = gimple_get_lhs (stmt);
3508 while (handled_component_p (lhs))
3509 lhs = TREE_OPERAND (lhs, 0);
3510 if (TREE_CODE (lhs) == MEM_REF
3511 && TREE_OPERAND (lhs, 0) == name
3512 && integer_zerop (TREE_OPERAND (lhs, 1))
3513 && types_compatible_p (TREE_TYPE (lhs),
3514 TREE_TYPE (TREE_TYPE (name)))
3515 && !TREE_THIS_VOLATILE (lhs))
3516 uses_ok++;
3517 }
3518 if (gimple_assign_single_p (stmt))
3519 {
3520 tree rhs = gimple_assign_rhs1 (stmt);
3521 while (handled_component_p (rhs))
3522 rhs = TREE_OPERAND (rhs, 0);
3523 if (TREE_CODE (rhs) == MEM_REF
3524 && TREE_OPERAND (rhs, 0) == name
3525 && integer_zerop (TREE_OPERAND (rhs, 1))
3526 && types_compatible_p (TREE_TYPE (rhs),
3527 TREE_TYPE (TREE_TYPE (name)))
3528 && !TREE_THIS_VOLATILE (rhs))
3529 uses_ok++;
3530 }
3531 else if (is_gimple_call (stmt))
3532 {
3533 unsigned i;
3534 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3535 {
3536 tree arg = gimple_call_arg (stmt, i);
3537 while (handled_component_p (arg))
3538 arg = TREE_OPERAND (arg, 0);
3539 if (TREE_CODE (arg) == MEM_REF
3540 && TREE_OPERAND (arg, 0) == name
3541 && integer_zerop (TREE_OPERAND (arg, 1))
3542 && types_compatible_p (TREE_TYPE (arg),
3543 TREE_TYPE (TREE_TYPE (name)))
3544 && !TREE_THIS_VOLATILE (arg))
3545 uses_ok++;
3546 }
3547 }
3548
3549 /* If the number of valid uses does not match the number of
3550 uses in this stmt there is an unhandled use. */
3551 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3552 --uses_ok;
3553
3554 if (uses_ok != 0)
3555 ret = true;
3556
3557 if (ret)
3558 BREAK_FROM_IMM_USE_STMT (ui);
3559 }
3560
3561 return ret;
3562 }
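
/* For example (hypothetical), for a parameter declared as "struct S *p",
   statements like "... = p->x" or "p->x = ..." only dereference P and are
   fine, whereas passing P itself to another function, storing the pointer
   into memory or comparing it with another pointer are direct uses; any of
   those makes this predicate return true, which in turn makes
   find_param_candidates skip P.  */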
3563
3564 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3565 them in candidate_bitmap. Note that these do not necessarily include
3566 parameter which are unused and thus can be removed. Return true iff any
3567 such candidate has been found. */
3568
3569 static bool
3570 find_param_candidates (void)
3571 {
3572 tree parm;
3573 int count = 0;
3574 bool ret = false;
3575 const char *msg;
3576
3577 for (parm = DECL_ARGUMENTS (current_function_decl);
3578 parm;
3579 parm = DECL_CHAIN (parm))
3580 {
3581 tree type = TREE_TYPE (parm);
3582 void **slot;
3583
3584 count++;
3585
3586 if (TREE_THIS_VOLATILE (parm)
3587 || TREE_ADDRESSABLE (parm)
3588 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3589 continue;
3590
3591 if (is_unused_scalar_param (parm))
3592 {
3593 ret = true;
3594 continue;
3595 }
3596
3597 if (POINTER_TYPE_P (type))
3598 {
3599 type = TREE_TYPE (type);
3600
3601 if (TREE_CODE (type) == FUNCTION_TYPE
3602 || TYPE_VOLATILE (type)
3603 || (TREE_CODE (type) == ARRAY_TYPE
3604 && TYPE_NONALIASED_COMPONENT (type))
3605 || !is_gimple_reg (parm)
3606 || is_va_list_type (type)
3607 || ptr_parm_has_direct_uses (parm))
3608 continue;
3609 }
3610 else if (!AGGREGATE_TYPE_P (type))
3611 continue;
3612
3613 if (!COMPLETE_TYPE_P (type)
3614 || !host_integerp (TYPE_SIZE (type), 1)
3615 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3616 || (AGGREGATE_TYPE_P (type)
3617 && type_internals_preclude_sra_p (type, &msg)))
3618 continue;
3619
3620 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3621 slot = htab_find_slot_with_hash (candidates, parm,
3622 DECL_UID (parm), INSERT);
3623 *slot = (void *) parm;
3624
3625 ret = true;
3626 if (dump_file && (dump_flags & TDF_DETAILS))
3627 {
3628 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3629 print_generic_expr (dump_file, parm, 0);
3630 fprintf (dump_file, "\n");
3631 }
3632 }
3633
3634 func_param_count = count;
3635 return ret;
3636 }
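/* Illustrative sketch (hypothetical code, not part of the pass): how the
   tests above classify a few parameter kinds.  */
#if 0
struct big { long a; long b; long c; };

static long
classify (struct big by_val,   /* aggregate passed by value: candidate,
                                  subject to the remaining checks  */
          struct big *by_ref,  /* pointer to an aggregate: candidate as long
                                  as the pointer has no direct uses  */
          volatile long noisy, /* TREE_THIS_VOLATILE: never a candidate  */
          long unused)         /* unused scalar: noted so it can be dropped  */
{
  return by_val.a + by_ref->b;
}
#endif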
3637
3638 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3639 maybe_modified. */
3640
3641 static bool
3642 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3643 void *data)
3644 {
3645 struct access *repr = (struct access *) data;
3646
3647 repr->grp_maybe_modified = 1;
3648 return true;
3649 }
3650
3651 /* Analyze what representatives (in linked lists accessible from
3652 REPRESENTATIVES) can be modified by side effects of statements in the
3653 current function. */
3654
3655 static void
3656 analyze_modified_params (vec<access_p> representatives)
3657 {
3658 int i;
3659
3660 for (i = 0; i < func_param_count; i++)
3661 {
3662 struct access *repr;
3663
3664 for (repr = representatives[i];
3665 repr;
3666 repr = repr->next_grp)
3667 {
3668 struct access *access;
3669 bitmap visited;
3670 ao_ref ar;
3671
3672 if (no_accesses_p (repr))
3673 continue;
3674 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3675 || repr->grp_maybe_modified)
3676 continue;
3677
3678 ao_ref_init (&ar, repr->expr);
3679 visited = BITMAP_ALLOC (NULL);
3680 for (access = repr; access; access = access->next_sibling)
3681 {
3682 /* All accesses are read ones, otherwise grp_maybe_modified would
3683 be trivially set. */
3684 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3685 mark_maybe_modified, repr, &visited);
3686 if (repr->grp_maybe_modified)
3687 break;
3688 }
3689 BITMAP_FREE (visited);
3690 }
3691 }
3692 }
3693
3694 /* Propagate distances in bb_dereferences in the opposite direction to the
3695 control flow edges, in each step storing the maximum of the current value
3696 and the minimum over all successors. These steps are repeated until the
3697 table stabilizes. Note that BBs which might terminate the function (according
3698 to the final_bbs bitmap) are never updated in this way. */
3699
3700 static void
3701 propagate_dereference_distances (void)
3702 {
3703 vec<basic_block> queue;
3704 basic_block bb;
3705
3706 queue.create (last_basic_block_for_function (cfun));
3707 queue.quick_push (ENTRY_BLOCK_PTR);
3708 FOR_EACH_BB (bb)
3709 {
3710 queue.quick_push (bb);
3711 bb->aux = bb;
3712 }
3713
3714 while (!queue.is_empty ())
3715 {
3716 edge_iterator ei;
3717 edge e;
3718 bool change = false;
3719 int i;
3720
3721 bb = queue.pop ();
3722 bb->aux = NULL;
3723
3724 if (bitmap_bit_p (final_bbs, bb->index))
3725 continue;
3726
3727 for (i = 0; i < func_param_count; i++)
3728 {
3729 int idx = bb->index * func_param_count + i;
3730 bool first = true;
3731 HOST_WIDE_INT inh = 0;
3732
3733 FOR_EACH_EDGE (e, ei, bb->succs)
3734 {
3735 int succ_idx = e->dest->index * func_param_count + i;
3736
3737 if (e->src == EXIT_BLOCK_PTR)
3738 continue;
3739
3740 if (first)
3741 {
3742 first = false;
3743 inh = bb_dereferences [succ_idx];
3744 }
3745 else if (bb_dereferences [succ_idx] < inh)
3746 inh = bb_dereferences [succ_idx];
3747 }
3748
3749 if (!first && bb_dereferences[idx] < inh)
3750 {
3751 bb_dereferences[idx] = inh;
3752 change = true;
3753 }
3754 }
3755
3756 if (change && !bitmap_bit_p (final_bbs, bb->index))
3757 FOR_EACH_EDGE (e, ei, bb->preds)
3758 {
3759 if (e->src->aux)
3760 continue;
3761
3762 e->src->aux = e->src;
3763 queue.quick_push (e->src);
3764 }
3765 }
3766
3767 queue.release ();
3768 }
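/* Illustrative sketch (hypothetical code, not part of the pass): on the
   diamond-shaped CFG of the function below only the then-branch dereferences
   P, so the minimum over the successors of the entry block is zero and the
   propagated entry value for P stays zero; the dereference is not certain on
   every path.  If the else-branch dereferenced P as well, the entry value
   would become the size of *P and moving the dereference to the callers
   would be known to be safe.  */
#if 0
static long
maybe_deref (long *p, int cond)
{
  if (cond)
    return *p;          /* dereference happens only on this path  */
  return 0;
}
#endif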
3769
3770 /* Dump a dereferences TABLE with heading STR to file F. */
3771
3772 static void
3773 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3774 {
3775 basic_block bb;
3776
3777 fprintf (f, "%s", str);
3778 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3779 {
3780 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3781 if (bb != EXIT_BLOCK_PTR)
3782 {
3783 int i;
3784 for (i = 0; i < func_param_count; i++)
3785 {
3786 int idx = bb->index * func_param_count + i;
3787 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3788 }
3789 }
3790 fprintf (f, "\n");
3791 }
3792 fprintf (f, "\n");
3793 }
3794
3795 /* Determine which (parts of) parameters passed by reference and not assigned
3796 to are not certainly dereferenced in this function, so that the dereference
3797 cannot be safely moved to the caller without potentially introducing a
3798 segfault. Mark such REPRESENTATIVES as
3799 grp_not_necessarilly_dereferenced.
3800
3801 The maximum dereference "distance," i.e. the offset plus size of the accessed
3802 part, is calculated for each pointer parameter rather than a simple boolean,
3803 in order to handle cases when only a fraction of the whole aggregate is
3804 allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for an
3805 example).
3806
3807 The maximum dereference distances for each pointer parameter and BB are
3808 already stored in bb_dereferences. This routine simply propagates these
3809 values upwards by propagate_dereference_distances and then compares the
3810 distances of individual parameters in the ENTRY BB to the equivalent
3811 distances of each representative of a (fraction of a) parameter. */
3812
3813 static void
3814 analyze_caller_dereference_legality (vec<access_p> representatives)
3815 {
3816 int i;
3817
3818 if (dump_file && (dump_flags & TDF_DETAILS))
3819 dump_dereferences_table (dump_file,
3820 "Dereference table before propagation:\n",
3821 bb_dereferences);
3822
3823 propagate_dereference_distances ();
3824
3825 if (dump_file && (dump_flags & TDF_DETAILS))
3826 dump_dereferences_table (dump_file,
3827 "Dereference table after propagation:\n",
3828 bb_dereferences);
3829
3830 for (i = 0; i < func_param_count; i++)
3831 {
3832 struct access *repr = representatives[i];
3833 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3834
3835 if (!repr || no_accesses_p (repr))
3836 continue;
3837
3838 do
3839 {
3840 if ((repr->offset + repr->size) > bb_dereferences[idx])
3841 repr->grp_not_necessarilly_dereferenced = 1;
3842 repr = repr->next_grp;
3843 }
3844 while (repr);
3845 }
3846 }
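/* Illustrative sketch (hypothetical code, not part of the pass): why the
   distance is tracked as offset plus size instead of a simple flag.  The
   callee below only ever reads the first field, and a caller may pass a
   pointer to an allocation that covers only that field (in the spirit of
   testsuite/gcc.c-torture/execute/ipa-sra-2.c).  Loading the whole structure
   in the caller could read past the allocation, while loading just the first
   32 bits cannot.  */
#if 0
struct two { int first; int second; };

static int
get_first (struct two *p)
{
  return p->first;      /* dereference distance: 32 bits from offset 0  */
}
#endif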
3847
3848 /* Return the representative access for the parameter declaration PARM if it is
3849 a scalar passed by reference which is not written to and the pointer value
3850 is not used directly. Thus, if it is legal to dereference it in the caller
3851 and we can rule out modifications through aliases, such a parameter should be
3852 turned into one passed by value. Return NULL otherwise. */
3853
3854 static struct access *
3855 unmodified_by_ref_scalar_representative (tree parm)
3856 {
3857 int i, access_count;
3858 struct access *repr;
3859 vec<access_p> *access_vec;
3860
3861 access_vec = get_base_access_vector (parm);
3862 gcc_assert (access_vec);
3863 repr = (*access_vec)[0];
3864 if (repr->write)
3865 return NULL;
3866 repr->group_representative = repr;
3867
3868 access_count = access_vec->length ();
3869 for (i = 1; i < access_count; i++)
3870 {
3871 struct access *access = (*access_vec)[i];
3872 if (access->write)
3873 return NULL;
3874 access->group_representative = repr;
3875 access->next_sibling = repr->next_sibling;
3876 repr->next_sibling = access;
3877 }
3878
3879 repr->grp_read = 1;
3880 repr->grp_scalar_ptr = 1;
3881 return repr;
3882 }
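/* Illustrative sketch (hypothetical code, not part of the pass): the pattern
   the function above looks for.  N is a scalar passed by reference that is
   only read and whose pointer value is never used directly.  If the later
   analyses show that the dereference is certain and that no aliased
   modification can occur, the clone receives the value itself (an int) and
   callers are adjusted to pass *n.  */
#if 0
static int
twice (const int *n)
{
  return *n + *n;
}
#endif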
3883
3884 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
3885 associated with. REQ_ALIGN is the minimum required alignment. */
3886
3887 static bool
3888 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
3889 {
3890 unsigned int exp_align;
3891 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3892 is an incompatible assignment in a call statement (and possibly even in asm
3893 statements). This could be relaxed by using a new temporary, but only for
3894 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3895 intraprocedural SRA we deal with this by keeping the old aggregate around,
3896 something we cannot do in IPA-SRA.) */
3897 if (access->write
3898 && (is_gimple_call (access->stmt)
3899 || gimple_code (access->stmt) == GIMPLE_ASM))
3900 return true;
3901
3902 exp_align = get_object_alignment (access->expr);
3903 if (exp_align < req_align)
3904 return true;
3905
3906 return false;
3907 }
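/* Illustrative sketch (hypothetical code, not part of the pass): a write
   access whose statement is a call, the situation rejected above (compare the
   second testcase of PR 42025).  The call result is stored straight into the
   pointed-to aggregate, so the region cannot simply be replaced by a new
   scalar parameter without introducing an extra temporary.  */
#if 0
struct pair { int a; int b; };
extern struct pair make_pair (void);

static void
fill (struct pair *slot)
{
  *slot = make_pair ();         /* write performed by a call statement  */
}
#endif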
3908
3909
3910 /* Sort collected accesses for parameter PARM, identify representatives for
3911 each accessed region and link them together. Return NULL if there are
3912 different but overlapping accesses; return the special pointer value
3913 no_accesses_representant if there are no accesses for this parameter at
3914 all; and return the first representative otherwise. Set *RO_GRP if there
3915 is a group of accesses with only read (i.e. no write) accesses. */
3916
3917 static struct access *
3918 splice_param_accesses (tree parm, bool *ro_grp)
3919 {
3920 int i, j, access_count, group_count;
3921 int agg_size, total_size = 0;
3922 struct access *access, *res, **prev_acc_ptr = &res;
3923 vec<access_p> *access_vec;
3924
3925 access_vec = get_base_access_vector (parm);
3926 if (!access_vec)
3927 return &no_accesses_representant;
3928 access_count = access_vec->length ();
3929
3930 access_vec->qsort (compare_access_positions);
3931
3932 i = 0;
3933 total_size = 0;
3934 group_count = 0;
3935 while (i < access_count)
3936 {
3937 bool modification;
3938 tree a1_alias_type;
3939 access = (*access_vec)[i];
3940 modification = access->write;
3941 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
3942 return NULL;
3943 a1_alias_type = reference_alias_ptr_type (access->expr);
3944
3945 /* Access is about to become group representative unless we find some
3946 nasty overlap which would preclude us from breaking this parameter
3947 apart. */
3948
3949 j = i + 1;
3950 while (j < access_count)
3951 {
3952 struct access *ac2 = (*access_vec)[j];
3953 if (ac2->offset != access->offset)
3954 {
3955 /* All or nothing law for parameters. */
3956 if (access->offset + access->size > ac2->offset)
3957 return NULL;
3958 else
3959 break;
3960 }
3961 else if (ac2->size != access->size)
3962 return NULL;
3963
3964 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
3965 || (ac2->type != access->type
3966 && (TREE_ADDRESSABLE (ac2->type)
3967 || TREE_ADDRESSABLE (access->type)))
3968 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3969 return NULL;
3970
3971 modification |= ac2->write;
3972 ac2->group_representative = access;
3973 ac2->next_sibling = access->next_sibling;
3974 access->next_sibling = ac2;
3975 j++;
3976 }
3977
3978 group_count++;
3979 access->grp_maybe_modified = modification;
3980 if (!modification)
3981 *ro_grp = true;
3982 *prev_acc_ptr = access;
3983 prev_acc_ptr = &access->next_grp;
3984 total_size += access->size;
3985 i = j;
3986 }
3987
3988 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3989 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3990 else
3991 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3992 if (total_size >= agg_size)
3993 return NULL;
3994
3995 gcc_assert (group_count > 0);
3996 return res;
3997 }
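/* Illustrative sketch (hypothetical code, not part of the pass): accesses at
   the same offset but with different sizes, which the checks above reject.
   The whole nested structure and one of its fields are both accessed at
   offset zero, so no consistent set of representatives exists and the
   parameter is left alone.  */
#if 0
struct inner { int x; int y; };
struct outer { struct inner in; };

extern void consume (struct inner);

static int
mixed (struct outer *p)
{
  consume (p->in);      /* access at offset 0 with the size of struct inner  */
  return p->in.x;       /* access at offset 0 with the size of int  */
}
#endif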
3998
3999 /* Decide whether parameters with representative accesses given by REPR should
4000 be reduced into components. */
4001
4002 static int
4003 decide_one_param_reduction (struct access *repr)
4004 {
4005 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4006 bool by_ref;
4007 tree parm;
4008
4009 parm = repr->base;
4010 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
4011 gcc_assert (cur_parm_size > 0);
4012
4013 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4014 {
4015 by_ref = true;
4016 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
4017 }
4018 else
4019 {
4020 by_ref = false;
4021 agg_size = cur_parm_size;
4022 }
4023
4024 if (dump_file)
4025 {
4026 struct access *acc;
4027 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4028 print_generic_expr (dump_file, parm, 0);
4029 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4030 for (acc = repr; acc; acc = acc->next_grp)
4031 dump_access (dump_file, acc, true);
4032 }
4033
4034 total_size = 0;
4035 new_param_count = 0;
4036
4037 for (; repr; repr = repr->next_grp)
4038 {
4039 gcc_assert (parm == repr->base);
4040
4041 /* Taking the address of a non-addressable field is verboten. */
4042 if (by_ref && repr->non_addressable)
4043 return 0;
4044
4045 /* Do not decompose a non-BLKmode param in a way that would
4046 create BLKmode params. Especially for by-reference passing
4047 (thus, pointer-type param) this is hardly worthwhile. */
4048 if (DECL_MODE (parm) != BLKmode
4049 && TYPE_MODE (repr->type) == BLKmode)
4050 return 0;
4051
4052 if (!by_ref || (!repr->grp_maybe_modified
4053 && !repr->grp_not_necessarilly_dereferenced))
4054 total_size += repr->size;
4055 else
4056 total_size += cur_parm_size;
4057
4058 new_param_count++;
4059 }
4060
4061 gcc_assert (new_param_count > 0);
4062
4063 if (optimize_function_for_size_p (cfun))
4064 parm_size_limit = cur_parm_size;
4065 else
4066 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4067 * cur_parm_size);
4068
4069 if (total_size < agg_size
4070 && total_size <= parm_size_limit)
4071 {
4072 if (dump_file)
4073 fprintf (dump_file, " ....will be split into %i components\n",
4074 new_param_count);
4075 return new_param_count;
4076 }
4077 else
4078 return 0;
4079 }
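/* Illustrative worked example (hypothetical numbers, not part of the pass):
   assume a 64-bit target, the default ipa-sra-ptr-growth-factor of 2 and a
   parameter "struct quad { long a, b, c, d; } *p", so agg_size is 256 bits,
   cur_parm_size is 64 bits (the pointer) and parm_size_limit is 128 bits when
   not optimizing for size.  If the function reads only p->a and p->b,
   total_size is 128 <= 128 and 128 < 256, so the pointer is replaced by two
   longs passed by value.  If p->c is read as well, total_size becomes 192,
   which exceeds the limit, and the split is rejected because trading one
   pointer for three values would grow the argument list too much.  */
#if 0
struct quad { long a, b, c, d; };

static long
two_fields (struct quad *p)     /* split into two by-value longs  */
{
  return p->a + p->b;
}

static long
three_fields (struct quad *p)   /* kept as is: growth limit exceeded  */
{
  return p->a + p->b + p->c;
}
#endif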
4080
4081 /* The order of the following enums is important; we need to do extra work for
4082 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4083 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4084 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4085
4086 /* Identify representatives of all accesses to all candidate parameters for
4087 IPA-SRA. Return result based on what representatives have been found. */
4088
4089 static enum ipa_splicing_result
4090 splice_all_param_accesses (vec<access_p> &representatives)
4091 {
4092 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4093 tree parm;
4094 struct access *repr;
4095
4096 representatives.create (func_param_count);
4097
4098 for (parm = DECL_ARGUMENTS (current_function_decl);
4099 parm;
4100 parm = DECL_CHAIN (parm))
4101 {
4102 if (is_unused_scalar_param (parm))
4103 {
4104 representatives.quick_push (&no_accesses_representant);
4105 if (result == NO_GOOD_ACCESS)
4106 result = UNUSED_PARAMS;
4107 }
4108 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4109 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4110 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4111 {
4112 repr = unmodified_by_ref_scalar_representative (parm);
4113 representatives.quick_push (repr);
4114 if (repr)
4115 result = UNMODIF_BY_REF_ACCESSES;
4116 }
4117 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4118 {
4119 bool ro_grp = false;
4120 repr = splice_param_accesses (parm, &ro_grp);
4121 representatives.quick_push (repr);
4122
4123 if (repr && !no_accesses_p (repr))
4124 {
4125 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4126 {
4127 if (ro_grp)
4128 result = UNMODIF_BY_REF_ACCESSES;
4129 else if (result < MODIF_BY_REF_ACCESSES)
4130 result = MODIF_BY_REF_ACCESSES;
4131 }
4132 else if (result < BY_VAL_ACCESSES)
4133 result = BY_VAL_ACCESSES;
4134 }
4135 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4136 result = UNUSED_PARAMS;
4137 }
4138 else
4139 representatives.quick_push (NULL);
4140 }
4141
4142 if (result == NO_GOOD_ACCESS)
4143 {
4144 representatives.release ();
4145 return NO_GOOD_ACCESS;
4146 }
4147
4148 return result;
4149 }
4150
4151 /* Return the index of BASE in PARMS. Abort if it is not found. */
4152
4153 static inline int
4154 get_param_index (tree base, vec<tree> parms)
4155 {
4156 int i, len;
4157
4158 len = parms.length ();
4159 for (i = 0; i < len; i++)
4160 if (parms[i] == base)
4161 return i;
4162 gcc_unreachable ();
4163 }
4164
4165 /* Convert the decisions made at the representative level into compact
4166 parameter adjustments. REPRESENTATIVES are pointers to the first
4167 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the expected
4168 final number of adjustments. */
4169
4170 static ipa_parm_adjustment_vec
4171 turn_representatives_into_adjustments (vec<access_p> representatives,
4172 int adjustments_count)
4173 {
4174 vec<tree> parms;
4175 ipa_parm_adjustment_vec adjustments;
4176 tree parm;
4177 int i;
4178
4179 gcc_assert (adjustments_count > 0);
4180 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4181 adjustments.create (adjustments_count);
4182 parm = DECL_ARGUMENTS (current_function_decl);
4183 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4184 {
4185 struct access *repr = representatives[i];
4186
4187 if (!repr || no_accesses_p (repr))
4188 {
4189 struct ipa_parm_adjustment adj;
4190
4191 memset (&adj, 0, sizeof (adj));
4192 adj.base_index = get_param_index (parm, parms);
4193 adj.base = parm;
4194 if (!repr)
4195 adj.copy_param = 1;
4196 else
4197 adj.remove_param = 1;
4198 adjustments.quick_push (adj);
4199 }
4200 else
4201 {
4202 struct ipa_parm_adjustment adj;
4203 int index = get_param_index (parm, parms);
4204
4205 for (; repr; repr = repr->next_grp)
4206 {
4207 memset (&adj, 0, sizeof (adj));
4208 gcc_assert (repr->base == parm);
4209 adj.base_index = index;
4210 adj.base = repr->base;
4211 adj.type = repr->type;
4212 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4213 adj.offset = repr->offset;
4214 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4215 && (repr->grp_maybe_modified
4216 || repr->grp_not_necessarilly_dereferenced));
4217 adjustments.quick_push (adj);
4218 }
4219 }
4220 }
4221 parms.release ();
4222 return adjustments;
4223 }
4224
4225 /* Analyze the collected accesses and produce a plan for what to do with the
4226 parameters, in the form of adjustments. An empty vector means do nothing. */
4227
4228 static ipa_parm_adjustment_vec
4229 analyze_all_param_acesses (void)
4230 {
4231 enum ipa_splicing_result repr_state;
4232 bool proceed = false;
4233 int i, adjustments_count = 0;
4234 vec<access_p> representatives;
4235 ipa_parm_adjustment_vec adjustments;
4236
4237 repr_state = splice_all_param_accesses (representatives);
4238 if (repr_state == NO_GOOD_ACCESS)
4239 return ipa_parm_adjustment_vec();
4240
4241 /* If there are any parameters passed by reference which are not modified
4242 directly, we need to check whether they can be modified indirectly. */
4243 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4244 {
4245 analyze_caller_dereference_legality (representatives);
4246 analyze_modified_params (representatives);
4247 }
4248
4249 for (i = 0; i < func_param_count; i++)
4250 {
4251 struct access *repr = representatives[i];
4252
4253 if (repr && !no_accesses_p (repr))
4254 {
4255 if (repr->grp_scalar_ptr)
4256 {
4257 adjustments_count++;
4258 if (repr->grp_not_necessarilly_dereferenced
4259 || repr->grp_maybe_modified)
4260 representatives[i] = NULL;
4261 else
4262 {
4263 proceed = true;
4264 sra_stats.scalar_by_ref_to_by_val++;
4265 }
4266 }
4267 else
4268 {
4269 int new_components = decide_one_param_reduction (repr);
4270
4271 if (new_components == 0)
4272 {
4273 representatives[i] = NULL;
4274 adjustments_count++;
4275 }
4276 else
4277 {
4278 adjustments_count += new_components;
4279 sra_stats.aggregate_params_reduced++;
4280 sra_stats.param_reductions_created += new_components;
4281 proceed = true;
4282 }
4283 }
4284 }
4285 else
4286 {
4287 if (no_accesses_p (repr))
4288 {
4289 proceed = true;
4290 sra_stats.deleted_unused_parameters++;
4291 }
4292 adjustments_count++;
4293 }
4294 }
4295
4296 if (!proceed && dump_file)
4297 fprintf (dump_file, "NOT proceeding to change params.\n");
4298
4299 if (proceed)
4300 adjustments = turn_representatives_into_adjustments (representatives,
4301 adjustments_count);
4302 else
4303 adjustments = ipa_parm_adjustment_vec();
4304
4305 representatives.release ();
4306 return adjustments;
4307 }
4308
4309 /* If a parameter replacement identified by ADJ does not yet exist in the form
4310 of a declaration, create it and record it; otherwise return the previously
4311 created one. */
4312
4313 static tree
4314 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4315 {
4316 tree repl;
4317 if (!adj->new_ssa_base)
4318 {
4319 char *pretty_name = make_fancy_name (adj->base);
4320
4321 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4322 DECL_NAME (repl) = get_identifier (pretty_name);
4323 obstack_free (&name_obstack, pretty_name);
4324
4325 adj->new_ssa_base = repl;
4326 }
4327 else
4328 repl = adj->new_ssa_base;
4329 return repl;
4330 }
4331
4332 /* Find the first adjustment for a particular parameter BASE in a vector of
4333 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4334 adjustment. */
4335
4336 static struct ipa_parm_adjustment *
4337 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4338 {
4339 int i, len;
4340
4341 len = adjustments.length ();
4342 for (i = 0; i < len; i++)
4343 {
4344 struct ipa_parm_adjustment *adj;
4345
4346 adj = &adjustments[i];
4347 if (!adj->copy_param && adj->base == base)
4348 return adj;
4349 }
4350
4351 return NULL;
4352 }
4353
4354 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4355 removed because its value is not used, replace the SSA_NAME with one
4356 relating to a created VAR_DECL, together with all of its uses, and return
4357 true. ADJUSTMENTS is the vector of adjustments. */
4358
4359 static bool
4360 replace_removed_params_ssa_names (gimple stmt,
4361 ipa_parm_adjustment_vec adjustments)
4362 {
4363 struct ipa_parm_adjustment *adj;
4364 tree lhs, decl, repl, name;
4365
4366 if (gimple_code (stmt) == GIMPLE_PHI)
4367 lhs = gimple_phi_result (stmt);
4368 else if (is_gimple_assign (stmt))
4369 lhs = gimple_assign_lhs (stmt);
4370 else if (is_gimple_call (stmt))
4371 lhs = gimple_call_lhs (stmt);
4372 else
4373 gcc_unreachable ();
4374
4375 if (TREE_CODE (lhs) != SSA_NAME)
4376 return false;
4377
4378 decl = SSA_NAME_VAR (lhs);
4379 if (decl == NULL_TREE
4380 || TREE_CODE (decl) != PARM_DECL)
4381 return false;
4382
4383 adj = get_adjustment_for_base (adjustments, decl);
4384 if (!adj)
4385 return false;
4386
4387 repl = get_replaced_param_substitute (adj);
4388 name = make_ssa_name (repl, stmt);
4389
4390 if (dump_file)
4391 {
4392 fprintf (dump_file, "replacing an SSA name of a removed param ");
4393 print_generic_expr (dump_file, lhs, 0);
4394 fprintf (dump_file, " with ");
4395 print_generic_expr (dump_file, name, 0);
4396 fprintf (dump_file, "\n");
4397 }
4398
4399 if (is_gimple_assign (stmt))
4400 gimple_assign_set_lhs (stmt, name);
4401 else if (is_gimple_call (stmt))
4402 gimple_call_set_lhs (stmt, name);
4403 else
4404 gimple_phi_set_result (stmt, name);
4405
4406 replace_uses_by (lhs, name);
4407 release_ssa_name (lhs);
4408 return true;
4409 }
4410
4411 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4412 so. ADJUSTMENTS is the vector of adjustments. CONVERT specifies whether the
4413 function should care about type incompatibility between the current and new
4414 expressions. If it is false, the function will leave
4415 incompatibility issues to the caller. Return true iff the expression
4416 was modified. */
4417
4418 static bool
4419 sra_ipa_modify_expr (tree *expr, bool convert,
4420 ipa_parm_adjustment_vec adjustments)
4421 {
4422 int i, len;
4423 struct ipa_parm_adjustment *adj, *cand = NULL;
4424 HOST_WIDE_INT offset, size, max_size;
4425 tree base, src;
4426
4427 len = adjustments.length ();
4428
4429 if (TREE_CODE (*expr) == BIT_FIELD_REF
4430 || TREE_CODE (*expr) == IMAGPART_EXPR
4431 || TREE_CODE (*expr) == REALPART_EXPR)
4432 {
4433 expr = &TREE_OPERAND (*expr, 0);
4434 convert = true;
4435 }
4436
4437 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4438 if (!base || size == -1 || max_size == -1)
4439 return false;
4440
4441 if (TREE_CODE (base) == MEM_REF)
4442 {
4443 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4444 base = TREE_OPERAND (base, 0);
4445 }
4446
4447 base = get_ssa_base_param (base);
4448 if (!base || TREE_CODE (base) != PARM_DECL)
4449 return false;
4450
4451 for (i = 0; i < len; i++)
4452 {
4453 adj = &adjustments[i];
4454
4455 if (adj->base == base &&
4456 (adj->offset == offset || adj->remove_param))
4457 {
4458 cand = adj;
4459 break;
4460 }
4461 }
4462 if (!cand || cand->copy_param || cand->remove_param)
4463 return false;
4464
4465 if (cand->by_ref)
4466 src = build_simple_mem_ref (cand->reduction);
4467 else
4468 src = cand->reduction;
4469
4470 if (dump_file && (dump_flags & TDF_DETAILS))
4471 {
4472 fprintf (dump_file, "About to replace expr ");
4473 print_generic_expr (dump_file, *expr, 0);
4474 fprintf (dump_file, " with ");
4475 print_generic_expr (dump_file, src, 0);
4476 fprintf (dump_file, "\n");
4477 }
4478
4479 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4480 {
4481 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4482 *expr = vce;
4483 }
4484 else
4485 *expr = src;
4486 return true;
4487 }
4488
4489 /* If the statement pointed to by STMT_PTR contains any expressions that need
4490 to be replaced with a different one as noted by ADJUSTMENTS, do so. Handle any
4491 potential type incompatibilities (GSI is used to accommodate conversion
4492 statements and must point to the statement). Return true iff the statement
4493 was modified. */
4494
4495 static bool
4496 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4497 ipa_parm_adjustment_vec adjustments)
4498 {
4499 gimple stmt = *stmt_ptr;
4500 tree *lhs_p, *rhs_p;
4501 bool any;
4502
4503 if (!gimple_assign_single_p (stmt))
4504 return false;
4505
4506 rhs_p = gimple_assign_rhs1_ptr (stmt);
4507 lhs_p = gimple_assign_lhs_ptr (stmt);
4508
4509 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4510 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4511 if (any)
4512 {
4513 tree new_rhs = NULL_TREE;
4514
4515 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4516 {
4517 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4518 {
4519 /* V_C_Es of constructors can cause trouble (PR 42714). */
4520 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4521 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4522 else
4523 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4524 NULL);
4525 }
4526 else
4527 new_rhs = fold_build1_loc (gimple_location (stmt),
4528 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4529 *rhs_p);
4530 }
4531 else if (REFERENCE_CLASS_P (*rhs_p)
4532 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4533 && !is_gimple_reg (*lhs_p))
4534 /* This can happen when an assignment between two single field
4535 structures is turned into an assignment between two pointers to
4536 scalars (PR 42237). */
4537 new_rhs = *rhs_p;
4538
4539 if (new_rhs)
4540 {
4541 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4542 true, GSI_SAME_STMT);
4543
4544 gimple_assign_set_rhs_from_tree (gsi, tmp);
4545 }
4546
4547 return true;
4548 }
4549
4550 return false;
4551 }
4552
4553 /* Traverse the function body and perform all modifications as described in
4554 ADJUSTMENTS. Return true iff the CFG has been changed. */
4555
4556 static bool
4557 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4558 {
4559 bool cfg_changed = false;
4560 basic_block bb;
4561
4562 FOR_EACH_BB (bb)
4563 {
4564 gimple_stmt_iterator gsi;
4565
4566 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4567 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4568
4569 gsi = gsi_start_bb (bb);
4570 while (!gsi_end_p (gsi))
4571 {
4572 gimple stmt = gsi_stmt (gsi);
4573 bool modified = false;
4574 tree *t;
4575 unsigned i;
4576
4577 switch (gimple_code (stmt))
4578 {
4579 case GIMPLE_RETURN:
4580 t = gimple_return_retval_ptr (stmt);
4581 if (*t != NULL_TREE)
4582 modified |= sra_ipa_modify_expr (t, true, adjustments);
4583 break;
4584
4585 case GIMPLE_ASSIGN:
4586 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4587 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4588 break;
4589
4590 case GIMPLE_CALL:
4591 /* Operands must be processed before the lhs. */
4592 for (i = 0; i < gimple_call_num_args (stmt); i++)
4593 {
4594 t = gimple_call_arg_ptr (stmt, i);
4595 modified |= sra_ipa_modify_expr (t, true, adjustments);
4596 }
4597
4598 if (gimple_call_lhs (stmt))
4599 {
4600 t = gimple_call_lhs_ptr (stmt);
4601 modified |= sra_ipa_modify_expr (t, false, adjustments);
4602 modified |= replace_removed_params_ssa_names (stmt,
4603 adjustments);
4604 }
4605 break;
4606
4607 case GIMPLE_ASM:
4608 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4609 {
4610 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4611 modified |= sra_ipa_modify_expr (t, true, adjustments);
4612 }
4613 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4614 {
4615 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4616 modified |= sra_ipa_modify_expr (t, false, adjustments);
4617 }
4618 break;
4619
4620 default:
4621 break;
4622 }
4623
4624 if (modified)
4625 {
4626 update_stmt (stmt);
4627 if (maybe_clean_eh_stmt (stmt)
4628 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4629 cfg_changed = true;
4630 }
4631 gsi_next (&gsi);
4632 }
4633 }
4634
4635 return cfg_changed;
4636 }
4637
4638 /* Call gimple_debug_bind_reset_value on all debug statements describing
4639 gimple register parameters that are being removed or replaced. */
4640
4641 static void
4642 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4643 {
4644 int i, len;
4645 gimple_stmt_iterator *gsip = NULL, gsi;
4646
4647 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4648 {
4649 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4650 gsip = &gsi;
4651 }
4652 len = adjustments.length ();
4653 for (i = 0; i < len; i++)
4654 {
4655 struct ipa_parm_adjustment *adj;
4656 imm_use_iterator ui;
4657 gimple stmt, def_temp;
4658 tree name, vexpr, copy = NULL_TREE;
4659 use_operand_p use_p;
4660
4661 adj = &adjustments[i];
4662 if (adj->copy_param || !is_gimple_reg (adj->base))
4663 continue;
4664 name = ssa_default_def (cfun, adj->base);
4665 vexpr = NULL;
4666 if (name)
4667 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4668 {
4669 /* All other users must have been removed by
4670 ipa_sra_modify_function_body. */
4671 gcc_assert (is_gimple_debug (stmt));
4672 if (vexpr == NULL && gsip != NULL)
4673 {
4674 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4675 vexpr = make_node (DEBUG_EXPR_DECL);
4676 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4677 NULL);
4678 DECL_ARTIFICIAL (vexpr) = 1;
4679 TREE_TYPE (vexpr) = TREE_TYPE (name);
4680 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4681 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4682 }
4683 if (vexpr)
4684 {
4685 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4686 SET_USE (use_p, vexpr);
4687 }
4688 else
4689 gimple_debug_bind_reset_value (stmt);
4690 update_stmt (stmt);
4691 }
4692 /* Create a VAR_DECL for debug info purposes. */
4693 if (!DECL_IGNORED_P (adj->base))
4694 {
4695 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4696 VAR_DECL, DECL_NAME (adj->base),
4697 TREE_TYPE (adj->base));
4698 if (DECL_PT_UID_SET_P (adj->base))
4699 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4700 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4701 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4702 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4703 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4704 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4705 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4706 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4707 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4708 SET_DECL_RTL (copy, 0);
4709 TREE_USED (copy) = 1;
4710 DECL_CONTEXT (copy) = current_function_decl;
4711 add_local_decl (cfun, copy);
4712 DECL_CHAIN (copy) =
4713 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4714 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4715 }
4716 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4717 {
4718 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4719 if (vexpr)
4720 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4721 else
4722 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4723 NULL);
4724 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4725 }
4726 }
4727 }
4728
4729 /* Return true iff some caller of NODE does not pass at least as many actual
4730 arguments as there are formal parameters in the current function. */
4731
4732 static bool
4733 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4734 void *data ATTRIBUTE_UNUSED)
4735 {
4736 struct cgraph_edge *cs;
4737 for (cs = node->callers; cs; cs = cs->next_caller)
4738 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4739 return true;
4740
4741 return false;
4742 }
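/* Illustrative sketch (hypothetical code, not part of the pass): how a call
   site can end up with fewer actual arguments than the callee has formal
   parameters.  The caller below uses an old-style, unprototyped declaration,
   so nothing forces it to pass both arguments; such call statements cannot be
   rewritten to a new parameter list, so the whole function is skipped.  */
#if 0
static int callee ();           /* old-style declaration, no prototype  */

static int
caller (void)
{
  return callee (1);            /* only one actual argument  */
}

static int
callee (int a, int b)           /* the definition has two formals  */
{
  return a + b;
}
#endif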
4743
4744 /* Convert all callers of NODE. */
4745
4746 static bool
4747 convert_callers_for_node (struct cgraph_node *node,
4748 void *data)
4749 {
4750 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4751 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4752 struct cgraph_edge *cs;
4753
4754 for (cs = node->callers; cs; cs = cs->next_caller)
4755 {
4756 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl));
4757
4758 if (dump_file)
4759 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4760 cs->caller->uid, cs->callee->uid,
4761 xstrdup (cgraph_node_name (cs->caller)),
4762 xstrdup (cgraph_node_name (cs->callee)));
4763
4764 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4765
4766 pop_cfun ();
4767 }
4768
4769 for (cs = node->callers; cs; cs = cs->next_caller)
4770 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4771 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl)))
4772 compute_inline_parameters (cs->caller, true);
4773 BITMAP_FREE (recomputed_callers);
4774
4775 return true;
4776 }
4777
4778 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4779
4780 static void
4781 convert_callers (struct cgraph_node *node, tree old_decl,
4782 ipa_parm_adjustment_vec adjustments)
4783 {
4784 basic_block this_block;
4785
4786 cgraph_for_node_and_aliases (node, convert_callers_for_node,
4787 &adjustments, false);
4788
4789 if (!encountered_recursive_call)
4790 return;
4791
4792 FOR_EACH_BB (this_block)
4793 {
4794 gimple_stmt_iterator gsi;
4795
4796 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4797 {
4798 gimple stmt = gsi_stmt (gsi);
4799 tree call_fndecl;
4800 if (gimple_code (stmt) != GIMPLE_CALL)
4801 continue;
4802 call_fndecl = gimple_call_fndecl (stmt);
4803 if (call_fndecl == old_decl)
4804 {
4805 if (dump_file)
4806 fprintf (dump_file, "Adjusting recursive call");
4807 gimple_call_set_fndecl (stmt, node->symbol.decl);
4808 ipa_modify_call_arguments (NULL, stmt, adjustments);
4809 }
4810 }
4811 }
4812
4813 return;
4814 }
4815
4816 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4817 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4818
4819 static bool
4820 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4821 {
4822 struct cgraph_node *new_node;
4823 bool cfg_changed;
4824 vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);
4825
4826 rebuild_cgraph_edges ();
4827 free_dominance_info (CDI_DOMINATORS);
4828 pop_cfun ();
4829
4830 new_node = cgraph_function_versioning (node, redirect_callers,
4831 NULL,
4832 NULL, false, NULL, NULL, "isra");
4833 redirect_callers.release ();
4834
4835 push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
4836 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4837 cfg_changed = ipa_sra_modify_function_body (adjustments);
4838 sra_ipa_reset_debug_stmts (adjustments);
4839 convert_callers (new_node, node->symbol.decl, adjustments);
4840 cgraph_make_node_local (new_node);
4841 return cfg_changed;
4842 }
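/* Illustrative note (hypothetical example, not part of the pass): for a
   function such as

     static long get_x (struct pt *p) { return p->x; }

   this creates a local clone with the adjusted parameter list, typically
   visible in dumps and assembly under a name like get_x.isra.0 taking a
   single long by value, and redirects the recorded callers to it.  */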
4843
4844 /* Return false if the function is apparently unsuitable for IPA-SRA based on its
4845 attributes; return true otherwise. NODE is the cgraph node of the current
4846 function. */
4847
4848 static bool
4849 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4850 {
4851 if (!cgraph_node_can_be_local_p (node))
4852 {
4853 if (dump_file)
4854 fprintf (dump_file, "Function not local to this compilation unit.\n");
4855 return false;
4856 }
4857
4858 if (!node->local.can_change_signature)
4859 {
4860 if (dump_file)
4861 fprintf (dump_file, "Function can not change signature.\n");
4862 return false;
4863 }
4864
4865 if (!tree_versionable_function_p (node->symbol.decl))
4866 {
4867 if (dump_file)
4868 fprintf (dump_file, "Function is not versionable.\n");
4869 return false;
4870 }
4871
4872 if (DECL_VIRTUAL_P (current_function_decl))
4873 {
4874 if (dump_file)
4875 fprintf (dump_file, "Function is a virtual method.\n");
4876 return false;
4877 }
4878
4879 if ((DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
4880 && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
4881 {
4882 if (dump_file)
4883 fprintf (dump_file, "Function too big to be made truly local.\n");
4884 return false;
4885 }
4886
4887 if (!node->callers)
4888 {
4889 if (dump_file)
4890 fprintf (dump_file,
4891 "Function has no callers in this compilation unit.\n");
4892 return false;
4893 }
4894
4895 if (cfun->stdarg)
4896 {
4897 if (dump_file)
4898 fprintf (dump_file, "Function uses stdarg.\n");
4899 return false;
4900 }
4901
4902 if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
4903 return false;
4904
4905 return true;
4906 }
4907
4908 /* Perform early interprocedural SRA. */
4909
4910 static unsigned int
4911 ipa_early_sra (void)
4912 {
4913 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4914 ipa_parm_adjustment_vec adjustments;
4915 int ret = 0;
4916
4917 if (!ipa_sra_preliminary_function_checks (node))
4918 return 0;
4919
4920 sra_initialize ();
4921 sra_mode = SRA_MODE_EARLY_IPA;
4922
4923 if (!find_param_candidates ())
4924 {
4925 if (dump_file)
4926 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4927 goto simple_out;
4928 }
4929
4930 if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4931 NULL, true))
4932 {
4933 if (dump_file)
4934 fprintf (dump_file, "There are callers with insufficient number of "
4935 "arguments.\n");
4936 goto simple_out;
4937 }
4938
4939 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4940 func_param_count
4941 * last_basic_block_for_function (cfun));
4942 final_bbs = BITMAP_ALLOC (NULL);
4943
4944 scan_function ();
4945 if (encountered_apply_args)
4946 {
4947 if (dump_file)
4948 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4949 goto out;
4950 }
4951
4952 if (encountered_unchangable_recursive_call)
4953 {
4954 if (dump_file)
4955 fprintf (dump_file, "Function calls itself with insufficient "
4956 "number of arguments.\n");
4957 goto out;
4958 }
4959
4960 adjustments = analyze_all_param_acesses ();
4961 if (!adjustments.exists ())
4962 goto out;
4963 if (dump_file)
4964 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4965
4966 if (modify_function (node, adjustments))
4967 ret = TODO_update_ssa | TODO_cleanup_cfg;
4968 else
4969 ret = TODO_update_ssa;
4970 adjustments.release ();
4971
4972 statistics_counter_event (cfun, "Unused parameters deleted",
4973 sra_stats.deleted_unused_parameters);
4974 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4975 sra_stats.scalar_by_ref_to_by_val);
4976 statistics_counter_event (cfun, "Aggregate parameters broken up",
4977 sra_stats.aggregate_params_reduced);
4978 statistics_counter_event (cfun, "Aggregate parameter components created",
4979 sra_stats.param_reductions_created);
4980
4981 out:
4982 BITMAP_FREE (final_bbs);
4983 free (bb_dereferences);
4984 simple_out:
4985 sra_deinitialize ();
4986 return ret;
4987 }
4988
4989 /* Return true if early IPA-SRA should be performed. */
4990 static bool
4991 ipa_early_sra_gate (void)
4992 {
4993 return flag_ipa_sra && dbg_cnt (eipa_sra);
4994 }
4995
4996 struct gimple_opt_pass pass_early_ipa_sra =
4997 {
4998 {
4999 GIMPLE_PASS,
5000 "eipa_sra", /* name */
5001 OPTGROUP_NONE, /* optinfo_flags */
5002 ipa_early_sra_gate, /* gate */
5003 ipa_early_sra, /* execute */
5004 NULL, /* sub */
5005 NULL, /* next */
5006 0, /* static_pass_number */
5007 TV_IPA_SRA, /* tv_id */
5008 0, /* properties_required */
5009 0, /* properties_provided */
5010 0, /* properties_destroyed */
5011 0, /* todo_flags_start */
5012 TODO_dump_symtab /* todo_flags_finish */
5013 }
5014 };