[gcc.git] / gcc / tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
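
/* As an illustration only (a made-up example, not taken from any testcase),
   given a function such as

     struct S { int i; float f; };

     float
     foo (struct S s)
     {
       s.i++;
       return s.f;
     }

   intraprocedural SRA conceptually introduces scalar replacements for s.i
   and s.f (initialized from the incoming parameter as described in point 4
   above) and rewrites the accesses to use them, along the lines of

     SR.1 = s.i;  SR.2 = s.f;
     SR.1 = SR.1 + 1;
     return SR.2;

   The replacement names here are hypothetical; the real ones are produced
   by create_access_replacement and make_fancy_name below.  */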
73
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "toplev.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "cgraph.h"
83 #include "tree-flow.h"
84 #include "ipa-prop.h"
85 #include "tree-pretty-print.h"
86 #include "statistics.h"
87 #include "tree-dump.h"
88 #include "timevar.h"
89 #include "params.h"
90 #include "target.h"
91 #include "flags.h"
92 #include "dbgcnt.h"
93 #include "tree-inline.h"
94 #include "gimple-pretty-print.h"
95
96 /* Enumeration of all aggregate reductions we can do. */
97 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
98 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
99 SRA_MODE_INTRA }; /* late intraprocedural SRA */
100
101 /* Global variable describing which aggregate reduction we are performing at
102 the moment. */
103 static enum sra_mode sra_mode;
104
105 struct assign_link;
106
107 /* ACCESS represents each access to an aggregate variable (as a whole or a
108 part). It can also represent a group of accesses that refer to exactly the
109 same fragment of an aggregate (i.e. those that have exactly the same offset
110 and size). Such representatives for a single aggregate, once determined,
111 are linked in a linked list and have the group fields set.
112
113 Moreover, when doing intraprocedural SRA, a tree is built from those
114 representatives (by the means of first_child and next_sibling pointers), in
115 which all items in a subtree are "within" the root, i.e. their offset is
116 greater or equal to offset of the root and offset+size is smaller or equal
117 to offset+size of the root. Children of an access are sorted by offset.
118
119 Note that accesses to parts of vector and complex number types are always
120 represented by an access to the whole complex number or vector. It is the
121 duty of the modifying functions to replace them appropriately. */
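
/* For example (purely illustrative), for

     struct In { int a; int b; };
     struct Out { struct In in; int c; } x;

   accesses to x.in, x.in.a, x.in.b and x.c could yield a tree of group
   representatives roughly like

     x.in    (offset 0,  size 64)
       x.in.a  (offset 0,  size 32)
       x.in.b  (offset 32, size 32)
     x.c     (offset 64, size 32)

   where x.in and x.c are linked through next_grp and the two fields of x.in
   hang off its first_child/next_sibling pointers.  The sizes assume a
   32-bit int; the numbers only illustrate the offset/size containment rule
   described above.  */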
122
123 struct access
124 {
125 /* Values returned by `get_ref_base_and_extent' for each component reference.
126 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
127 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
128 HOST_WIDE_INT offset;
129 HOST_WIDE_INT size;
130 tree base;
131
132 /* Expression. It is context dependent so do not use it to create new
133 expressions to access the original aggregate. See PR 42154 for a
134 testcase. */
135 tree expr;
136 /* Type. */
137 tree type;
138
139 /* The statement this access belongs to. */
140 gimple stmt;
141
142 /* Next group representative for this aggregate. */
143 struct access *next_grp;
144
145 /* Pointer to the group representative. Pointer to itself if the struct is
146 the representative. */
147 struct access *group_representative;
148
149 /* If this access has any children (in terms of the definition above), this
150 points to the first one. */
151 struct access *first_child;
152
153 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
154 described above. In IPA-SRA this is a pointer to the next access
155 belonging to the same group (having the same representative). */
156 struct access *next_sibling;
157
158 /* Pointers to the first and last element in the linked list of assign
159 links. */
160 struct assign_link *first_link, *last_link;
161
162 /* Pointer to the next access in the work queue. */
163 struct access *next_queued;
164
165 /* Replacement variable for this access "region." Never to be accessed
166 directly, always only by the means of get_access_replacement() and only
167 when grp_to_be_replaced flag is set. */
168 tree replacement_decl;
169
170 /* Is this particular access a write access? */
171 unsigned write : 1;
172
173 /* Is this access an artificial one created to scalarize some record
174 entirely? */
175 unsigned total_scalarization : 1;
176
177 /* Is this access currently in the work queue? */
178 unsigned grp_queued : 1;
179
180 /* Does this group contain a write access? This flag is propagated down the
181 access tree. */
182 unsigned grp_write : 1;
183
184 /* Does this group contain a read access? This flag is propagated down the
185 access tree. */
186 unsigned grp_read : 1;
187
188 /* Does this group contain a read access that comes from an assignment
189 statement? This flag is propagated down the access tree. */
190 unsigned grp_assignment_read : 1;
191
192 /* Does this group contain a write access that comes from an assignment
193 statement? This flag is propagated down the access tree. */
194 unsigned grp_assignment_write : 1;
195
196 /* Other passes of the analysis use this bit to make function
197 analyze_access_subtree create scalar replacements for this group if
198 possible. */
199 unsigned grp_hint : 1;
200
201 /* Is the subtree rooted in this access fully covered by scalar
202 replacements? */
203 unsigned grp_covered : 1;
204
205 /* If set to true, this access and all below it in an access tree must not be
206 scalarized. */
207 unsigned grp_unscalarizable_region : 1;
208
209 /* Whether data have been written to parts of the aggregate covered by this
210 access which are not to be scalarized. This flag is propagated up the
211 access tree. */
212 unsigned grp_unscalarized_data : 1;
213
214 /* Does this access and/or group contain a write access through a
215 BIT_FIELD_REF? */
216 unsigned grp_partial_lhs : 1;
217
218 /* Set when a scalar replacement should be created for this variable. We do
219 the decision and creation at different places because create_tmp_var
220 cannot be called from within FOR_EACH_REFERENCED_VAR. */
221 unsigned grp_to_be_replaced : 1;
222
223 /* Should TREE_NO_WARNING of a replacement be set? */
224 unsigned grp_no_warning : 1;
225
226 /* Is it possible that the group refers to data which might be (directly or
227 otherwise) modified? */
228 unsigned grp_maybe_modified : 1;
229
230 /* Set when this is a representative of a pointer to scalar (i.e. by
231 reference) parameter which we consider for turning into a plain scalar
232 (i.e. a by value parameter). */
233 unsigned grp_scalar_ptr : 1;
234
235 /* Set when we discover that this pointer is not safe to dereference in the
236 caller. */
237 unsigned grp_not_necessarilly_dereferenced : 1;
238 };
239
240 typedef struct access *access_p;
241
242 DEF_VEC_P (access_p);
243 DEF_VEC_ALLOC_P (access_p, heap);
244
245 /* Alloc pool for allocating access structures. */
246 static alloc_pool access_pool;
247
248 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
249 are used to propagate subaccesses from rhs to lhs as long as they don't
250 conflict with what is already there. */
251 struct assign_link
252 {
253 struct access *lacc, *racc;
254 struct assign_link *next;
255 };
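
/* For instance (an illustrative sketch), for an aggregate assignment

     dst = src;

   where both dst and src are candidates, build_accesses_from_assign below
   creates one assign_link whose lacc describes the access to dst and whose
   racc describes the access to src.  If src has subaccesses such as src.f,
   propagation along this link can create a corresponding subaccess dst.f,
   provided it does not conflict with what is already known about dst.  */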
256
257 /* Alloc pool for allocating assign link structures. */
258 static alloc_pool link_pool;
259
260 /* Base (tree) -> Vector (VEC(access_p,heap) *) map. */
261 static struct pointer_map_t *base_access_vec;
262
263 /* Bitmap of candidates. */
264 static bitmap candidate_bitmap;
265
266 /* Bitmap of candidates which we should try to entirely scalarize away and
267 those which cannot be (because they are, and need to be, used as a whole). */
268 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
269
270 /* Obstack for creation of fancy names. */
271 static struct obstack name_obstack;
272
273 /* Head of a linked list of accesses that need to have their subaccesses
274 propagated to their assignment counterparts. */
275 static struct access *work_queue_head;
276
277 /* Number of parameters of the analyzed function when doing early ipa SRA. */
278 static int func_param_count;
279
280 /* scan_function sets the following to true if it encounters a call to
281 __builtin_apply_args. */
282 static bool encountered_apply_args;
283
284 /* Set by scan_function when it finds a recursive call. */
285 static bool encountered_recursive_call;
286
287 /* Set by scan_function when it finds a recursive call with fewer actual
288 arguments than formal parameters. */
289 static bool encountered_unchangable_recursive_call;
290
291 /* This is a table in which for each basic block and parameter there is a
292 distance (offset + size) in that parameter which is dereferenced and
293 accessed in that BB. */
294 static HOST_WIDE_INT *bb_dereferences;
295 /* Bitmap of BBs that can cause the function to "stop" progressing by
296 returning, throwing externally, looping infinitely or calling a function
297 which might abort, etc. */
298 static bitmap final_bbs;
299
300 /* Representative of no accesses at all. */
301 static struct access no_accesses_representant;
302
303 /* Predicate to test the special value. */
304
305 static inline bool
306 no_accesses_p (struct access *access)
307 {
308 return access == &no_accesses_representant;
309 }
310
311 /* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
312 representative fields are dumped, otherwise those which only describe the
313 individual access are. */
314
315 static struct
316 {
317 /* Number of processed aggregates is readily available in
318 analyze_all_variable_accesses and so is not stored here. */
319
320 /* Number of created scalar replacements. */
321 int replacements;
322
323 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
324 expression. */
325 int exprs;
326
327 /* Number of statements created by generate_subtree_copies. */
328 int subtree_copies;
329
330 /* Number of statements created by load_assign_lhs_subreplacements. */
331 int subreplacements;
332
333 /* Number of times sra_modify_assign has deleted a statement. */
334 int deleted;
335
336 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
337 RHS separately due to type conversions or nonexistent matching
338 references. */
339 int separate_lhs_rhs_handling;
340
341 /* Number of parameters that were removed because they were unused. */
342 int deleted_unused_parameters;
343
344 /* Number of scalars passed as parameters by reference that have been
345 converted to be passed by value. */
346 int scalar_by_ref_to_by_val;
347
348 /* Number of aggregate parameters that were replaced by one or more of their
349 components. */
350 int aggregate_params_reduced;
351
352 /* Number of components created when splitting aggregate parameters. */
353 int param_reductions_created;
354 } sra_stats;
355
356 static void
357 dump_access (FILE *f, struct access *access, bool grp)
358 {
359 fprintf (f, "access { ");
360 fprintf (f, "base = (%d)'", DECL_UID (access->base));
361 print_generic_expr (f, access->base, 0);
362 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
363 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
364 fprintf (f, ", expr = ");
365 print_generic_expr (f, access->expr, 0);
366 fprintf (f, ", type = ");
367 print_generic_expr (f, access->type, 0);
368 if (grp)
369 fprintf (f, ", grp_write = %d, total_scalarization = %d, "
370 "grp_read = %d, grp_hint = %d, grp_assignment_read = %d,"
371 "grp_assignment_write = %d, grp_covered = %d, "
372 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
373 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
374 "grp_maybe_modified = %d, "
375 "grp_not_necessarilly_dereferenced = %d\n",
376 access->grp_write, access->total_scalarization,
377 access->grp_read, access->grp_hint, access->grp_assignment_read,
378 access->grp_assignment_write, access->grp_covered,
379 access->grp_unscalarizable_region, access->grp_unscalarized_data,
380 access->grp_partial_lhs, access->grp_to_be_replaced,
381 access->grp_maybe_modified,
382 access->grp_not_necessarilly_dereferenced);
383 else
384 fprintf (f, ", write = %d, total_scalarization = %d, "
385 "grp_partial_lhs = %d\n",
386 access->write, access->total_scalarization,
387 access->grp_partial_lhs);
388 }
389
390 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
391
392 static void
393 dump_access_tree_1 (FILE *f, struct access *access, int level)
394 {
395 do
396 {
397 int i;
398
399 for (i = 0; i < level; i++)
400 fputs ("* ", dump_file);
401
402 dump_access (f, access, true);
403
404 if (access->first_child)
405 dump_access_tree_1 (f, access->first_child, level + 1);
406
407 access = access->next_sibling;
408 }
409 while (access);
410 }
411
412 /* Dump all access trees for a variable, given the pointer to the first root in
413 ACCESS. */
414
415 static void
416 dump_access_tree (FILE *f, struct access *access)
417 {
418 for (; access; access = access->next_grp)
419 dump_access_tree_1 (f, access, 0);
420 }
421
422 /* Return true iff ACC is non-NULL and has subaccesses. */
423
424 static inline bool
425 access_has_children_p (struct access *acc)
426 {
427 return acc && acc->first_child;
428 }
429
430 /* Return a vector of pointers to accesses for the variable given in BASE or
431 NULL if there is none. */
432
433 static VEC (access_p, heap) *
434 get_base_access_vector (tree base)
435 {
436 void **slot;
437
438 slot = pointer_map_contains (base_access_vec, base);
439 if (!slot)
440 return NULL;
441 else
442 return *(VEC (access_p, heap) **) slot;
443 }
444
445 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
446 in ACCESS. Return NULL if it cannot be found. */
447
448 static struct access *
449 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
450 HOST_WIDE_INT size)
451 {
452 while (access && (access->offset != offset || access->size != size))
453 {
454 struct access *child = access->first_child;
455
456 while (child && (child->offset + child->size <= offset))
457 child = child->next_sibling;
458 access = child;
459 }
460
461 return access;
462 }
463
464 /* Return the first group representative for DECL or NULL if none exists. */
465
466 static struct access *
467 get_first_repr_for_decl (tree base)
468 {
469 VEC (access_p, heap) *access_vec;
470
471 access_vec = get_base_access_vector (base);
472 if (!access_vec)
473 return NULL;
474
475 return VEC_index (access_p, access_vec, 0);
476 }
477
478 /* Find an access representative for the variable BASE and given OFFSET and
479 SIZE. Requires that access trees have already been built. Return NULL if
480 it cannot be found. */
481
482 static struct access *
483 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
484 HOST_WIDE_INT size)
485 {
486 struct access *access;
487
488 access = get_first_repr_for_decl (base);
489 while (access && (access->offset + access->size <= offset))
490 access = access->next_grp;
491 if (!access)
492 return NULL;
493
494 return find_access_in_subtree (access, offset, size);
495 }
496
497 /* Add LINK to the linked list of assign links of RACC. */
498 static void
499 add_link_to_rhs (struct access *racc, struct assign_link *link)
500 {
501 gcc_assert (link->racc == racc);
502
503 if (!racc->first_link)
504 {
505 gcc_assert (!racc->last_link);
506 racc->first_link = link;
507 }
508 else
509 racc->last_link->next = link;
510
511 racc->last_link = link;
512 link->next = NULL;
513 }
514
515 /* Move all link structures from the linked list in OLD_RACC to the linked list
516 in NEW_RACC. */
517 static void
518 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
519 {
520 if (!old_racc->first_link)
521 {
522 gcc_assert (!old_racc->last_link);
523 return;
524 }
525
526 if (new_racc->first_link)
527 {
528 gcc_assert (!new_racc->last_link->next);
529 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
530
531 new_racc->last_link->next = old_racc->first_link;
532 new_racc->last_link = old_racc->last_link;
533 }
534 else
535 {
536 gcc_assert (!new_racc->last_link);
537
538 new_racc->first_link = old_racc->first_link;
539 new_racc->last_link = old_racc->last_link;
540 }
541 old_racc->first_link = old_racc->last_link = NULL;
542 }
543
544 /* Add ACCESS to the work queue (which is actually a stack). */
545
546 static void
547 add_access_to_work_queue (struct access *access)
548 {
549 if (!access->grp_queued)
550 {
551 gcc_assert (!access->next_queued);
552 access->next_queued = work_queue_head;
553 access->grp_queued = 1;
554 work_queue_head = access;
555 }
556 }
557
558 /* Pop an access from the work queue, and return it, assuming there is one. */
559
560 static struct access *
561 pop_access_from_work_queue (void)
562 {
563 struct access *access = work_queue_head;
564
565 work_queue_head = access->next_queued;
566 access->next_queued = NULL;
567 access->grp_queued = 0;
568 return access;
569 }
570
571
572 /* Allocate necessary structures. */
573
574 static void
575 sra_initialize (void)
576 {
577 candidate_bitmap = BITMAP_ALLOC (NULL);
578 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
579 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
580 gcc_obstack_init (&name_obstack);
581 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
582 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
583 base_access_vec = pointer_map_create ();
584 memset (&sra_stats, 0, sizeof (sra_stats));
585 encountered_apply_args = false;
586 encountered_recursive_call = false;
587 encountered_unchangable_recursive_call = false;
588 }
589
590 /* Hook fed to pointer_map_traverse, deallocate stored vectors. */
591
592 static bool
593 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
594 void *data ATTRIBUTE_UNUSED)
595 {
596 VEC (access_p, heap) *access_vec;
597 access_vec = (VEC (access_p, heap) *) *value;
598 VEC_free (access_p, heap, access_vec);
599
600 return true;
601 }
602
603 /* Deallocate all general structures. */
604
605 static void
606 sra_deinitialize (void)
607 {
608 BITMAP_FREE (candidate_bitmap);
609 BITMAP_FREE (should_scalarize_away_bitmap);
610 BITMAP_FREE (cannot_scalarize_away_bitmap);
611 free_alloc_pool (access_pool);
612 free_alloc_pool (link_pool);
613 obstack_free (&name_obstack, NULL);
614
615 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
616 pointer_map_destroy (base_access_vec);
617 }
618
619 /* Remove DECL from candidates for SRA and write REASON to the dump file if
620 there is one. */
621 static void
622 disqualify_candidate (tree decl, const char *reason)
623 {
624 bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
625
626 if (dump_file && (dump_flags & TDF_DETAILS))
627 {
628 fprintf (dump_file, "! Disqualifying ");
629 print_generic_expr (dump_file, decl, 0);
630 fprintf (dump_file, " - %s\n", reason);
631 }
632 }
633
634 /* Return true iff the type contains a field or an element which does not allow
635 scalarization. */
636
637 static bool
638 type_internals_preclude_sra_p (tree type)
639 {
640 tree fld;
641 tree et;
642
643 switch (TREE_CODE (type))
644 {
645 case RECORD_TYPE:
646 case UNION_TYPE:
647 case QUAL_UNION_TYPE:
648 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
649 if (TREE_CODE (fld) == FIELD_DECL)
650 {
651 tree ft = TREE_TYPE (fld);
652
653 if (TREE_THIS_VOLATILE (fld)
654 || !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
655 || !host_integerp (DECL_FIELD_OFFSET (fld), 1)
656 || !host_integerp (DECL_SIZE (fld), 1)
657 || (DECL_BIT_FIELD (fld) && AGGREGATE_TYPE_P (ft)))
658 return true;
659
660 if (AGGREGATE_TYPE_P (ft)
661 && type_internals_preclude_sra_p (ft))
662 return true;
663 }
664
665 return false;
666
667 case ARRAY_TYPE:
668 et = TREE_TYPE (type);
669
670 if (AGGREGATE_TYPE_P (et))
671 return type_internals_preclude_sra_p (et);
672 else
673 return false;
674
675 default:
676 return false;
677 }
678 }
679
680 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
681 base variable if it is. Return T if it is not an SSA_NAME. */
682
683 static tree
684 get_ssa_base_param (tree t)
685 {
686 if (TREE_CODE (t) == SSA_NAME)
687 {
688 if (SSA_NAME_IS_DEFAULT_DEF (t))
689 return SSA_NAME_VAR (t);
690 else
691 return NULL_TREE;
692 }
693 return t;
694 }
695
696 /* Mark a dereference of BASE of distance DIST in a basic block that STMT
697 belongs to, unless the BB has already been marked as potentially
698 final. */
699
700 static void
701 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
702 {
703 basic_block bb = gimple_bb (stmt);
704 int idx, parm_index = 0;
705 tree parm;
706
707 if (bitmap_bit_p (final_bbs, bb->index))
708 return;
709
710 for (parm = DECL_ARGUMENTS (current_function_decl);
711 parm && parm != base;
712 parm = DECL_CHAIN (parm))
713 parm_index++;
714
715 gcc_assert (parm_index < func_param_count);
716
717 idx = bb->index * func_param_count + parm_index;
718 if (bb_dereferences[idx] < dist)
719 bb_dereferences[idx] = dist;
720 }
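
/* For instance (a hypothetical example): if a statement in basic block 5
   dereferences the second parameter (parm_index 1) to read a 4-byte field at
   byte offset 8, mark_parm_dereference raises
   bb_dereferences[5 * func_param_count + 1] to at least 96 bits
   (offset 64 + size 32), unless block 5 is already in final_bbs.  */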
721
722 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
723 the three fields. Also add it to the vector of accesses corresponding to
724 the base. Finally, return the new access. */
725
726 static struct access *
727 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
728 {
729 VEC (access_p, heap) *vec;
730 struct access *access;
731 void **slot;
732
733 access = (struct access *) pool_alloc (access_pool);
734 memset (access, 0, sizeof (struct access));
735 access->base = base;
736 access->offset = offset;
737 access->size = size;
738
739 slot = pointer_map_contains (base_access_vec, base);
740 if (slot)
741 vec = (VEC (access_p, heap) *) *slot;
742 else
743 vec = VEC_alloc (access_p, heap, 32);
744
745 VEC_safe_push (access_p, heap, vec, access);
746
747 *((struct VEC (access_p,heap) **)
748 pointer_map_insert (base_access_vec, base)) = vec;
749
750 return access;
751 }
752
753 /* Create and insert access for EXPR. Return created access, or NULL if it is
754 not possible. */
755
756 static struct access *
757 create_access (tree expr, gimple stmt, bool write)
758 {
759 struct access *access;
760 HOST_WIDE_INT offset, size, max_size;
761 tree base = expr;
762 bool ptr, unscalarizable_region = false;
763
764 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
765
766 if (sra_mode == SRA_MODE_EARLY_IPA
767 && TREE_CODE (base) == MEM_REF)
768 {
769 base = get_ssa_base_param (TREE_OPERAND (base, 0));
770 if (!base)
771 return NULL;
772 ptr = true;
773 }
774 else
775 ptr = false;
776
777 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
778 return NULL;
779
780 if (sra_mode == SRA_MODE_EARLY_IPA)
781 {
782 if (size < 0 || size != max_size)
783 {
784 disqualify_candidate (base, "Encountered a variable sized access.");
785 return NULL;
786 }
787 if (TREE_CODE (expr) == COMPONENT_REF
788 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
789 {
790 disqualify_candidate (base, "Encountered a bit-field access.");
791 return NULL;
792 }
793 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
794
795 if (ptr)
796 mark_parm_dereference (base, offset + size, stmt);
797 }
798 else
799 {
800 if (size != max_size)
801 {
802 size = max_size;
803 unscalarizable_region = true;
804 }
805 if (size < 0)
806 {
807 disqualify_candidate (base, "Encountered an unconstrained access.");
808 return NULL;
809 }
810 }
811
812 access = create_access_1 (base, offset, size);
813 access->expr = expr;
814 access->type = TREE_TYPE (expr);
815 access->write = write;
816 access->grp_unscalarizable_region = unscalarizable_region;
817 access->stmt = stmt;
818
819 return access;
820 }
821
822
823 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
824 register types or (recursively) records with only these two kinds of fields.
825 It also returns false if any of these records contains a bit-field. */
826
827 static bool
828 type_consists_of_records_p (tree type)
829 {
830 tree fld;
831
832 if (TREE_CODE (type) != RECORD_TYPE)
833 return false;
834
835 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
836 if (TREE_CODE (fld) == FIELD_DECL)
837 {
838 tree ft = TREE_TYPE (fld);
839
840 if (DECL_BIT_FIELD (fld))
841 return false;
842
843 if (!is_gimple_reg_type (ft)
844 && !type_consists_of_records_p (ft))
845 return false;
846 }
847
848 return true;
849 }
850
851 /* Create total_scalarization accesses for all scalar-type fields in DECL, whose
852 type must be a RECORD_TYPE conforming to type_consists_of_records_p. BASE
853 must be the top-most VAR_DECL representing the variable, OFFSET must be the
854 offset of DECL within BASE. REF must be the memory reference expression for
855 the given decl. */
856
857 static void
858 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
859 tree ref)
860 {
861 tree fld, decl_type = TREE_TYPE (decl);
862
863 for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
864 if (TREE_CODE (fld) == FIELD_DECL)
865 {
866 HOST_WIDE_INT pos = offset + int_bit_position (fld);
867 tree ft = TREE_TYPE (fld);
868 tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
869 NULL_TREE);
870
871 if (is_gimple_reg_type (ft))
872 {
873 struct access *access;
874 HOST_WIDE_INT size;
875
876 size = tree_low_cst (DECL_SIZE (fld), 1);
877 access = create_access_1 (base, pos, size);
878 access->expr = nref;
879 access->type = ft;
880 access->total_scalarization = 1;
881 /* Accesses for intraprocedural SRA can have their stmt NULL. */
882 }
883 else
884 completely_scalarize_record (base, fld, pos, nref);
885 }
886 }
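
/* As a hypothetical example, for

     struct In { int a; float b; };
     struct Out { struct In in; int c; };

   called on a variable of type struct Out, the function above creates
   total_scalarization accesses for the fields in.a, in.b and c (but none for
   the intermediate field in itself), each at the bit position of the field
   within the outermost variable and with the field's type.  */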
887
888
889 /* Search the given tree for a declaration by skipping handled components and
890 exclude it from the candidates. */
891
892 static void
893 disqualify_base_of_expr (tree t, const char *reason)
894 {
895 t = get_base_address (t);
896 if (sra_mode == SRA_MODE_EARLY_IPA
897 && TREE_CODE (t) == MEM_REF)
898 t = get_ssa_base_param (TREE_OPERAND (t, 0));
899
900 if (t && DECL_P (t))
901 disqualify_candidate (t, reason);
902 }
903
904 /* Scan expression EXPR and create access structures for all accesses to
905 candidates for scalarization. Return the created access or NULL if none is
906 created. */
907
908 static struct access *
909 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
910 {
911 struct access *ret = NULL;
912 bool partial_ref;
913
914 if (TREE_CODE (expr) == BIT_FIELD_REF
915 || TREE_CODE (expr) == IMAGPART_EXPR
916 || TREE_CODE (expr) == REALPART_EXPR)
917 {
918 expr = TREE_OPERAND (expr, 0);
919 partial_ref = true;
920 }
921 else
922 partial_ref = false;
923
924 /* We need to dive through V_C_Es in order to get the size of its parameter
925 and not the result type. Ada produces such statements. We are also
926 capable of handling the topmost V_C_E but not any of those buried in other
927 handled components. */
928 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
929 expr = TREE_OPERAND (expr, 0);
930
931 if (contains_view_convert_expr_p (expr))
932 {
933 disqualify_base_of_expr (expr, "V_C_E under a different handled "
934 "component.");
935 return NULL;
936 }
937
938 switch (TREE_CODE (expr))
939 {
940 case MEM_REF:
941 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
942 && sra_mode != SRA_MODE_EARLY_IPA)
943 return NULL;
944 /* fall through */
945 case VAR_DECL:
946 case PARM_DECL:
947 case RESULT_DECL:
948 case COMPONENT_REF:
949 case ARRAY_REF:
950 case ARRAY_RANGE_REF:
951 ret = create_access (expr, stmt, write);
952 break;
953
954 default:
955 break;
956 }
957
958 if (write && partial_ref && ret)
959 ret->grp_partial_lhs = 1;
960
961 return ret;
962 }
963
964 /* Scan expression EXPR and create access structures for all accesses to
965 candidates for scalarization. Return true if any access has been inserted.
966 STMT must be the statement from which the expression is taken, WRITE must be
967 true if the expression is a store and false otherwise. */
968
969 static bool
970 build_access_from_expr (tree expr, gimple stmt, bool write)
971 {
972 struct access *access;
973
974 access = build_access_from_expr_1 (expr, stmt, write);
975 if (access)
976 {
977 /* This means the aggregate is accessed as a whole in a way other than an
978 assign statement and thus cannot be removed even if we had a scalar
979 replacement for everything. */
980 if (cannot_scalarize_away_bitmap)
981 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
982 return true;
983 }
984 return false;
985 }
986
987 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
988 modes in which it matters; return true iff they have been disqualified. RHS
989 may be NULL, in that case ignore it. If we scalarize an aggregate in
990 intra-SRA we may need to add statements after each statement. This is not
991 possible if a statement unconditionally has to end the basic block. */
992 static bool
993 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
994 {
995 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
996 && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
997 {
998 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
999 if (rhs)
1000 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1001 return true;
1002 }
1003 return false;
1004 }
1005
1006 /* Scan expressions occurring in STMT, create access structures for all accesses
1007 to candidates for scalarization and remove those candidates which occur in
1008 statements or expressions that prevent them from being split apart. Return
1009 true if any access has been inserted. */
1010
1011 static bool
1012 build_accesses_from_assign (gimple stmt)
1013 {
1014 tree lhs, rhs;
1015 struct access *lacc, *racc;
1016
1017 if (!gimple_assign_single_p (stmt))
1018 return false;
1019
1020 lhs = gimple_assign_lhs (stmt);
1021 rhs = gimple_assign_rhs1 (stmt);
1022
1023 if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1024 return false;
1025
1026 racc = build_access_from_expr_1 (rhs, stmt, false);
1027 lacc = build_access_from_expr_1 (lhs, stmt, true);
1028
1029 if (lacc)
1030 lacc->grp_assignment_write = 1;
1031
1032 if (racc)
1033 {
1034 racc->grp_assignment_read = 1;
1035 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1036 && !is_gimple_reg_type (racc->type))
1037 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1038 }
1039
1040 if (lacc && racc
1041 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1042 && !lacc->grp_unscalarizable_region
1043 && !racc->grp_unscalarizable_region
1044 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1045 /* FIXME: Turn the following line into an assert after PR 40058 is
1046 fixed. */
1047 && lacc->size == racc->size
1048 && useless_type_conversion_p (lacc->type, racc->type))
1049 {
1050 struct assign_link *link;
1051
1052 link = (struct assign_link *) pool_alloc (link_pool);
1053 memset (link, 0, sizeof (struct assign_link));
1054
1055 link->lacc = lacc;
1056 link->racc = racc;
1057
1058 add_link_to_rhs (racc, link);
1059 }
1060
1061 return lacc || racc;
1062 }
1063
1064 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1065 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1066
1067 static bool
1068 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1069 void *data ATTRIBUTE_UNUSED)
1070 {
1071 op = get_base_address (op);
1072 if (op
1073 && DECL_P (op))
1074 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1075
1076 return false;
1077 }
1078
1079 /* Return true iff callsite CALL has at least as many actual arguments as there
1080 are formal parameters of the function currently processed by IPA-SRA. */
1081
1082 static inline bool
1083 callsite_has_enough_arguments_p (gimple call)
1084 {
1085 return gimple_call_num_args (call) >= (unsigned) func_param_count;
1086 }
1087
1088 /* Scan function and look for interesting expressions and create access
1089 structures for them. Return true iff any access is created. */
1090
1091 static bool
1092 scan_function (void)
1093 {
1094 basic_block bb;
1095 bool ret = false;
1096
1097 FOR_EACH_BB (bb)
1098 {
1099 gimple_stmt_iterator gsi;
1100 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1101 {
1102 gimple stmt = gsi_stmt (gsi);
1103 tree t;
1104 unsigned i;
1105
1106 if (final_bbs && stmt_can_throw_external (stmt))
1107 bitmap_set_bit (final_bbs, bb->index);
1108 switch (gimple_code (stmt))
1109 {
1110 case GIMPLE_RETURN:
1111 t = gimple_return_retval (stmt);
1112 if (t != NULL_TREE)
1113 ret |= build_access_from_expr (t, stmt, false);
1114 if (final_bbs)
1115 bitmap_set_bit (final_bbs, bb->index);
1116 break;
1117
1118 case GIMPLE_ASSIGN:
1119 ret |= build_accesses_from_assign (stmt);
1120 break;
1121
1122 case GIMPLE_CALL:
1123 for (i = 0; i < gimple_call_num_args (stmt); i++)
1124 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1125 stmt, false);
1126
1127 if (sra_mode == SRA_MODE_EARLY_IPA)
1128 {
1129 tree dest = gimple_call_fndecl (stmt);
1130 int flags = gimple_call_flags (stmt);
1131
1132 if (dest)
1133 {
1134 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1135 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1136 encountered_apply_args = true;
1137 if (cgraph_get_node (dest)
1138 == cgraph_get_node (current_function_decl))
1139 {
1140 encountered_recursive_call = true;
1141 if (!callsite_has_enough_arguments_p (stmt))
1142 encountered_unchangable_recursive_call = true;
1143 }
1144 }
1145
1146 if (final_bbs
1147 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1148 bitmap_set_bit (final_bbs, bb->index);
1149 }
1150
1151 t = gimple_call_lhs (stmt);
1152 if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1153 ret |= build_access_from_expr (t, stmt, true);
1154 break;
1155
1156 case GIMPLE_ASM:
1157 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1158 asm_visit_addr);
1159 if (final_bbs)
1160 bitmap_set_bit (final_bbs, bb->index);
1161
1162 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1163 {
1164 t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1165 ret |= build_access_from_expr (t, stmt, false);
1166 }
1167 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1168 {
1169 t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1170 ret |= build_access_from_expr (t, stmt, true);
1171 }
1172 break;
1173
1174 default:
1175 break;
1176 }
1177 }
1178 }
1179
1180 return ret;
1181 }
1182
1183 /* Helper of qsort; the array contains pointers to accesses. An
1184 access is considered smaller than another if it has a smaller offset or if the
1185 offsets are the same but its size is bigger. */
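/* For example (illustration only), accesses with (offset, size) pairs
   (0, 64), (32, 32) and (0, 32) sort as (0, 64), (0, 32), (32, 32):
   smaller offsets come first and, for equal offsets, bigger sizes come
   first.  */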
1186
1187 static int
1188 compare_access_positions (const void *a, const void *b)
1189 {
1190 const access_p *fp1 = (const access_p *) a;
1191 const access_p *fp2 = (const access_p *) b;
1192 const access_p f1 = *fp1;
1193 const access_p f2 = *fp2;
1194
1195 if (f1->offset != f2->offset)
1196 return f1->offset < f2->offset ? -1 : 1;
1197
1198 if (f1->size == f2->size)
1199 {
1200 if (f1->type == f2->type)
1201 return 0;
1202 /* Put any non-aggregate type before any aggregate type. */
1203 else if (!is_gimple_reg_type (f1->type)
1204 && is_gimple_reg_type (f2->type))
1205 return 1;
1206 else if (is_gimple_reg_type (f1->type)
1207 && !is_gimple_reg_type (f2->type))
1208 return -1;
1209 /* Put any complex or vector type before any other scalar type. */
1210 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1211 && TREE_CODE (f1->type) != VECTOR_TYPE
1212 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1213 || TREE_CODE (f2->type) == VECTOR_TYPE))
1214 return 1;
1215 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1216 || TREE_CODE (f1->type) == VECTOR_TYPE)
1217 && TREE_CODE (f2->type) != COMPLEX_TYPE
1218 && TREE_CODE (f2->type) != VECTOR_TYPE)
1219 return -1;
1220 /* Put the integral type with the bigger precision first. */
1221 else if (INTEGRAL_TYPE_P (f1->type)
1222 && INTEGRAL_TYPE_P (f2->type))
1223 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1224 /* Put any integral type with non-full precision last. */
1225 else if (INTEGRAL_TYPE_P (f1->type)
1226 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1227 != TYPE_PRECISION (f1->type)))
1228 return 1;
1229 else if (INTEGRAL_TYPE_P (f2->type)
1230 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1231 != TYPE_PRECISION (f2->type)))
1232 return -1;
1233 /* Stabilize the sort. */
1234 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1235 }
1236
1237 /* We want the bigger accesses first, thus the opposite operator in the next
1238 line: */
1239 return f1->size > f2->size ? -1 : 1;
1240 }
1241
1242
1243 /* Append a name of the declaration to the name obstack. A helper function for
1244 make_fancy_name. */
1245
1246 static void
1247 make_fancy_decl_name (tree decl)
1248 {
1249 char buffer[32];
1250
1251 tree name = DECL_NAME (decl);
1252 if (name)
1253 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1254 IDENTIFIER_LENGTH (name));
1255 else
1256 {
1257 sprintf (buffer, "D%u", DECL_UID (decl));
1258 obstack_grow (&name_obstack, buffer, strlen (buffer));
1259 }
1260 }
1261
1262 /* Helper for make_fancy_name. */
1263
1264 static void
1265 make_fancy_name_1 (tree expr)
1266 {
1267 char buffer[32];
1268 tree index;
1269
1270 if (DECL_P (expr))
1271 {
1272 make_fancy_decl_name (expr);
1273 return;
1274 }
1275
1276 switch (TREE_CODE (expr))
1277 {
1278 case COMPONENT_REF:
1279 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1280 obstack_1grow (&name_obstack, '$');
1281 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1282 break;
1283
1284 case ARRAY_REF:
1285 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1286 obstack_1grow (&name_obstack, '$');
1287 /* Arrays with only one element may not have a constant as their
1288 index. */
1289 index = TREE_OPERAND (expr, 1);
1290 if (TREE_CODE (index) != INTEGER_CST)
1291 break;
1292 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1293 obstack_grow (&name_obstack, buffer, strlen (buffer));
1294 break;
1295
1296 case ADDR_EXPR:
1297 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1298 break;
1299
1300 case MEM_REF:
1301 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1302 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1303 {
1304 obstack_1grow (&name_obstack, '$');
1305 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1306 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1307 obstack_grow (&name_obstack, buffer, strlen (buffer));
1308 }
1309 break;
1310
1311 case BIT_FIELD_REF:
1312 case REALPART_EXPR:
1313 case IMAGPART_EXPR:
1314 gcc_unreachable (); /* we treat these as scalars. */
1315 break;
1316 default:
1317 break;
1318 }
1319 }
1320
1321 /* Create a human readable name for a replacement variable based on EXPR. */
1322
1323 static char *
1324 make_fancy_name (tree expr)
1325 {
1326 make_fancy_name_1 (expr);
1327 obstack_1grow (&name_obstack, '\0');
1328 return XOBFINISH (&name_obstack, char *);
1329 }
1330
1331 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1332 EXP_TYPE at the given OFFSET. If BASE is something for which
1333 get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
1334 to insert new statements either before or below the current one as specified
1335 by INSERT_AFTER. This function is not capable of handling bitfields. */
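/* As an informal example: for a declaration `s' used as BASE, an OFFSET of
   32 bits and an int EXP_TYPE, the function below builds a MEM_REF of type
   int whose address operand is &s and whose offset operand is the byte
   offset 4 (expressed in the alias pointer type of s).  The exact shape
   depends on what get_addr_base_and_unit_offset returns for BASE.  */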
1336
1337 tree
1338 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1339 tree exp_type, gimple_stmt_iterator *gsi,
1340 bool insert_after)
1341 {
1342 tree prev_base = base;
1343 tree off;
1344 HOST_WIDE_INT base_offset;
1345
1346 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1347
1348 base = get_addr_base_and_unit_offset (base, &base_offset);
1349
1350 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1351 offset such as array[var_index]. */
1352 if (!base)
1353 {
1354 gimple stmt;
1355 tree tmp, addr;
1356
1357 gcc_checking_assert (gsi);
1358 tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1359 add_referenced_var (tmp);
1360 tmp = make_ssa_name (tmp, NULL);
1361 addr = build_fold_addr_expr (unshare_expr (prev_base));
1362 stmt = gimple_build_assign (tmp, addr);
1363 gimple_set_location (stmt, loc);
1364 SSA_NAME_DEF_STMT (tmp) = stmt;
1365 if (insert_after)
1366 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1367 else
1368 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1369 update_stmt (stmt);
1370
1371 off = build_int_cst (reference_alias_ptr_type (prev_base),
1372 offset / BITS_PER_UNIT);
1373 base = tmp;
1374 }
1375 else if (TREE_CODE (base) == MEM_REF)
1376 {
1377 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1378 base_offset + offset / BITS_PER_UNIT);
1379 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off, 0);
1380 base = unshare_expr (TREE_OPERAND (base, 0));
1381 }
1382 else
1383 {
1384 off = build_int_cst (reference_alias_ptr_type (base),
1385 base_offset + offset / BITS_PER_UNIT);
1386 base = build_fold_addr_expr (unshare_expr (base));
1387 }
1388
1389 return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1390 }
1391
1392 /* Construct a memory reference to a part of an aggregate BASE at the given
1393 OFFSET and of the same type as MODEL. In case this is a reference to a
1394 component, the function will replicate the last COMPONENT_REF of model's
1395 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1396 build_ref_for_offset. */
1397
1398 static tree
1399 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1400 struct access *model, gimple_stmt_iterator *gsi,
1401 bool insert_after)
1402 {
1403 if (TREE_CODE (model->expr) == COMPONENT_REF)
1404 {
1405 tree t, exp_type;
1406 offset -= int_bit_position (TREE_OPERAND (model->expr, 1));
1407 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1408 t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
1409 return fold_build3_loc (loc, COMPONENT_REF, model->type, t,
1410 TREE_OPERAND (model->expr, 1), NULL_TREE);
1411 }
1412 else
1413 return build_ref_for_offset (loc, base, offset, model->type,
1414 gsi, insert_after);
1415 }
1416
1417 /* Construct a memory reference consisting of component_refs and array_refs to
1418 a part of an aggregate *RES (which is of type TYPE). The requested part
1419 should have type EXP_TYPE and be at the given OFFSET. This function might not
1420 succeed, it returns true when it does and only then *RES points to something
1421 meaningful. This function should be used only to build expressions that we
1422 might need to present to the user (e.g. in warnings). In all other situations,
1423 build_ref_for_model or build_ref_for_offset should be used instead. */
1424
1425 static bool
1426 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1427 tree exp_type)
1428 {
1429 while (1)
1430 {
1431 tree fld;
1432 tree tr_size, index, minidx;
1433 HOST_WIDE_INT el_size;
1434
1435 if (offset == 0 && exp_type
1436 && types_compatible_p (exp_type, type))
1437 return true;
1438
1439 switch (TREE_CODE (type))
1440 {
1441 case UNION_TYPE:
1442 case QUAL_UNION_TYPE:
1443 case RECORD_TYPE:
1444 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1445 {
1446 HOST_WIDE_INT pos, size;
1447 tree expr, *expr_ptr;
1448
1449 if (TREE_CODE (fld) != FIELD_DECL)
1450 continue;
1451
1452 pos = int_bit_position (fld);
1453 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1454 tr_size = DECL_SIZE (fld);
1455 if (!tr_size || !host_integerp (tr_size, 1))
1456 continue;
1457 size = tree_low_cst (tr_size, 1);
1458 if (size == 0)
1459 {
1460 if (pos != offset)
1461 continue;
1462 }
1463 else if (pos > offset || (pos + size) <= offset)
1464 continue;
1465
1466 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1467 NULL_TREE);
1468 expr_ptr = &expr;
1469 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1470 offset - pos, exp_type))
1471 {
1472 *res = expr;
1473 return true;
1474 }
1475 }
1476 return false;
1477
1478 case ARRAY_TYPE:
1479 tr_size = TYPE_SIZE (TREE_TYPE (type));
1480 if (!tr_size || !host_integerp (tr_size, 1))
1481 return false;
1482 el_size = tree_low_cst (tr_size, 1);
1483
1484 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1485 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1486 return false;
1487 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1488 if (!integer_zerop (minidx))
1489 index = int_const_binop (PLUS_EXPR, index, minidx, 0);
1490 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1491 NULL_TREE, NULL_TREE);
1492 offset = offset % el_size;
1493 type = TREE_TYPE (type);
1494 break;
1495
1496 default:
1497 if (offset != 0)
1498 return false;
1499
1500 if (exp_type)
1501 return false;
1502 else
1503 return true;
1504 }
1505 }
1506 }
1507
1508 /* Return true iff TYPE is stdarg va_list type. */
1509
1510 static inline bool
1511 is_va_list_type (tree type)
1512 {
1513 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1514 }
1515
1516 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1517 those variables whose type is suitable for scalarization. */
1518
1519 static bool
1520 find_var_candidates (void)
1521 {
1522 tree var, type;
1523 referenced_var_iterator rvi;
1524 bool ret = false;
1525
1526 FOR_EACH_REFERENCED_VAR (var, rvi)
1527 {
1528 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1529 continue;
1530 type = TREE_TYPE (var);
1531
1532 if (!AGGREGATE_TYPE_P (type)
1533 || needs_to_live_in_memory (var)
1534 || TREE_THIS_VOLATILE (var)
1535 || !COMPLETE_TYPE_P (type)
1536 || !host_integerp (TYPE_SIZE (type), 1)
1537 || tree_low_cst (TYPE_SIZE (type), 1) == 0
1538 || type_internals_preclude_sra_p (type)
1539 /* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1540 we also want to schedule it rather late. Thus we ignore it in
1541 the early pass. */
1542 || (sra_mode == SRA_MODE_EARLY_INTRA
1543 && is_va_list_type (type)))
1544 continue;
1545
1546 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1547
1548 if (dump_file && (dump_flags & TDF_DETAILS))
1549 {
1550 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1551 print_generic_expr (dump_file, var, 0);
1552 fprintf (dump_file, "\n");
1553 }
1554 ret = true;
1555 }
1556
1557 return ret;
1558 }
1559
1560 /* Sort all accesses for the given variable, check for partial overlaps and
1561 return NULL if there are any. If there are none, pick a representative for
1562 each combination of offset and size and create a linked list out of them.
1563 Return the pointer to the first representative and make sure it is the first
1564 one in the vector of accesses. */
1565
1566 static struct access *
1567 sort_and_splice_var_accesses (tree var)
1568 {
1569 int i, j, access_count;
1570 struct access *res, **prev_acc_ptr = &res;
1571 VEC (access_p, heap) *access_vec;
1572 bool first = true;
1573 HOST_WIDE_INT low = -1, high = 0;
1574
1575 access_vec = get_base_access_vector (var);
1576 if (!access_vec)
1577 return NULL;
1578 access_count = VEC_length (access_p, access_vec);
1579
1580 /* Sort by <OFFSET, SIZE>. */
1581 VEC_qsort (access_p, access_vec, compare_access_positions);
1582
1583 i = 0;
1584 while (i < access_count)
1585 {
1586 struct access *access = VEC_index (access_p, access_vec, i);
1587 bool grp_write = access->write;
1588 bool grp_read = !access->write;
1589 bool grp_assignment_read = access->grp_assignment_read;
1590 bool grp_assignment_write = access->grp_assignment_write;
1591 bool multiple_reads = false;
1592 bool total_scalarization = access->total_scalarization;
1593 bool grp_partial_lhs = access->grp_partial_lhs;
1594 bool first_scalar = is_gimple_reg_type (access->type);
1595 bool unscalarizable_region = access->grp_unscalarizable_region;
1596
1597 if (first || access->offset >= high)
1598 {
1599 first = false;
1600 low = access->offset;
1601 high = access->offset + access->size;
1602 }
1603 else if (access->offset > low && access->offset + access->size > high)
1604 return NULL;
1605 else
1606 gcc_assert (access->offset >= low
1607 && access->offset + access->size <= high);
1608
1609 j = i + 1;
1610 while (j < access_count)
1611 {
1612 struct access *ac2 = VEC_index (access_p, access_vec, j);
1613 if (ac2->offset != access->offset || ac2->size != access->size)
1614 break;
1615 if (ac2->write)
1616 grp_write = true;
1617 else
1618 {
1619 if (grp_read)
1620 multiple_reads = true;
1621 else
1622 grp_read = true;
1623 }
1624 grp_assignment_read |= ac2->grp_assignment_read;
1625 grp_assignment_write |= ac2->grp_assignment_write;
1626 grp_partial_lhs |= ac2->grp_partial_lhs;
1627 unscalarizable_region |= ac2->grp_unscalarizable_region;
1628 total_scalarization |= ac2->total_scalarization;
1629 relink_to_new_repr (access, ac2);
1630
1631 /* If there are both aggregate-type and scalar-type accesses with
1632 this combination of size and offset, the comparison function
1633 should have put the scalars first. */
1634 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1635 ac2->group_representative = access;
1636 j++;
1637 }
1638
1639 i = j;
1640
1641 access->group_representative = access;
1642 access->grp_write = grp_write;
1643 access->grp_read = grp_read;
1644 access->grp_assignment_read = grp_assignment_read;
1645 access->grp_assignment_write = grp_assignment_write;
1646 access->grp_hint = multiple_reads || total_scalarization;
1647 access->grp_partial_lhs = grp_partial_lhs;
1648 access->grp_unscalarizable_region = unscalarizable_region;
1649 if (access->first_link)
1650 add_access_to_work_queue (access);
1651
1652 *prev_acc_ptr = access;
1653 prev_acc_ptr = &access->next_grp;
1654 }
1655
1656 gcc_assert (res == VEC_index (access_p, access_vec, 0));
1657 return res;
1658 }
1659
1660 /* Create a variable for the given ACCESS which determines the type, name and a
1661 few other properties. Return the variable declaration; it is up to the
1662 caller to store it in ACCESS->replacement_decl. */
1663
1664 static tree
1665 create_access_replacement (struct access *access, bool rename)
1666 {
1667 tree repl;
1668
1669 repl = create_tmp_var (access->type, "SR");
1670 get_var_ann (repl);
1671 add_referenced_var (repl);
1672 if (rename)
1673 mark_sym_for_renaming (repl);
1674
1675 if (!access->grp_partial_lhs
1676 && (TREE_CODE (access->type) == COMPLEX_TYPE
1677 || TREE_CODE (access->type) == VECTOR_TYPE))
1678 DECL_GIMPLE_REG_P (repl) = 1;
1679
1680 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1681 DECL_ARTIFICIAL (repl) = 1;
1682 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1683
1684 if (DECL_NAME (access->base)
1685 && !DECL_IGNORED_P (access->base)
1686 && !DECL_ARTIFICIAL (access->base))
1687 {
1688 char *pretty_name = make_fancy_name (access->expr);
1689 tree debug_expr = unshare_expr (access->expr), d;
1690
1691 DECL_NAME (repl) = get_identifier (pretty_name);
1692 obstack_free (&name_obstack, pretty_name);
1693
1694 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1695 as DECL_DEBUG_EXPR isn't considered when looking for still
1696 used SSA_NAMEs and thus they could be freed. All debug info
1697 generation cares is whether something is constant or variable
1698 and that get_ref_base_and_extent works properly on the
1699 expression. */
1700 for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
1701 switch (TREE_CODE (d))
1702 {
1703 case ARRAY_REF:
1704 case ARRAY_RANGE_REF:
1705 if (TREE_OPERAND (d, 1)
1706 && TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
1707 TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
1708 if (TREE_OPERAND (d, 3)
1709 && TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
1710 TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
1711 /* FALLTHRU */
1712 case COMPONENT_REF:
1713 if (TREE_OPERAND (d, 2)
1714 && TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
1715 TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
1716 break;
1717 default:
1718 break;
1719 }
1720 SET_DECL_DEBUG_EXPR (repl, debug_expr);
1721 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1722 if (access->grp_no_warning)
1723 TREE_NO_WARNING (repl) = 1;
1724 else
1725 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1726 }
1727 else
1728 TREE_NO_WARNING (repl) = 1;
1729
1730 if (dump_file)
1731 {
1732 fprintf (dump_file, "Created a replacement for ");
1733 print_generic_expr (dump_file, access->base, 0);
1734 fprintf (dump_file, " offset: %u, size: %u: ",
1735 (unsigned) access->offset, (unsigned) access->size);
1736 print_generic_expr (dump_file, repl, 0);
1737 fprintf (dump_file, "\n");
1738 }
1739 sra_stats.replacements++;
1740
1741 return repl;
1742 }
1743
1744 /* Return ACCESS scalar replacement, create it if it does not exist yet. */
1745
1746 static inline tree
1747 get_access_replacement (struct access *access)
1748 {
1749 gcc_assert (access->grp_to_be_replaced);
1750
1751 if (!access->replacement_decl)
1752 access->replacement_decl = create_access_replacement (access, true);
1753 return access->replacement_decl;
1754 }
1755
1756 /* Return ACCESS scalar replacement, create it if it does not exist yet but do
1757 not mark it for renaming. */
1758
1759 static inline tree
1760 get_unrenamed_access_replacement (struct access *access)
1761 {
1762 gcc_assert (!access->grp_to_be_replaced);
1763
1764 if (!access->replacement_decl)
1765 access->replacement_decl = create_access_replacement (access, false);
1766 return access->replacement_decl;
1767 }
1768
1769
1770 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
1771 linked list along the way. Stop when *ACCESS is NULL or the access it
1772 points to is not "within" the root. Return false iff some accesses partially
1773 overlap. */
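/* For illustration only (the struct and offsets below are hypothetical, not
   produced by this code): assuming 32-bit ints and accesses sorted by offset
   and size for

     struct S { struct { int a; int b; } in; int c; } s;

   with representatives for s.in (offset 0, size 64), s.in.a (0, 32),
   s.in.b (32, 32) and s.c (64, 32), the resulting tree has s.in and s.c as
   top-level siblings and s.in.a and s.in.b as children of s.in, because each
   child lies entirely within its parent.  */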
1774
1775 static bool
1776 build_access_subtree (struct access **access)
1777 {
1778 struct access *root = *access, *last_child = NULL;
1779 HOST_WIDE_INT limit = root->offset + root->size;
1780
1781 *access = (*access)->next_grp;
1782 while (*access && (*access)->offset + (*access)->size <= limit)
1783 {
1784 if (!last_child)
1785 root->first_child = *access;
1786 else
1787 last_child->next_sibling = *access;
1788 last_child = *access;
1789
1790 if (!build_access_subtree (access))
1791 return false;
1792 }
1793
1794 if (*access && (*access)->offset < limit)
1795 return false;
1796
1797 return true;
1798 }
1799
1800 /* Build a tree of access representatives, ACCESS is the pointer to the first
1801 one, others are linked in a list by the next_grp field. Return false iff
1802 some accesses partially overlap. */
1803
1804 static bool
1805 build_access_trees (struct access *access)
1806 {
1807 while (access)
1808 {
1809 struct access *root = access;
1810
1811 if (!build_access_subtree (&access))
1812 return false;
1813 root->next_grp = access;
1814 }
1815 return true;
1816 }
1817
1818 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
1819 array. */
1820
1821 static bool
1822 expr_with_var_bounded_array_refs_p (tree expr)
1823 {
1824 while (handled_component_p (expr))
1825 {
1826 if (TREE_CODE (expr) == ARRAY_REF
1827 && !host_integerp (array_ref_low_bound (expr), 0))
1828 return true;
1829 expr = TREE_OPERAND (expr, 0);
1830 }
1831 return false;
1832 }
1833
1834 enum mark_rw_status { SRA_MRRW_NOTHING, SRA_MRRW_DIRECT, SRA_MRRW_ASSIGN};
1835
1836 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
1837 that seems beneficial and ALLOW_REPLACEMENTS allows it. Also set all
1838 sorts of access flags appropriately along the way, notably always set
1839 grp_read and grp_assignment_read according to MARK_READ and grp_write and
1840 grp_assignment_write according to MARK_WRITE.
1841
1842 Creating a replacement for a scalar access is considered beneficial if its
1843 grp_hint is set (this means we are either attempting total scalarization or
1844 there is more than one direct read access) or according to the following
1845 table:
1846
1847 Access written to individually (once or more times)
1848 |
1849 |     Parent written to in an assignment statement
1850 |     |
1851 |     |     Access read individually _once_
1852 |     |     |
1853 |     |     |     Parent read in an assignment statement
1854 |     |     |     |
1855 |     |     |     |     Scalarize    Comment
1856 -----------------------------------------------------------------------------
1857 0     0     0     0                  No access for the scalar
1858 0     0     0     1                  No access for the scalar
1859 0     0     1     0     No           Single read - won't help
1860 0     0     1     1     No           The same case
1861 0     1     0     0                  No access for the scalar
1862 0     1     0     1                  No access for the scalar
1863 0     1     1     0     Yes          s = *g; return s.i;
1864 0     1     1     1     Yes          The same case as above
1865 1     0     0     0     No           Won't help
1866 1     0     0     1     Yes          s.i = 1; *g = s;
1867 1     0     1     0     Yes          s.i = 5; g = s.i;
1868 1     0     1     1     Yes          The same case as above
1869 1     1     0     0     No           Won't help.
1870 1     1     0     1     Yes          s.i = 1; *g = s;
1871 1     1     1     0     Yes          s = *g; return s.i;
1872 1     1     1     1     Yes          Any of the above yeses */
1873
1874 static bool
1875 analyze_access_subtree (struct access *root, bool allow_replacements,
1876 enum mark_rw_status mark_read,
1877 enum mark_rw_status mark_write)
1878 {
1879 struct access *child;
1880 HOST_WIDE_INT limit = root->offset + root->size;
1881 HOST_WIDE_INT covered_to = root->offset;
1882 bool scalar = is_gimple_reg_type (root->type);
1883 bool hole = false, sth_created = false;
1884 bool direct_read = root->grp_read;
1885 bool direct_write = root->grp_write;
1886
1887 if (root->grp_assignment_read)
1888 mark_read = SRA_MRRW_ASSIGN;
1889 else if (mark_read == SRA_MRRW_ASSIGN)
1890 {
1891 root->grp_read = 1;
1892 root->grp_assignment_read = 1;
1893 }
1894 else if (mark_read == SRA_MRRW_DIRECT)
1895 root->grp_read = 1;
1896 else if (root->grp_read)
1897 mark_read = SRA_MRRW_DIRECT;
1898
1899 if (root->grp_assignment_write)
1900 mark_write = SRA_MRRW_ASSIGN;
1901 else if (mark_write == SRA_MRRW_ASSIGN)
1902 {
1903 root->grp_write = 1;
1904 root->grp_assignment_write = 1;
1905 }
1906 else if (mark_write == SRA_MRRW_DIRECT)
1907 root->grp_write = 1;
1908 else if (root->grp_write)
1909 mark_write = SRA_MRRW_DIRECT;
1910
1911 if (root->grp_unscalarizable_region)
1912 allow_replacements = false;
1913
1914 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
1915 allow_replacements = false;
1916
1917 for (child = root->first_child; child; child = child->next_sibling)
1918 {
1919 if (!hole && child->offset < covered_to)
1920 hole = true;
1921 else
1922 covered_to += child->size;
1923
1924 sth_created |= analyze_access_subtree (child,
1925 allow_replacements && !scalar,
1926 mark_read, mark_write);
1927
1928 root->grp_unscalarized_data |= child->grp_unscalarized_data;
1929 hole |= !child->grp_covered;
1930 }
1931
1932 if (allow_replacements && scalar && !root->first_child
1933 && (root->grp_hint
1934 || ((direct_write || root->grp_assignment_write)
1935 && (direct_read || root->grp_assignment_read))))
1936 {
1937 if (dump_file && (dump_flags & TDF_DETAILS))
1938 {
1939 fprintf (dump_file, "Marking ");
1940 print_generic_expr (dump_file, root->base, 0);
1941 fprintf (dump_file, " offset: %u, size: %u: ",
1942 (unsigned) root->offset, (unsigned) root->size);
1943 fprintf (dump_file, " to be replaced.\n");
1944 }
1945
1946 root->grp_to_be_replaced = 1;
1947 sth_created = true;
1948 hole = false;
1949 }
1950 else if (covered_to < limit)
1951 hole = true;
1952
1953 if (sth_created && !hole)
1954 {
1955 root->grp_covered = 1;
1956 return true;
1957 }
1958 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
1959 root->grp_unscalarized_data = 1; /* not covered and written to */
1960 if (sth_created)
1961 return true;
1962 return false;
1963 }
1964
1965 /* Analyze all access trees linked by next_grp by means of
1966 analyze_access_subtree. */
1967 static bool
1968 analyze_access_trees (struct access *access)
1969 {
1970 bool ret = false;
1971
1972 while (access)
1973 {
1974 if (analyze_access_subtree (access, true,
1975 SRA_MRRW_NOTHING, SRA_MRRW_NOTHING))
1976 ret = true;
1977 access = access->next_grp;
1978 }
1979
1980 return ret;
1981 }
1982
1983 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
1984 size SIZE would conflict with an already existing one. If exactly such a
1985 child already exists in LACC, store a pointer to it in EXACT_MATCH. */
1986
1987 static bool
1988 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
1989 HOST_WIDE_INT size, struct access **exact_match)
1990 {
1991 struct access *child;
1992
1993 for (child = lacc->first_child; child; child = child->next_sibling)
1994 {
1995 if (child->offset == norm_offset && child->size == size)
1996 {
1997 *exact_match = child;
1998 return true;
1999 }
2000
2001 if (child->offset < norm_offset + size
2002 && child->offset + child->size > norm_offset)
2003 return true;
2004 }
2005
2006 return false;
2007 }
2008
2009 /* Create a new child access of PARENT, with all properties just like MODEL
2010 except for its offset and with its grp_write set and grp_read cleared.
2011 Return the new access or NULL if it cannot be created. Note that this access
2012 is created long after all splicing and sorting; it is not located in any
2013 access vector and is automatically a representative of its group. */
2014
2015 static struct access *
2016 create_artificial_child_access (struct access *parent, struct access *model,
2017 HOST_WIDE_INT new_offset)
2018 {
2019 struct access *access;
2020 struct access **child;
2021 tree expr = parent->base;
2022
2023 gcc_assert (!model->grp_unscalarizable_region);
2024
2025 access = (struct access *) pool_alloc (access_pool);
2026 memset (access, 0, sizeof (struct access));
2027 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2028 model->type))
2029 {
2030 access->grp_no_warning = true;
2031 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2032 new_offset, model, NULL, false);
2033 }
2034
2035 access->base = parent->base;
2036 access->expr = expr;
2037 access->offset = new_offset;
2038 access->size = model->size;
2039 access->type = model->type;
2040 access->grp_write = true;
2041 access->grp_read = false;
2042
2043 child = &parent->first_child;
2044 while (*child && (*child)->offset < new_offset)
2045 child = &(*child)->next_sibling;
2046
2047 access->next_sibling = *child;
2048 *child = access;
2049
2050 return access;
2051 }
2052
2053
2054 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2055 true if any new subaccess was created. Additionally, if RACC is a scalar
2056 access but LACC is not, change the type of the latter, if possible. */
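/* For illustration only (hypothetical variables): given "a = b;" where the
   scan has already recorded an access for b.x but none for the corresponding
   part of a, an artificial child access covering that part of a is created
   here, so that the field can later be copied replacement-to-replacement
   instead of through the whole aggregates.  */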
2057
2058 static bool
2059 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2060 {
2061 struct access *rchild;
2062 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2063 bool ret = false;
2064
2065 if (is_gimple_reg_type (lacc->type)
2066 || lacc->grp_unscalarizable_region
2067 || racc->grp_unscalarizable_region)
2068 return false;
2069
2070 if (!lacc->first_child && !racc->first_child
2071 && is_gimple_reg_type (racc->type))
2072 {
2073 tree t = lacc->base;
2074
2075 lacc->type = racc->type;
2076 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t), lacc->offset,
2077 racc->type))
2078 lacc->expr = t;
2079 else
2080 {
2081 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2082 lacc->base, lacc->offset,
2083 racc, NULL, false);
2084 lacc->grp_no_warning = true;
2085 }
2086 return false;
2087 }
2088
2089 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2090 {
2091 struct access *new_acc = NULL;
2092 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2093
2094 if (rchild->grp_unscalarizable_region)
2095 continue;
2096
2097 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2098 &new_acc))
2099 {
2100 if (new_acc)
2101 {
2102 rchild->grp_hint = 1;
2103 new_acc->grp_hint |= new_acc->grp_read;
2104 if (rchild->first_child)
2105 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2106 }
2107 continue;
2108 }
2109
2110 rchild->grp_hint = 1;
2111 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2112 if (new_acc)
2113 {
2114 ret = true;
2115 if (racc->first_child)
2116 propagate_subaccesses_across_link (new_acc, rchild);
2117 }
2118 }
2119
2120 return ret;
2121 }
2122
2123 /* Propagate all subaccesses across assignment links. */
2124
2125 static void
2126 propagate_all_subaccesses (void)
2127 {
2128 while (work_queue_head)
2129 {
2130 struct access *racc = pop_access_from_work_queue ();
2131 struct assign_link *link;
2132
2133 gcc_assert (racc->first_link);
2134
2135 for (link = racc->first_link; link; link = link->next)
2136 {
2137 struct access *lacc = link->lacc;
2138
2139 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2140 continue;
2141 lacc = lacc->group_representative;
2142 if (propagate_subaccesses_across_link (lacc, racc)
2143 && lacc->first_link)
2144 add_access_to_work_queue (lacc);
2145 }
2146 }
2147 }
2148
2149 /* Go through all accesses collected throughout the (intraprocedural) analysis
2150 stage, exclude overlapping ones, identify representatives and build trees
2151 out of them, making decisions about scalarization on the way. Return true
2152 iff there are any to-be-scalarized variables after this stage. */
2153
2154 static bool
2155 analyze_all_variable_accesses (void)
2156 {
2157 int res = 0;
2158 bitmap tmp = BITMAP_ALLOC (NULL);
2159 bitmap_iterator bi;
2160 unsigned i, max_total_scalarization_size;
2161
2162 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2163 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2164
2165 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2166 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2167 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2168 {
2169 tree var = referenced_var (i);
2170
2171 if (TREE_CODE (var) == VAR_DECL
2172 && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2173 <= max_total_scalarization_size)
2174 && type_consists_of_records_p (TREE_TYPE (var)))
2175 {
2176 completely_scalarize_record (var, var, 0, var);
2177 if (dump_file && (dump_flags & TDF_DETAILS))
2178 {
2179 fprintf (dump_file, "Will attempt to totally scalarize ");
2180 print_generic_expr (dump_file, var, 0);
2181 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2182 }
2183 }
2184 }
2185
2186 bitmap_copy (tmp, candidate_bitmap);
2187 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2188 {
2189 tree var = referenced_var (i);
2190 struct access *access;
2191
2192 access = sort_and_splice_var_accesses (var);
2193 if (!access || !build_access_trees (access))
2194 disqualify_candidate (var,
2195 "No or inhibitingly overlapping accesses.");
2196 }
2197
2198 propagate_all_subaccesses ();
2199
2200 bitmap_copy (tmp, candidate_bitmap);
2201 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2202 {
2203 tree var = referenced_var (i);
2204 struct access *access = get_first_repr_for_decl (var);
2205
2206 if (analyze_access_trees (access))
2207 {
2208 res++;
2209 if (dump_file && (dump_flags & TDF_DETAILS))
2210 {
2211 fprintf (dump_file, "\nAccess trees for ");
2212 print_generic_expr (dump_file, var, 0);
2213 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2214 dump_access_tree (dump_file, access);
2215 fprintf (dump_file, "\n");
2216 }
2217 }
2218 else
2219 disqualify_candidate (var, "No scalar replacements to be created.");
2220 }
2221
2222 BITMAP_FREE (tmp);
2223
2224 if (res)
2225 {
2226 statistics_counter_event (cfun, "Scalarized aggregates", res);
2227 return true;
2228 }
2229 else
2230 return false;
2231 }
2232
2233 /* Generate statements copying scalar replacements of accesses within a subtree
2234 into or out of AGG. ACCESS, all its children, siblings and their children
2235 are to be processed. AGG is an aggregate type expression (can be a
2236 declaration but does not have to be, it can for example also be a mem_ref or
2237 a series of handled components). TOP_OFFSET is the offset of the processed
2238 subtree which has to be subtracted from offsets of individual accesses to
2239 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2240 replacements in the interval <start_offset, start_offset + chunk_size>,
2241 otherwise copy all. GSI is a statement iterator used to place the new
2242 statements. WRITE should be true when the statements should write from AGG
2243 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2244 statements will be added after the current statement in GSI, otherwise they
2245 will be added before it. */
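/* A purely illustrative example (variable names made up): if an aggregate
   read "*p" covers two scalarized components with replacements SR_a and SR_b
   at offsets 0 and 32, a call with WRITE == true emits roughly

     SR_a = MEM[(struct s *)p].a;
     SR_b = MEM[(struct s *)p].b;

   while WRITE == false emits the mirror-image stores back into *p.  */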
2246
2247 static void
2248 generate_subtree_copies (struct access *access, tree agg,
2249 HOST_WIDE_INT top_offset,
2250 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2251 gimple_stmt_iterator *gsi, bool write,
2252 bool insert_after, location_t loc)
2253 {
2254 do
2255 {
2256 if (chunk_size && access->offset >= start_offset + chunk_size)
2257 return;
2258
2259 if (access->grp_to_be_replaced
2260 && (chunk_size == 0
2261 || access->offset + access->size > start_offset))
2262 {
2263 tree expr, repl = get_access_replacement (access);
2264 gimple stmt;
2265
2266 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2267 access, gsi, insert_after);
2268
2269 if (write)
2270 {
2271 if (access->grp_partial_lhs)
2272 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2273 !insert_after,
2274 insert_after ? GSI_NEW_STMT
2275 : GSI_SAME_STMT);
2276 stmt = gimple_build_assign (repl, expr);
2277 }
2278 else
2279 {
2280 TREE_NO_WARNING (repl) = 1;
2281 if (access->grp_partial_lhs)
2282 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2283 !insert_after,
2284 insert_after ? GSI_NEW_STMT
2285 : GSI_SAME_STMT);
2286 stmt = gimple_build_assign (expr, repl);
2287 }
2288 gimple_set_location (stmt, loc);
2289
2290 if (insert_after)
2291 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2292 else
2293 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2294 update_stmt (stmt);
2295 sra_stats.subtree_copies++;
2296 }
2297
2298 if (access->first_child)
2299 generate_subtree_copies (access->first_child, agg, top_offset,
2300 start_offset, chunk_size, gsi,
2301 write, insert_after, loc);
2302
2303 access = access->next_sibling;
2304 }
2305 while (access);
2306 }
2307
2308 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2309 the root of the subtree to be processed. GSI is the statement iterator used
2310 for inserting statements which are added after the current statement if
2311 INSERT_AFTER is true or before it otherwise. */
2312
2313 static void
2314 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2315 bool insert_after, location_t loc)
2316
2317 {
2318 struct access *child;
2319
2320 if (access->grp_to_be_replaced)
2321 {
2322 gimple stmt;
2323
2324 stmt = gimple_build_assign (get_access_replacement (access),
2325 build_zero_cst (access->type));
2326 if (insert_after)
2327 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2328 else
2329 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2330 update_stmt (stmt);
2331 gimple_set_location (stmt, loc);
2332 }
2333
2334 for (child = access->first_child; child; child = child->next_sibling)
2335 init_subtree_with_zero (child, gsi, insert_after, loc);
2336 }
2337
2338 /* Search for an access representative for the given expression EXPR and
2339 return it or NULL if it cannot be found. */
2340
2341 static struct access *
2342 get_access_for_expr (tree expr)
2343 {
2344 HOST_WIDE_INT offset, size, max_size;
2345 tree base;
2346
2347 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2348 a different size than the size of its argument and we need the latter
2349 one. */
2350 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2351 expr = TREE_OPERAND (expr, 0);
2352
2353 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2354 if (max_size == -1 || !DECL_P (base))
2355 return NULL;
2356
2357 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2358 return NULL;
2359
2360 return get_var_base_offset_size_access (base, offset, max_size);
2361 }
2362
2363 /* Replace the expression EXPR with a scalar replacement if there is one and
2364 generate other statements to do type conversion or subtree copying if
2365 necessary. GSI is used to place newly created statements; WRITE is true if
2366 the expression is being written to (it is on the LHS of a statement or an
2367 output in an assembly statement). */
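/* For illustration only (hypothetical names): if s.i has the scalar
   replacement SR_i, a use such as

     return s.i;

   simply has the reference rewritten to SR_i; when the types do not match
   (e.g. a complex or vector accessed as a different type), an extra load or
   store through the original reference is generated instead.  */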
2368
2369 static bool
2370 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2371 {
2372 location_t loc;
2373 struct access *access;
2374 tree type, bfr;
2375
2376 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2377 {
2378 bfr = *expr;
2379 expr = &TREE_OPERAND (*expr, 0);
2380 }
2381 else
2382 bfr = NULL_TREE;
2383
2384 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2385 expr = &TREE_OPERAND (*expr, 0);
2386 access = get_access_for_expr (*expr);
2387 if (!access)
2388 return false;
2389 type = TREE_TYPE (*expr);
2390
2391 loc = gimple_location (gsi_stmt (*gsi));
2392 if (access->grp_to_be_replaced)
2393 {
2394 tree repl = get_access_replacement (access);
2395 /* If we replace a non-register typed access simply use the original
2396 access expression to extract the scalar component afterwards.
2397 This happens if scalarizing a function return value or parameter
2398 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2399 gcc.c-torture/compile/20011217-1.c.
2400
2401 We also want to use this when accessing a complex or vector which can
2402 be accessed as a different type too, potentially creating a need for
2403 type conversion (see PR42196) and when scalarized unions are involved
2404 in assembler statements (see PR42398). */
2405 if (!useless_type_conversion_p (type, access->type))
2406 {
2407 tree ref;
2408
2409 ref = build_ref_for_model (loc, access->base, access->offset, access,
2410 NULL, false);
2411
2412 if (write)
2413 {
2414 gimple stmt;
2415
2416 if (access->grp_partial_lhs)
2417 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2418 false, GSI_NEW_STMT);
2419 stmt = gimple_build_assign (repl, ref);
2420 gimple_set_location (stmt, loc);
2421 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2422 }
2423 else
2424 {
2425 gimple stmt;
2426
2427 if (access->grp_partial_lhs)
2428 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2429 true, GSI_SAME_STMT);
2430 stmt = gimple_build_assign (ref, repl);
2431 gimple_set_location (stmt, loc);
2432 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2433 }
2434 }
2435 else
2436 *expr = repl;
2437 sra_stats.exprs++;
2438 }
2439
2440 if (access->first_child)
2441 {
2442 HOST_WIDE_INT start_offset, chunk_size;
2443 if (bfr
2444 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2445 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2446 {
2447 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2448 start_offset = access->offset
2449 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2450 }
2451 else
2452 start_offset = chunk_size = 0;
2453
2454 generate_subtree_copies (access->first_child, access->base, 0,
2455 start_offset, chunk_size, gsi, write, write,
2456 loc);
2457 }
2458 return true;
2459 }
2460
2461 /* Says where the scalar replacements of the RHS have been written to when a
2462 replacement of the LHS of an assignment cannot be directly loaded from a
2463 replacement of the RHS. */
2464 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2465 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2466 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2467
2468 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2469 base aggregate if there is unscalarized data or directly to the LHS of the
2470 statement pointed to by GSI otherwise. */
2471
2472 static enum unscalarized_data_handling
2473 handle_unscalarized_data_in_subtree (struct access *top_racc,
2474 gimple_stmt_iterator *gsi)
2475 {
2476 if (top_racc->grp_unscalarized_data)
2477 {
2478 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2479 gsi, false, false,
2480 gimple_location (gsi_stmt (*gsi)));
2481 return SRA_UDH_RIGHT;
2482 }
2483 else
2484 {
2485 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2486 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2487 0, 0, gsi, false, false,
2488 gimple_location (gsi_stmt (*gsi)));
2489 return SRA_UDH_LEFT;
2490 }
2491 }
2492
2493
2494 /* Try to generate statements to load all sub-replacements in an access subtree
2495 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2496 If that is not possible, refresh the TOP_RACC base aggregate and load the
2497 accesses from it. LEFT_OFFSET is the offset of the whole LHS subtree being
2498 copied. NEW_GSI is a statement iterator used for statement insertions after
2499 the original assignment, OLD_GSI is used to insert statements before the
2500 assignment. *REFRESHED keeps the information whether we have needed to
2501 refresh replacements of the LHS and from which side of the assignment this
2502 takes place. */
2503
2504 static void
2505 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2506 HOST_WIDE_INT left_offset,
2507 gimple_stmt_iterator *old_gsi,
2508 gimple_stmt_iterator *new_gsi,
2509 enum unscalarized_data_handling *refreshed)
2510 {
2511 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2512 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2513 {
2514 if (lacc->grp_to_be_replaced)
2515 {
2516 struct access *racc;
2517 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2518 gimple stmt;
2519 tree rhs;
2520
2521 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2522 if (racc && racc->grp_to_be_replaced)
2523 {
2524 rhs = get_access_replacement (racc);
2525 if (!useless_type_conversion_p (lacc->type, racc->type))
2526 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2527 }
2528 else
2529 {
2530 /* No suitable access on the right hand side, need to load from
2531 the aggregate. See if we have to update it first... */
2532 if (*refreshed == SRA_UDH_NONE)
2533 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2534 old_gsi);
2535
2536 if (*refreshed == SRA_UDH_LEFT)
2537 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2538 new_gsi, true);
2539 else
2540 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2541 new_gsi, true);
2542 }
2543
2544 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2545 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2546 gimple_set_location (stmt, loc);
2547 update_stmt (stmt);
2548 sra_stats.subreplacements++;
2549 }
2550 else if (*refreshed == SRA_UDH_NONE
2551 && lacc->grp_read && !lacc->grp_covered)
2552 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2553 old_gsi);
2554
2555 if (lacc->first_child)
2556 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2557 old_gsi, new_gsi, refreshed);
2558 }
2559 }
2560
2561 /* Result code for SRA assignment modification. */
2562 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2563 SRA_AM_MODIFIED, /* stmt changed but not
2564 removed */
2565 SRA_AM_REMOVED }; /* stmt eliminated */
2566
2567 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2568 to the assignment and GSI is the statement iterator pointing at it. Returns
2569 the same values as sra_modify_assign. */
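/* A hypothetical illustration (names made up): for a fully covered aggregate
   with replacements SR_i and SR_f, an empty-CONSTRUCTOR assignment

     s = {};

   is replaced by "SR_i = 0; SR_f = 0.0;" and the original statement is
   removed; if the aggregate is not fully covered, the zeroing statements are
   added after the original statement instead.  */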
2570
2571 static enum assignment_mod_result
2572 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2573 {
2574 tree lhs = gimple_assign_lhs (*stmt);
2575 struct access *acc;
2576 location_t loc;
2577
2578 acc = get_access_for_expr (lhs);
2579 if (!acc)
2580 return SRA_AM_NONE;
2581
2582 loc = gimple_location (*stmt);
2583 if (VEC_length (constructor_elt,
2584 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2585 {
2586 /* I have never seen this code path trigger but if it can happen the
2587 following should handle it gracefully. */
2588 if (access_has_children_p (acc))
2589 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2590 true, true, loc);
2591 return SRA_AM_MODIFIED;
2592 }
2593
2594 if (acc->grp_covered)
2595 {
2596 init_subtree_with_zero (acc, gsi, false, loc);
2597 unlink_stmt_vdef (*stmt);
2598 gsi_remove (gsi, true);
2599 return SRA_AM_REMOVED;
2600 }
2601 else
2602 {
2603 init_subtree_with_zero (acc, gsi, true, loc);
2604 return SRA_AM_MODIFIED;
2605 }
2606 }
2607
2608 /* Create and return a new suitable default definition SSA_NAME for RACC which
2609 is an access describing an uninitialized part of an aggregate that is being
2610 loaded. */
2611
2612 static tree
2613 get_repl_default_def_ssa_name (struct access *racc)
2614 {
2615 tree repl, decl;
2616
2617 decl = get_unrenamed_access_replacement (racc);
2618
2619 repl = gimple_default_def (cfun, decl);
2620 if (!repl)
2621 {
2622 repl = make_ssa_name (decl, gimple_build_nop ());
2623 set_default_def (decl, repl);
2624 }
2625
2626 return repl;
2627 }
2628
2629 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2630 somewhere in it. */
2631
2632 static inline bool
2633 contains_bitfld_comp_ref_p (const_tree ref)
2634 {
2635 while (handled_component_p (ref))
2636 {
2637 if (TREE_CODE (ref) == COMPONENT_REF
2638 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2639 return true;
2640 ref = TREE_OPERAND (ref, 0);
2641 }
2642
2643 return false;
2644 }
2645
2646 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2647 bit-field field declaration somewhere in it. */
2648
2649 static inline bool
2650 contains_vce_or_bfcref_p (const_tree ref)
2651 {
2652 while (handled_component_p (ref))
2653 {
2654 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2655 || (TREE_CODE (ref) == COMPONENT_REF
2656 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2657 return true;
2658 ref = TREE_OPERAND (ref, 0);
2659 }
2660
2661 return false;
2662 }
2663
2664 /* Examine both sides of the assignment statement pointed to by STMT, replace
2665 them with a scalar replacement if there is one and generate copying of
2666 replacements if scalarized aggregates have been used in the assignment. GSI
2667 is used to hold generated statements for type conversions and subtree
2668 copying. */
2669
2670 static enum assignment_mod_result
2671 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2672 {
2673 struct access *lacc, *racc;
2674 tree lhs, rhs;
2675 bool modify_this_stmt = false;
2676 bool force_gimple_rhs = false;
2677 location_t loc;
2678 gimple_stmt_iterator orig_gsi = *gsi;
2679
2680 if (!gimple_assign_single_p (*stmt))
2681 return SRA_AM_NONE;
2682 lhs = gimple_assign_lhs (*stmt);
2683 rhs = gimple_assign_rhs1 (*stmt);
2684
2685 if (TREE_CODE (rhs) == CONSTRUCTOR)
2686 return sra_modify_constructor_assign (stmt, gsi);
2687
2688 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2689 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2690 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2691 {
2692 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2693 gsi, false);
2694 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2695 gsi, true);
2696 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2697 }
2698
2699 lacc = get_access_for_expr (lhs);
2700 racc = get_access_for_expr (rhs);
2701 if (!lacc && !racc)
2702 return SRA_AM_NONE;
2703
2704 loc = gimple_location (*stmt);
2705 if (lacc && lacc->grp_to_be_replaced)
2706 {
2707 lhs = get_access_replacement (lacc);
2708 gimple_assign_set_lhs (*stmt, lhs);
2709 modify_this_stmt = true;
2710 if (lacc->grp_partial_lhs)
2711 force_gimple_rhs = true;
2712 sra_stats.exprs++;
2713 }
2714
2715 if (racc && racc->grp_to_be_replaced)
2716 {
2717 rhs = get_access_replacement (racc);
2718 modify_this_stmt = true;
2719 if (racc->grp_partial_lhs)
2720 force_gimple_rhs = true;
2721 sra_stats.exprs++;
2722 }
2723
2724 if (modify_this_stmt)
2725 {
2726 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2727 {
2728 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2729 ??? This should move to fold_stmt which we simply should
2730 call after building a VIEW_CONVERT_EXPR here. */
2731 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2732 && !contains_bitfld_comp_ref_p (lhs)
2733 && !access_has_children_p (lacc))
2734 {
2735 lhs = build_ref_for_offset (loc, lhs, 0, TREE_TYPE (rhs),
2736 gsi, false);
2737 gimple_assign_set_lhs (*stmt, lhs);
2738 }
2739 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2740 && !contains_vce_or_bfcref_p (rhs)
2741 && !access_has_children_p (racc))
2742 rhs = build_ref_for_offset (loc, rhs, 0, TREE_TYPE (lhs),
2743 gsi, false);
2744
2745 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2746 {
2747 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
2748 rhs);
2749 if (is_gimple_reg_type (TREE_TYPE (lhs))
2750 && TREE_CODE (lhs) != SSA_NAME)
2751 force_gimple_rhs = true;
2752 }
2753 }
2754 }
2755
2756 /* From this point on, the function deals with assignments between
2757 aggregates when at least one has scalar reductions of some of its
2758 components. There are three possible scenarios: 1) both the LHS and the RHS
2759 have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
2760
2761 In the first case, we would like to load the LHS components from RHS
2762 components whenever possible. If that is not possible, we would like to
2763 read it directly from the RHS (after updating it by storing in it its own
2764 components). If there are some necessary unscalarized data in the LHS,
2765 those will be loaded by the original assignment too. If neither of these
2766 cases happen, the original statement can be removed. Most of this is done
2767 by load_assign_lhs_subreplacements.
2768
2769 In the second case, we would like to store all RHS scalarized components
2770 directly into LHS and if they cover the aggregate completely, remove the
2771 statement too. In the third case, we want the LHS components to be loaded
2772 directly from the RHS (DSE will remove the original statement if it
2773 becomes redundant).
2774
2775 This is a bit complex but manageable when types match and when unions do
2776 not cause confusion in a way that we cannot really load a component of LHS
2777 from the RHS or vice versa (the access representing this level can have
2778 subaccesses that are accessible only through a different union field at a
2779 higher level - different from the one used in the examined expression).
2780 Unions are fun.
2781
2782 Therefore, I specially handle a fourth case, happening when there is a
2783 specific type cast or it is impossible to locate a scalarized subaccess on
2784 the other side of the expression. If that happens, I simply "refresh" the
2785 RHS by storing its scalarized components into it, leave the original
2786 statement there to do the copying and then load the scalar replacements of
2787 the LHS. This is what the first branch does. */
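/* For illustration only (a hypothetical aggregate copy, names made up): in
   "dst = src;" where src.a and src.b have replacements SRs_a and SRs_b but
   dst has none (the second scenario above), the code below emits roughly

     dst.a = SRs_a;
     dst.b = SRs_b;

   and, if src has no unscalarized data left, removes the original aggregate
   copy as redundant.  */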
2788
2789 if (gimple_has_volatile_ops (*stmt)
2790 || contains_vce_or_bfcref_p (rhs)
2791 || contains_vce_or_bfcref_p (lhs))
2792 {
2793 if (access_has_children_p (racc))
2794 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2795 gsi, false, false, loc);
2796 if (access_has_children_p (lacc))
2797 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2798 gsi, true, true, loc);
2799 sra_stats.separate_lhs_rhs_handling++;
2800 }
2801 else
2802 {
2803 if (access_has_children_p (lacc) && access_has_children_p (racc))
2804 {
2805 gimple_stmt_iterator orig_gsi = *gsi;
2806 enum unscalarized_data_handling refreshed;
2807
2808 if (lacc->grp_read && !lacc->grp_covered)
2809 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
2810 else
2811 refreshed = SRA_UDH_NONE;
2812
2813 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
2814 &orig_gsi, gsi, &refreshed);
2815 if (refreshed != SRA_UDH_RIGHT)
2816 {
2817 gsi_next (gsi);
2818 unlink_stmt_vdef (*stmt);
2819 gsi_remove (&orig_gsi, true);
2820 sra_stats.deleted++;
2821 return SRA_AM_REMOVED;
2822 }
2823 }
2824 else
2825 {
2826 if (racc)
2827 {
2828 if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
2829 {
2830 if (dump_file)
2831 {
2832 fprintf (dump_file, "Removing load: ");
2833 print_gimple_stmt (dump_file, *stmt, 0, 0);
2834 }
2835
2836 if (TREE_CODE (lhs) == SSA_NAME)
2837 {
2838 rhs = get_repl_default_def_ssa_name (racc);
2839 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2840 TREE_TYPE (rhs)))
2841 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
2842 TREE_TYPE (lhs), rhs);
2843 }
2844 else
2845 {
2846 if (racc->first_child)
2847 generate_subtree_copies (racc->first_child, lhs,
2848 racc->offset, 0, 0, gsi,
2849 false, false, loc);
2850
2851 gcc_assert (*stmt == gsi_stmt (*gsi));
2852 unlink_stmt_vdef (*stmt);
2853 gsi_remove (gsi, true);
2854 sra_stats.deleted++;
2855 return SRA_AM_REMOVED;
2856 }
2857 }
2858 else if (racc->first_child)
2859 generate_subtree_copies (racc->first_child, lhs, racc->offset,
2860 0, 0, gsi, false, true, loc);
2861 }
2862 if (access_has_children_p (lacc))
2863 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
2864 0, 0, gsi, true, true, loc);
2865 }
2866 }
2867
2868 /* This gimplification must be done after generate_subtree_copies, lest we
2869 insert the subtree copies in the middle of the gimplified sequence. */
2870 if (force_gimple_rhs)
2871 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
2872 true, GSI_SAME_STMT);
2873 if (gimple_assign_rhs1 (*stmt) != rhs)
2874 {
2875 modify_this_stmt = true;
2876 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
2877 gcc_assert (*stmt == gsi_stmt (orig_gsi));
2878 }
2879
2880 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2881 }
2882
2883 /* Traverse the function body and carry out all modifications as decided in
2884 analyze_all_variable_accesses. Return true iff the CFG has been
2885 changed. */
2886
2887 static bool
2888 sra_modify_function_body (void)
2889 {
2890 bool cfg_changed = false;
2891 basic_block bb;
2892
2893 FOR_EACH_BB (bb)
2894 {
2895 gimple_stmt_iterator gsi = gsi_start_bb (bb);
2896 while (!gsi_end_p (gsi))
2897 {
2898 gimple stmt = gsi_stmt (gsi);
2899 enum assignment_mod_result assign_result;
2900 bool modified = false, deleted = false;
2901 tree *t;
2902 unsigned i;
2903
2904 switch (gimple_code (stmt))
2905 {
2906 case GIMPLE_RETURN:
2907 t = gimple_return_retval_ptr (stmt);
2908 if (*t != NULL_TREE)
2909 modified |= sra_modify_expr (t, &gsi, false);
2910 break;
2911
2912 case GIMPLE_ASSIGN:
2913 assign_result = sra_modify_assign (&stmt, &gsi);
2914 modified |= assign_result == SRA_AM_MODIFIED;
2915 deleted = assign_result == SRA_AM_REMOVED;
2916 break;
2917
2918 case GIMPLE_CALL:
2919 /* Operands must be processed before the lhs. */
2920 for (i = 0; i < gimple_call_num_args (stmt); i++)
2921 {
2922 t = gimple_call_arg_ptr (stmt, i);
2923 modified |= sra_modify_expr (t, &gsi, false);
2924 }
2925
2926 if (gimple_call_lhs (stmt))
2927 {
2928 t = gimple_call_lhs_ptr (stmt);
2929 modified |= sra_modify_expr (t, &gsi, true);
2930 }
2931 break;
2932
2933 case GIMPLE_ASM:
2934 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
2935 {
2936 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
2937 modified |= sra_modify_expr (t, &gsi, false);
2938 }
2939 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
2940 {
2941 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
2942 modified |= sra_modify_expr (t, &gsi, true);
2943 }
2944 break;
2945
2946 default:
2947 break;
2948 }
2949
2950 if (modified)
2951 {
2952 update_stmt (stmt);
2953 if (maybe_clean_eh_stmt (stmt)
2954 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2955 cfg_changed = true;
2956 }
2957 if (!deleted)
2958 gsi_next (&gsi);
2959 }
2960 }
2961
2962 return cfg_changed;
2963 }
2964
2965 /* Generate statements initializing scalar replacements of parts of function
2966 parameters. */
2967
2968 static void
2969 initialize_parameter_reductions (void)
2970 {
2971 gimple_stmt_iterator gsi;
2972 gimple_seq seq = NULL;
2973 tree parm;
2974
2975 for (parm = DECL_ARGUMENTS (current_function_decl);
2976 parm;
2977 parm = DECL_CHAIN (parm))
2978 {
2979 VEC (access_p, heap) *access_vec;
2980 struct access *access;
2981
2982 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
2983 continue;
2984 access_vec = get_base_access_vector (parm);
2985 if (!access_vec)
2986 continue;
2987
2988 if (!seq)
2989 {
2990 seq = gimple_seq_alloc ();
2991 gsi = gsi_start (seq);
2992 }
2993
2994 for (access = VEC_index (access_p, access_vec, 0);
2995 access;
2996 access = access->next_grp)
2997 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
2998 EXPR_LOCATION (parm));
2999 }
3000
3001 if (seq)
3002 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3003 }
3004
3005 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3006 it reveals there are components of some aggregates to be scalarized, it runs
3007 the required transformations. */
3008 static unsigned int
3009 perform_intra_sra (void)
3010 {
3011 int ret = 0;
3012 sra_initialize ();
3013
3014 if (!find_var_candidates ())
3015 goto out;
3016
3017 if (!scan_function ())
3018 goto out;
3019
3020 if (!analyze_all_variable_accesses ())
3021 goto out;
3022
3023 if (sra_modify_function_body ())
3024 ret = TODO_update_ssa | TODO_cleanup_cfg;
3025 else
3026 ret = TODO_update_ssa;
3027 initialize_parameter_reductions ();
3028
3029 statistics_counter_event (cfun, "Scalar replacements created",
3030 sra_stats.replacements);
3031 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3032 statistics_counter_event (cfun, "Subtree copy stmts",
3033 sra_stats.subtree_copies);
3034 statistics_counter_event (cfun, "Subreplacement stmts",
3035 sra_stats.subreplacements);
3036 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3037 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3038 sra_stats.separate_lhs_rhs_handling);
3039
3040 out:
3041 sra_deinitialize ();
3042 return ret;
3043 }
3044
3045 /* Perform early intraprocedural SRA. */
3046 static unsigned int
3047 early_intra_sra (void)
3048 {
3049 sra_mode = SRA_MODE_EARLY_INTRA;
3050 return perform_intra_sra ();
3051 }
3052
3053 /* Perform "late" intraprocedural SRA. */
3054 static unsigned int
3055 late_intra_sra (void)
3056 {
3057 sra_mode = SRA_MODE_INTRA;
3058 return perform_intra_sra ();
3059 }
3060
3061
3062 static bool
3063 gate_intra_sra (void)
3064 {
3065 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3066 }
3067
3068
3069 struct gimple_opt_pass pass_sra_early =
3070 {
3071 {
3072 GIMPLE_PASS,
3073 "esra", /* name */
3074 gate_intra_sra, /* gate */
3075 early_intra_sra, /* execute */
3076 NULL, /* sub */
3077 NULL, /* next */
3078 0, /* static_pass_number */
3079 TV_TREE_SRA, /* tv_id */
3080 PROP_cfg | PROP_ssa, /* properties_required */
3081 0, /* properties_provided */
3082 0, /* properties_destroyed */
3083 0, /* todo_flags_start */
3084 TODO_dump_func
3085 | TODO_update_ssa
3086 | TODO_ggc_collect
3087 | TODO_verify_ssa /* todo_flags_finish */
3088 }
3089 };
3090
3091 struct gimple_opt_pass pass_sra =
3092 {
3093 {
3094 GIMPLE_PASS,
3095 "sra", /* name */
3096 gate_intra_sra, /* gate */
3097 late_intra_sra, /* execute */
3098 NULL, /* sub */
3099 NULL, /* next */
3100 0, /* static_pass_number */
3101 TV_TREE_SRA, /* tv_id */
3102 PROP_cfg | PROP_ssa, /* properties_required */
3103 0, /* properties_provided */
3104 0, /* properties_destroyed */
3105 TODO_update_address_taken, /* todo_flags_start */
3106 TODO_dump_func
3107 | TODO_update_ssa
3108 | TODO_ggc_collect
3109 | TODO_verify_ssa /* todo_flags_finish */
3110 }
3111 };
3112
3113
3114 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3115 parameter. */
3116
3117 static bool
3118 is_unused_scalar_param (tree parm)
3119 {
3120 tree name;
3121 return (is_gimple_reg (parm)
3122 && (!(name = gimple_default_def (cfun, parm))
3123 || has_zero_uses (name)));
3124 }
3125
3126 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3127 examine whether there are any direct or otherwise infeasible ones. If so,
3128 return true, otherwise return false. PARM must be a gimple register with a
3129 non-NULL default definition. */
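/* A hypothetical illustration (not an exhaustive list of cases): for
   "int f (int *p)", a use such as "return *p;" is a plain dereference and by
   itself does not preclude IPA-SRA, whereas keeping the pointer value
   around, as in "q = p;", or passing p itself to another function counts as
   a direct use and makes the parameter ineligible.  */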
3130
3131 static bool
3132 ptr_parm_has_direct_uses (tree parm)
3133 {
3134 imm_use_iterator ui;
3135 gimple stmt;
3136 tree name = gimple_default_def (cfun, parm);
3137 bool ret = false;
3138
3139 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3140 {
3141 int uses_ok = 0;
3142 use_operand_p use_p;
3143
3144 if (is_gimple_debug (stmt))
3145 continue;
3146
3147 /* Valid uses include dereferences on the lhs and the rhs. */
3148 if (gimple_has_lhs (stmt))
3149 {
3150 tree lhs = gimple_get_lhs (stmt);
3151 while (handled_component_p (lhs))
3152 lhs = TREE_OPERAND (lhs, 0);
3153 if (TREE_CODE (lhs) == MEM_REF
3154 && TREE_OPERAND (lhs, 0) == name
3155 && integer_zerop (TREE_OPERAND (lhs, 1))
3156 && types_compatible_p (TREE_TYPE (lhs),
3157 TREE_TYPE (TREE_TYPE (name))))
3158 uses_ok++;
3159 }
3160 if (gimple_assign_single_p (stmt))
3161 {
3162 tree rhs = gimple_assign_rhs1 (stmt);
3163 while (handled_component_p (rhs))
3164 rhs = TREE_OPERAND (rhs, 0);
3165 if (TREE_CODE (rhs) == MEM_REF
3166 && TREE_OPERAND (rhs, 0) == name
3167 && integer_zerop (TREE_OPERAND (rhs, 1))
3168 && types_compatible_p (TREE_TYPE (rhs),
3169 TREE_TYPE (TREE_TYPE (name))))
3170 uses_ok++;
3171 }
3172 else if (is_gimple_call (stmt))
3173 {
3174 unsigned i;
3175 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3176 {
3177 tree arg = gimple_call_arg (stmt, i);
3178 while (handled_component_p (arg))
3179 arg = TREE_OPERAND (arg, 0);
3180 if (TREE_CODE (arg) == MEM_REF
3181 && TREE_OPERAND (arg, 0) == name
3182 && integer_zerop (TREE_OPERAND (arg, 1))
3183 && types_compatible_p (TREE_TYPE (arg),
3184 TREE_TYPE (TREE_TYPE (name))))
3185 uses_ok++;
3186 }
3187 }
3188
3189 /* If the number of valid uses does not match the number of
3190 uses in this stmt there is an unhandled use. */
3191 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3192 --uses_ok;
3193
3194 if (uses_ok != 0)
3195 ret = true;
3196
3197 if (ret)
3198 BREAK_FROM_IMM_USE_STMT (ui);
3199 }
3200
3201 return ret;
3202 }
3203
3204 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3205 them in candidate_bitmap. Note that these do not necessarily include
3206 parameters which are unused and thus can be removed. Return true iff any
3207 such candidate has been found. */
3208
3209 static bool
3210 find_param_candidates (void)
3211 {
3212 tree parm;
3213 int count = 0;
3214 bool ret = false;
3215
3216 for (parm = DECL_ARGUMENTS (current_function_decl);
3217 parm;
3218 parm = DECL_CHAIN (parm))
3219 {
3220 tree type = TREE_TYPE (parm);
3221
3222 count++;
3223
3224 if (TREE_THIS_VOLATILE (parm)
3225 || TREE_ADDRESSABLE (parm)
3226 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3227 continue;
3228
3229 if (is_unused_scalar_param (parm))
3230 {
3231 ret = true;
3232 continue;
3233 }
3234
3235 if (POINTER_TYPE_P (type))
3236 {
3237 type = TREE_TYPE (type);
3238
3239 if (TREE_CODE (type) == FUNCTION_TYPE
3240 || TYPE_VOLATILE (type)
3241 || (TREE_CODE (type) == ARRAY_TYPE
3242 && TYPE_NONALIASED_COMPONENT (type))
3243 || !is_gimple_reg (parm)
3244 || is_va_list_type (type)
3245 || ptr_parm_has_direct_uses (parm))
3246 continue;
3247 }
3248 else if (!AGGREGATE_TYPE_P (type))
3249 continue;
3250
3251 if (!COMPLETE_TYPE_P (type)
3252 || !host_integerp (TYPE_SIZE (type), 1)
3253 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3254 || (AGGREGATE_TYPE_P (type)
3255 && type_internals_preclude_sra_p (type)))
3256 continue;
3257
3258 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3259 ret = true;
3260 if (dump_file && (dump_flags & TDF_DETAILS))
3261 {
3262 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3263 print_generic_expr (dump_file, parm, 0);
3264 fprintf (dump_file, "\n");
3265 }
3266 }
3267
3268 func_param_count = count;
3269 return ret;
3270 }
3271
3272 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3273 maybe_modified. */
3274
3275 static bool
3276 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3277 void *data)
3278 {
3279 struct access *repr = (struct access *) data;
3280
3281 repr->grp_maybe_modified = 1;
3282 return true;
3283 }
3284
3285 /* Analyze what representatives (in linked lists accessible from
3286 REPRESENTATIVES) can be modified by side effects of statements in the
3287 current function. */
3288
3289 static void
3290 analyze_modified_params (VEC (access_p, heap) *representatives)
3291 {
3292 int i;
3293
3294 for (i = 0; i < func_param_count; i++)
3295 {
3296 struct access *repr;
3297
3298 for (repr = VEC_index (access_p, representatives, i);
3299 repr;
3300 repr = repr->next_grp)
3301 {
3302 struct access *access;
3303 bitmap visited;
3304 ao_ref ar;
3305
3306 if (no_accesses_p (repr))
3307 continue;
3308 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3309 || repr->grp_maybe_modified)
3310 continue;
3311
3312 ao_ref_init (&ar, repr->expr);
3313 visited = BITMAP_ALLOC (NULL);
3314 for (access = repr; access; access = access->next_sibling)
3315 {
3316 /* All accesses are read ones, otherwise grp_maybe_modified would
3317 be trivially set. */
3318 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3319 mark_maybe_modified, repr, &visited);
3320 if (repr->grp_maybe_modified)
3321 break;
3322 }
3323 BITMAP_FREE (visited);
3324 }
3325 }
3326 }
3327
3328 /* Propagate distances in bb_dereferences in the opposite direction to the
3329 control flow edges, in each step storing the maximum of the current value
3330 and the minimum of all successors. These steps are repeated until the table
3331 stabilizes. Note that BBs which might terminate the function (according to
3332 the final_bbs bitmap) are never updated in this way. */
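/* A small made-up numeric illustration: if a block currently guarantees a
   dereference distance of 4 bits for some parameter while its successors
   guarantee at least 8 and 12 bits respectively, the block's entry becomes
   max (4, min (8, 12)) = 8 and its predecessors are re-queued because the
   value changed.  */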
3333
3334 static void
3335 propagate_dereference_distances (void)
3336 {
3337 VEC (basic_block, heap) *queue;
3338 basic_block bb;
3339
3340 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3341 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3342 FOR_EACH_BB (bb)
3343 {
3344 VEC_quick_push (basic_block, queue, bb);
3345 bb->aux = bb;
3346 }
3347
3348 while (!VEC_empty (basic_block, queue))
3349 {
3350 edge_iterator ei;
3351 edge e;
3352 bool change = false;
3353 int i;
3354
3355 bb = VEC_pop (basic_block, queue);
3356 bb->aux = NULL;
3357
3358 if (bitmap_bit_p (final_bbs, bb->index))
3359 continue;
3360
3361 for (i = 0; i < func_param_count; i++)
3362 {
3363 int idx = bb->index * func_param_count + i;
3364 bool first = true;
3365 HOST_WIDE_INT inh = 0;
3366
3367 FOR_EACH_EDGE (e, ei, bb->succs)
3368 {
3369 int succ_idx = e->dest->index * func_param_count + i;
3370
3371 if (e->src == EXIT_BLOCK_PTR)
3372 continue;
3373
3374 if (first)
3375 {
3376 first = false;
3377 inh = bb_dereferences [succ_idx];
3378 }
3379 else if (bb_dereferences [succ_idx] < inh)
3380 inh = bb_dereferences [succ_idx];
3381 }
3382
3383 if (!first && bb_dereferences[idx] < inh)
3384 {
3385 bb_dereferences[idx] = inh;
3386 change = true;
3387 }
3388 }
3389
3390 if (change && !bitmap_bit_p (final_bbs, bb->index))
3391 FOR_EACH_EDGE (e, ei, bb->preds)
3392 {
3393 if (e->src->aux)
3394 continue;
3395
3396 e->src->aux = e->src;
3397 VEC_quick_push (basic_block, queue, e->src);
3398 }
3399 }
3400
3401 VEC_free (basic_block, heap, queue);
3402 }
3403
3404 /* Dump a dereferences TABLE with heading STR to file F. */
3405
3406 static void
3407 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3408 {
3409 basic_block bb;
3410
3411 fprintf (f, "%s", str);
3412 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3413 {
3414 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3415 if (bb != EXIT_BLOCK_PTR)
3416 {
3417 int i;
3418 for (i = 0; i < func_param_count; i++)
3419 {
3420 int idx = bb->index * func_param_count + i;
3421 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3422 }
3423 }
3424 fprintf (f, "\n");
3425 }
3426 fprintf (f, "\n");
3427 }
3428
3429 /* Determine which (parts of) parameters passed by reference and not assigned
3430 to are not certainly dereferenced in this function, which means the
3431 dereferencing cannot be safely moved to the caller without potentially
3432 introducing a segfault. Mark such REPRESENTATIVES as
3433 grp_not_necessarilly_dereferenced.
3434
3435 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3436 part, is calculated for each pointer parameter (rather than a simple
3437 boolean) in order to handle cases when only a fraction of the whole
3438 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3439 an example).
3440
3441 The maximum dereference distances for each pointer parameter and BB are
3442 already stored in bb_dereferences. This routine simply propagates these
3443 values upwards by propagate_dereference_distances and then compares the
3444 distances of individual parameters in the ENTRY BB to the equivalent
3445 distances of each representative of a (fraction of a) parameter. */
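/* For illustration only (a hypothetical function): in

     int f (struct S *p, int flag)
     {
       if (flag)
         return p->x;
       return 0;
     }

   p is dereferenced only on one path, so the corresponding representative is
   marked grp_not_necessarilly_dereferenced and the load cannot be hoisted
   into the callers.  */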
3446
3447 static void
3448 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3449 {
3450 int i;
3451
3452 if (dump_file && (dump_flags & TDF_DETAILS))
3453 dump_dereferences_table (dump_file,
3454 "Dereference table before propagation:\n",
3455 bb_dereferences);
3456
3457 propagate_dereference_distances ();
3458
3459 if (dump_file && (dump_flags & TDF_DETAILS))
3460 dump_dereferences_table (dump_file,
3461 "Dereference table after propagation:\n",
3462 bb_dereferences);
3463
3464 for (i = 0; i < func_param_count; i++)
3465 {
3466 struct access *repr = VEC_index (access_p, representatives, i);
3467 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3468
3469 if (!repr || no_accesses_p (repr))
3470 continue;
3471
3472 do
3473 {
3474 if ((repr->offset + repr->size) > bb_dereferences[idx])
3475 repr->grp_not_necessarilly_dereferenced = 1;
3476 repr = repr->next_grp;
3477 }
3478 while (repr);
3479 }
3480 }
3481
3482 /* Return the representative access for the parameter declaration PARM if it is
3483 a scalar passed by reference which is not written to and the pointer value
3484 is not used directly. Thus, if it is legal to dereference it in the caller
3485 and we can rule out modifications through aliases, such a parameter should be
3486 turned into one passed by value. Return NULL otherwise. */
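/* A hypothetical example: in "int f (const int *p) { return *p + 1; }" the
   only use of p is a read-only dereference, so p is a candidate for being
   turned into a parameter passed by value, provided the dereference is also
   known to be safe to perform in the callers.  */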
3487
3488 static struct access *
3489 unmodified_by_ref_scalar_representative (tree parm)
3490 {
3491 int i, access_count;
3492 struct access *repr;
3493 VEC (access_p, heap) *access_vec;
3494
3495 access_vec = get_base_access_vector (parm);
3496 gcc_assert (access_vec);
3497 repr = VEC_index (access_p, access_vec, 0);
3498 if (repr->write)
3499 return NULL;
3500 repr->group_representative = repr;
3501
3502 access_count = VEC_length (access_p, access_vec);
3503 for (i = 1; i < access_count; i++)
3504 {
3505 struct access *access = VEC_index (access_p, access_vec, i);
3506 if (access->write)
3507 return NULL;
3508 access->group_representative = repr;
3509 access->next_sibling = repr->next_sibling;
3510 repr->next_sibling = access;
3511 }
3512
3513 repr->grp_read = 1;
3514 repr->grp_scalar_ptr = 1;
3515 return repr;
3516 }
3517
3518 /* Return true iff this access precludes IPA-SRA of the parameter it is
3519 associated with. */
3520
3521 static bool
3522 access_precludes_ipa_sra_p (struct access *access)
3523 {
3524 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3525 is an incompatible assignment in a call statement (and possibly even in asm
3526 statements). This can be relaxed by using a new temporary but only for
3527 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3528 intraprocedural SRA we deal with this by keeping the old aggregate around,
3529 something we cannot do in IPA-SRA.) */
3530 if (access->write
3531 && (is_gimple_call (access->stmt)
3532 || gimple_code (access->stmt) == GIMPLE_ASM))
3533 return true;
3534
3535 return false;
3536 }
3537
3538
3539 /* Sort collected accesses for parameter PARM, identify representatives for
3540 each accessed region and link them together. Return NULL if there are
3541 different but overlapping accesses, return the special pointer value
3542 &no_accesses_representant if there are no accesses for this parameter at all,
3543 and return the first representative otherwise. Set *RO_GRP if there is a
3544 group with only read (i.e. no write) accesses. */
3545
3546 static struct access *
3547 splice_param_accesses (tree parm, bool *ro_grp)
3548 {
3549 int i, j, access_count, group_count;
3550 int agg_size, total_size = 0;
3551 struct access *access, *res, **prev_acc_ptr = &res;
3552 VEC (access_p, heap) *access_vec;
3553
3554 access_vec = get_base_access_vector (parm);
3555 if (!access_vec)
3556 return &no_accesses_representant;
3557 access_count = VEC_length (access_p, access_vec);
3558
3559 VEC_qsort (access_p, access_vec, compare_access_positions);
3560
3561 i = 0;
3562 total_size = 0;
3563 group_count = 0;
3564 while (i < access_count)
3565 {
3566 bool modification;
3567 access = VEC_index (access_p, access_vec, i);
3568 modification = access->write;
3569 if (access_precludes_ipa_sra_p (access))
3570 return NULL;
3571
3572 /* Access is about to become group representative unless we find some
3573 nasty overlap which would preclude us from breaking this parameter
3574 apart. */
3575
3576 j = i + 1;
3577 while (j < access_count)
3578 {
3579 struct access *ac2 = VEC_index (access_p, access_vec, j);
3580 if (ac2->offset != access->offset)
3581 {
3582 /* All or nothing law for parameters. */
3583 if (access->offset + access->size > ac2->offset)
3584 return NULL;
3585 else
3586 break;
3587 }
3588 else if (ac2->size != access->size)
3589 return NULL;
3590
3591 if (access_precludes_ipa_sra_p (ac2))
3592 return NULL;
3593
3594 modification |= ac2->write;
3595 ac2->group_representative = access;
3596 ac2->next_sibling = access->next_sibling;
3597 access->next_sibling = ac2;
3598 j++;
3599 }
3600
3601 group_count++;
3602 access->grp_maybe_modified = modification;
3603 if (!modification)
3604 *ro_grp = true;
3605 *prev_acc_ptr = access;
3606 prev_acc_ptr = &access->next_grp;
3607 total_size += access->size;
3608 i = j;
3609 }
3610
3611 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3612 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3613 else
3614 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3615 if (total_size >= agg_size)
3616 return NULL;
3617
3618 gcc_assert (group_count > 0);
3619 return res;
3620 }
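
/* An illustrative sketch of the splicing above (the function and its type
   are hypothetical):

     struct S { int a; int b; int c; };
     static int foo (struct S *p) { p->b = 1; return p->a + p->a; }

   Assuming 32-bit int, the sorted access vector for P holds two reads at
   offset 0 (p->a) and one write at offset 32 (p->b).  The two reads are
   linked under a single representative; the write forms a second group
   with grp_maybe_modified set, while *RO_GRP is still set because of the
   read-only group.  total_size (64) is below the aggregate size (96), so
   the function returns the first representative rather than NULL.  Whether
   the parameter is actually split is decided later.  */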
3621
3622 /* Decide whether the parameter with representative accesses given by REPR
3623 should be reduced into components. */
3624
3625 static int
3626 decide_one_param_reduction (struct access *repr)
3627 {
3628 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3629 bool by_ref;
3630 tree parm;
3631
3632 parm = repr->base;
3633 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3634 gcc_assert (cur_parm_size > 0);
3635
3636 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3637 {
3638 by_ref = true;
3639 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3640 }
3641 else
3642 {
3643 by_ref = false;
3644 agg_size = cur_parm_size;
3645 }
3646
3647 if (dump_file)
3648 {
3649 struct access *acc;
3650 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3651 print_generic_expr (dump_file, parm, 0);
3652 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3653 for (acc = repr; acc; acc = acc->next_grp)
3654 dump_access (dump_file, acc, true);
3655 }
3656
3657 total_size = 0;
3658 new_param_count = 0;
3659
3660 for (; repr; repr = repr->next_grp)
3661 {
3662 gcc_assert (parm == repr->base);
3663 new_param_count++;
3664
3665 if (!by_ref || (!repr->grp_maybe_modified
3666 && !repr->grp_not_necessarilly_dereferenced))
3667 total_size += repr->size;
3668 else
3669 total_size += cur_parm_size;
3670 }
3671
3672 gcc_assert (new_param_count > 0);
3673
3674 if (optimize_function_for_size_p (cfun))
3675 parm_size_limit = cur_parm_size;
3676 else
3677 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3678 * cur_parm_size);
3679
3680 if (total_size < agg_size
3681 && total_size <= parm_size_limit)
3682 {
3683 if (dump_file)
3684 fprintf (dump_file, " ....will be split into %i components\n",
3685 new_param_count);
3686 return new_param_count;
3687 }
3688 else
3689 return 0;
3690 }
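
/* A worked example of the heuristic above, with purely illustrative
   numbers: take a by-reference parameter pointing to a 24-byte aggregate
   (AGG_SIZE = 192 bits) of which only two 32-bit fields are read, both
   always dereferenced and never modified.  TOTAL_SIZE is then 64 bits.
   Assuming a 64-bit pointer (CUR_PARM_SIZE = 64) and the default
   ipa-sra-ptr-growth-factor of 2, PARM_SIZE_LIMIT is 128 bits, so
   64 < 192 and 64 <= 128 and the parameter is split into two components.
   Had one of the groups been possibly modified or not necessarily
   dereferenced, it would have been charged the full CUR_PARM_SIZE
   instead, making the split correspondingly less likely.  */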
3691
3692 /* The order of the following enumerators is important; we need to do extra
3693 work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3694 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3695 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3696
3697 /* Identify representatives of all accesses to all candidate parameters for
3698 IPA-SRA. Return result based on what representatives have been found. */
3699
3700 static enum ipa_splicing_result
3701 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3702 {
3703 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3704 tree parm;
3705 struct access *repr;
3706
3707 *representatives = VEC_alloc (access_p, heap, func_param_count);
3708
3709 for (parm = DECL_ARGUMENTS (current_function_decl);
3710 parm;
3711 parm = DECL_CHAIN (parm))
3712 {
3713 if (is_unused_scalar_param (parm))
3714 {
3715 VEC_quick_push (access_p, *representatives,
3716 &no_accesses_representant);
3717 if (result == NO_GOOD_ACCESS)
3718 result = UNUSED_PARAMS;
3719 }
3720 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3721 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3722 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3723 {
3724 repr = unmodified_by_ref_scalar_representative (parm);
3725 VEC_quick_push (access_p, *representatives, repr);
3726 if (repr)
3727 result = UNMODIF_BY_REF_ACCESSES;
3728 }
3729 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3730 {
3731 bool ro_grp = false;
3732 repr = splice_param_accesses (parm, &ro_grp);
3733 VEC_quick_push (access_p, *representatives, repr);
3734
3735 if (repr && !no_accesses_p (repr))
3736 {
3737 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3738 {
3739 if (ro_grp)
3740 result = UNMODIF_BY_REF_ACCESSES;
3741 else if (result < MODIF_BY_REF_ACCESSES)
3742 result = MODIF_BY_REF_ACCESSES;
3743 }
3744 else if (result < BY_VAL_ACCESSES)
3745 result = BY_VAL_ACCESSES;
3746 }
3747 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3748 result = UNUSED_PARAMS;
3749 }
3750 else
3751 VEC_quick_push (access_p, *representatives, NULL);
3752 }
3753
3754 if (result == NO_GOOD_ACCESS)
3755 {
3756 VEC_free (access_p, heap, *representatives);
3757 *representatives = NULL;
3758 return NO_GOOD_ACCESS;
3759 }
3760
3761 return result;
3762 }
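
/* Sketch of the classification above for a hypothetical signature

     static int bar (int unused, int *r, struct S *s, struct S v);

   where UNUSED is a scalar whose value is never used, R is a pointer to a
   scalar that is only read, S is an aggregate accessed through the pointer
   and V is accessed by value: UNUSED contributes the no_accesses
   representative (raising the result to UNUSED_PARAMS only if nothing
   stronger is found), R contributes UNMODIF_BY_REF_ACCESSES, S contributes
   UNMODIF_BY_REF_ACCESSES or MODIF_BY_REF_ACCESSES depending on whether a
   read-only group exists, and V contributes BY_VAL_ACCESSES.  The returned
   value is essentially the strongest state encountered, which is why the
   order of the enumerators of ipa_splicing_result matters.  */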
3763
3764 /* Return the index of BASE in PARMS. Abort if it is not found. */
3765
3766 static inline int
3767 get_param_index (tree base, VEC(tree, heap) *parms)
3768 {
3769 int i, len;
3770
3771 len = VEC_length (tree, parms);
3772 for (i = 0; i < len; i++)
3773 if (VEC_index (tree, parms, i) == base)
3774 return i;
3775 gcc_unreachable ();
3776 }
3777
3778 /* Convert the decisions made at the representative level into compact
3779 parameter adjustments. REPRESENTATIVES are pointers to the first
3780 representatives of each parameter's accesses; ADJUSTMENTS_COUNT is the
3781 expected final number of adjustments. */
3782
3783 static ipa_parm_adjustment_vec
3784 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
3785 int adjustments_count)
3786 {
3787 VEC (tree, heap) *parms;
3788 ipa_parm_adjustment_vec adjustments;
3789 tree parm;
3790 int i;
3791
3792 gcc_assert (adjustments_count > 0);
3793 parms = ipa_get_vector_of_formal_parms (current_function_decl);
3794 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
3795 parm = DECL_ARGUMENTS (current_function_decl);
3796 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
3797 {
3798 struct access *repr = VEC_index (access_p, representatives, i);
3799
3800 if (!repr || no_accesses_p (repr))
3801 {
3802 struct ipa_parm_adjustment *adj;
3803
3804 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3805 memset (adj, 0, sizeof (*adj));
3806 adj->base_index = get_param_index (parm, parms);
3807 adj->base = parm;
3808 if (!repr)
3809 adj->copy_param = 1;
3810 else
3811 adj->remove_param = 1;
3812 }
3813 else
3814 {
3815 struct ipa_parm_adjustment *adj;
3816 int index = get_param_index (parm, parms);
3817
3818 for (; repr; repr = repr->next_grp)
3819 {
3820 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3821 memset (adj, 0, sizeof (*adj));
3822 gcc_assert (repr->base == parm);
3823 adj->base_index = index;
3824 adj->base = repr->base;
3825 adj->type = repr->type;
3826 adj->offset = repr->offset;
3827 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
3828 && (repr->grp_maybe_modified
3829 || repr->grp_not_necessarilly_dereferenced));
3830
3831 }
3832 }
3833 }
3834 VEC_free (tree, heap, parms);
3835 return adjustments;
3836 }
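
/* As a sketch (names and layout purely illustrative), for

     static int baz (int i, struct S *p);

   where I is to be kept as is and P is reduced to two components at bit
   offsets 0 and 32, the resulting vector has three entries: one with
   copy_param set and base_index 0 for I, and two with base_index 1,
   offsets 0 and 32 and the respective component types for P.  For the
   latter two, by_ref is set only if the group may be modified or is not
   necessarily dereferenced on every execution path.  */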
3837
3838 /* Analyze the collected accesses and produce a plan of what to do with the
3839 parameters in the form of adjustments; NULL means nothing is to be done. */
3840
3841 static ipa_parm_adjustment_vec
3842 analyze_all_param_acesses (void)
3843 {
3844 enum ipa_splicing_result repr_state;
3845 bool proceed = false;
3846 int i, adjustments_count = 0;
3847 VEC (access_p, heap) *representatives;
3848 ipa_parm_adjustment_vec adjustments;
3849
3850 repr_state = splice_all_param_accesses (&representatives);
3851 if (repr_state == NO_GOOD_ACCESS)
3852 return NULL;
3853
3854 /* If there are any parameters passed by reference which are not modified
3855 directly, we need to check whether they can be modified indirectly. */
3856 if (repr_state == UNMODIF_BY_REF_ACCESSES)
3857 {
3858 analyze_caller_dereference_legality (representatives);
3859 analyze_modified_params (representatives);
3860 }
3861
3862 for (i = 0; i < func_param_count; i++)
3863 {
3864 struct access *repr = VEC_index (access_p, representatives, i);
3865
3866 if (repr && !no_accesses_p (repr))
3867 {
3868 if (repr->grp_scalar_ptr)
3869 {
3870 adjustments_count++;
3871 if (repr->grp_not_necessarilly_dereferenced
3872 || repr->grp_maybe_modified)
3873 VEC_replace (access_p, representatives, i, NULL);
3874 else
3875 {
3876 proceed = true;
3877 sra_stats.scalar_by_ref_to_by_val++;
3878 }
3879 }
3880 else
3881 {
3882 int new_components = decide_one_param_reduction (repr);
3883
3884 if (new_components == 0)
3885 {
3886 VEC_replace (access_p, representatives, i, NULL);
3887 adjustments_count++;
3888 }
3889 else
3890 {
3891 adjustments_count += new_components;
3892 sra_stats.aggregate_params_reduced++;
3893 sra_stats.param_reductions_created += new_components;
3894 proceed = true;
3895 }
3896 }
3897 }
3898 else
3899 {
3900 if (no_accesses_p (repr))
3901 {
3902 proceed = true;
3903 sra_stats.deleted_unused_parameters++;
3904 }
3905 adjustments_count++;
3906 }
3907 }
3908
3909 if (!proceed && dump_file)
3910 fprintf (dump_file, "NOT proceeding to change params.\n");
3911
3912 if (proceed)
3913 adjustments = turn_representatives_into_adjustments (representatives,
3914 adjustments_count);
3915 else
3916 adjustments = NULL;
3917
3918 VEC_free (access_p, heap, representatives);
3919 return adjustments;
3920 }
3921
3922 /* If a parameter replacement identified by ADJ does not yet exist in the form
3923 of a declaration, create it and record it; otherwise return the previously
3924 created one. */
3925
3926 static tree
3927 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
3928 {
3929 tree repl;
3930 if (!adj->new_ssa_base)
3931 {
3932 char *pretty_name = make_fancy_name (adj->base);
3933
3934 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
3935 DECL_NAME (repl) = get_identifier (pretty_name);
3936 obstack_free (&name_obstack, pretty_name);
3937
3938 get_var_ann (repl);
3939 add_referenced_var (repl);
3940 adj->new_ssa_base = repl;
3941 }
3942 else
3943 repl = adj->new_ssa_base;
3944 return repl;
3945 }
3946
3947 /* Find the first adjustment for a particular parameter BASE in a vector of
3948 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
3949 adjustment. */
3950
3951 static struct ipa_parm_adjustment *
3952 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
3953 {
3954 int i, len;
3955
3956 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3957 for (i = 0; i < len; i++)
3958 {
3959 struct ipa_parm_adjustment *adj;
3960
3961 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3962 if (!adj->copy_param && adj->base == base)
3963 return adj;
3964 }
3965
3966 return NULL;
3967 }
3968
3969 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
3970 removed because its value is not used, replace the SSA_NAME, together with
3971 all of its uses, with one relating to a created VAR_DECL and return true.
3972 ADJUSTMENTS is a pointer to an adjustments vector. */
3973
3974 static bool
3975 replace_removed_params_ssa_names (gimple stmt,
3976 ipa_parm_adjustment_vec adjustments)
3977 {
3978 struct ipa_parm_adjustment *adj;
3979 tree lhs, decl, repl, name;
3980
3981 if (gimple_code (stmt) == GIMPLE_PHI)
3982 lhs = gimple_phi_result (stmt);
3983 else if (is_gimple_assign (stmt))
3984 lhs = gimple_assign_lhs (stmt);
3985 else if (is_gimple_call (stmt))
3986 lhs = gimple_call_lhs (stmt);
3987 else
3988 gcc_unreachable ();
3989
3990 if (TREE_CODE (lhs) != SSA_NAME)
3991 return false;
3992 decl = SSA_NAME_VAR (lhs);
3993 if (TREE_CODE (decl) != PARM_DECL)
3994 return false;
3995
3996 adj = get_adjustment_for_base (adjustments, decl);
3997 if (!adj)
3998 return false;
3999
4000 repl = get_replaced_param_substitute (adj);
4001 name = make_ssa_name (repl, stmt);
4002
4003 if (dump_file)
4004 {
4005 fprintf (dump_file, "replacing an SSA name of a removed param ");
4006 print_generic_expr (dump_file, lhs, 0);
4007 fprintf (dump_file, " with ");
4008 print_generic_expr (dump_file, name, 0);
4009 fprintf (dump_file, "\n");
4010 }
4011
4012 if (is_gimple_assign (stmt))
4013 gimple_assign_set_lhs (stmt, name);
4014 else if (is_gimple_call (stmt))
4015 gimple_call_set_lhs (stmt, name);
4016 else
4017 gimple_phi_set_result (stmt, name);
4018
4019 replace_uses_by (lhs, name);
4020 release_ssa_name (lhs);
4021 return true;
4022 }
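
/* Sketch of the effect (statements spelled informally): assume parameter I
   is being removed because its incoming value is unused, yet the
   declaration is still written to inside the body:

     i_2 = j_1 + 5;
     ... = i_2;

   A replacement VAR_DECL is created once per adjustment by
   get_replaced_param_substitute, the definition above is rebased onto a
   new SSA name of that replacement, the use is redirected to it via
   replace_uses_by and the old name i_2 is released.  */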
4023
4024 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4025 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4026 specifies whether the function should care about type incompatibility between
4027 the current and new expressions. If it is false, the function will leave
4028 incompatibility issues to the caller. Return true iff the expression
4029 was modified. */
4030
4031 static bool
4032 sra_ipa_modify_expr (tree *expr, bool convert,
4033 ipa_parm_adjustment_vec adjustments)
4034 {
4035 int i, len;
4036 struct ipa_parm_adjustment *adj, *cand = NULL;
4037 HOST_WIDE_INT offset, size, max_size;
4038 tree base, src;
4039
4040 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4041
4042 if (TREE_CODE (*expr) == BIT_FIELD_REF
4043 || TREE_CODE (*expr) == IMAGPART_EXPR
4044 || TREE_CODE (*expr) == REALPART_EXPR)
4045 {
4046 expr = &TREE_OPERAND (*expr, 0);
4047 convert = true;
4048 }
4049
4050 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4051 if (!base || size == -1 || max_size == -1)
4052 return false;
4053
4054 if (TREE_CODE (base) == MEM_REF)
4055 {
4056 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4057 base = TREE_OPERAND (base, 0);
4058 }
4059
4060 base = get_ssa_base_param (base);
4061 if (!base || TREE_CODE (base) != PARM_DECL)
4062 return false;
4063
4064 for (i = 0; i < len; i++)
4065 {
4066 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4067
4068 if (adj->base == base
4069 && (adj->offset == offset || adj->remove_param))
4070 {
4071 cand = adj;
4072 break;
4073 }
4074 }
4075 if (!cand || cand->copy_param || cand->remove_param)
4076 return false;
4077
4078 if (cand->by_ref)
4079 src = build_simple_mem_ref (cand->reduction);
4080 else
4081 src = cand->reduction;
4082
4083 if (dump_file && (dump_flags & TDF_DETAILS))
4084 {
4085 fprintf (dump_file, "About to replace expr ");
4086 print_generic_expr (dump_file, *expr, 0);
4087 fprintf (dump_file, " with ");
4088 print_generic_expr (dump_file, src, 0);
4089 fprintf (dump_file, "\n");
4090 }
4091
4092 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4093 {
4094 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4095 *expr = vce;
4096 }
4097 else
4098 *expr = src;
4099 return true;
4100 }
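
/* An illustrative sketch (spelled informally; the parameter name ISRA.3 is
   made up for the example): if parameter P has been reduced and the
   component at bit offset 32 is now passed in a new scalar parameter
   ISRA.3, a reference such as

     MEM[(struct S *)p_1(D)].f      (component at offset 32)

   matches the adjustment for that offset and *EXPR is replaced by ISRA.3
   directly, or by *ISRA.3 built with build_simple_mem_ref when the new
   component is itself passed by reference.  A VIEW_CONVERT_EXPR is wrapped
   around the replacement only if CONVERT is set and the types are not
   compatible.  */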
4101
4102 /* If the statement pointed to by STMT_PTR contains any expressions that need
4103 to be replaced with a different one as noted by ADJUSTMENTS, do so. Handle any
4104 potential type incompatibilities (GSI is used to accommodate conversion
4105 statements and must point to the statement). Return true iff the statement
4106 was modified. */
4107
4108 static bool
4109 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4110 ipa_parm_adjustment_vec adjustments)
4111 {
4112 gimple stmt = *stmt_ptr;
4113 tree *lhs_p, *rhs_p;
4114 bool any;
4115
4116 if (!gimple_assign_single_p (stmt))
4117 return false;
4118
4119 rhs_p = gimple_assign_rhs1_ptr (stmt);
4120 lhs_p = gimple_assign_lhs_ptr (stmt);
4121
4122 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4123 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4124 if (any)
4125 {
4126 tree new_rhs = NULL_TREE;
4127
4128 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4129 {
4130 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4131 {
4132 /* V_C_Es of constructors can cause trouble (PR 42714). */
4133 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4134 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4135 else
4136 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4137 }
4138 else
4139 new_rhs = fold_build1_loc (gimple_location (stmt),
4140 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4141 *rhs_p);
4142 }
4143 else if (REFERENCE_CLASS_P (*rhs_p)
4144 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4145 && !is_gimple_reg (*lhs_p))
4146 /* This can happen when an assignment in between two single field
4147 structures is turned into an assignment in between two pointers to
4148 scalars (PR 42237). */
4149 new_rhs = *rhs_p;
4150
4151 if (new_rhs)
4152 {
4153 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4154 true, GSI_SAME_STMT);
4155
4156 gimple_assign_set_rhs_from_tree (gsi, tmp);
4157 }
4158
4159 return true;
4160 }
4161
4162 return false;
4163 }
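
/* For example (informally), an aggregate clearing such as

     *p_1(D) = {};

   whose left-hand side becomes a scalar component after the reduction
   cannot keep the empty CONSTRUCTOR on the right-hand side; it is
   rewritten to an assignment of build_zero_cst of the new left-hand side
   type.  Other type mismatches are bridged with a VIEW_CONVERT_EXPR that
   is forced into a separate statement in front of the assignment.  */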
4164
4165 /* Traverse the function body and perform all modifications as described in
4166 ADJUSTMENTS. Return true iff the CFG has been changed. */
4167
4168 static bool
4169 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4170 {
4171 bool cfg_changed = false;
4172 basic_block bb;
4173
4174 FOR_EACH_BB (bb)
4175 {
4176 gimple_stmt_iterator gsi;
4177
4178 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4179 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4180
4181 gsi = gsi_start_bb (bb);
4182 while (!gsi_end_p (gsi))
4183 {
4184 gimple stmt = gsi_stmt (gsi);
4185 bool modified = false;
4186 tree *t;
4187 unsigned i;
4188
4189 switch (gimple_code (stmt))
4190 {
4191 case GIMPLE_RETURN:
4192 t = gimple_return_retval_ptr (stmt);
4193 if (*t != NULL_TREE)
4194 modified |= sra_ipa_modify_expr (t, true, adjustments);
4195 break;
4196
4197 case GIMPLE_ASSIGN:
4198 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4199 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4200 break;
4201
4202 case GIMPLE_CALL:
4203 /* Operands must be processed before the lhs. */
4204 for (i = 0; i < gimple_call_num_args (stmt); i++)
4205 {
4206 t = gimple_call_arg_ptr (stmt, i);
4207 modified |= sra_ipa_modify_expr (t, true, adjustments);
4208 }
4209
4210 if (gimple_call_lhs (stmt))
4211 {
4212 t = gimple_call_lhs_ptr (stmt);
4213 modified |= sra_ipa_modify_expr (t, false, adjustments);
4214 modified |= replace_removed_params_ssa_names (stmt,
4215 adjustments);
4216 }
4217 break;
4218
4219 case GIMPLE_ASM:
4220 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4221 {
4222 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4223 modified |= sra_ipa_modify_expr (t, true, adjustments);
4224 }
4225 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4226 {
4227 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4228 modified |= sra_ipa_modify_expr (t, false, adjustments);
4229 }
4230 break;
4231
4232 default:
4233 break;
4234 }
4235
4236 if (modified)
4237 {
4238 update_stmt (stmt);
4239 if (maybe_clean_eh_stmt (stmt)
4240 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4241 cfg_changed = true;
4242 }
4243 gsi_next (&gsi);
4244 }
4245 }
4246
4247 return cfg_changed;
4248 }
4249
4250 /* Call gimple_debug_bind_reset_value on all debug statements describing
4251 gimple register parameters that are being removed or replaced. */
4252
4253 static void
4254 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4255 {
4256 int i, len;
4257
4258 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4259 for (i = 0; i < len; i++)
4260 {
4261 struct ipa_parm_adjustment *adj;
4262 imm_use_iterator ui;
4263 gimple stmt;
4264 tree name;
4265
4266 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4267 if (adj->copy_param || !is_gimple_reg (adj->base))
4268 continue;
4269 name = gimple_default_def (cfun, adj->base);
4270 if (!name)
4271 continue;
4272 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4273 {
4274 /* All other users must have been removed by
4275 ipa_sra_modify_function_body. */
4276 gcc_assert (is_gimple_debug (stmt));
4277 gimple_debug_bind_reset_value (stmt);
4278 update_stmt (stmt);
4279 }
4280 }
4281 }
4282
4283 /* Return true iff all callers have at least as many actual arguments as there
4284 are formal parameters in the current function. */
4285
4286 static bool
4287 all_callers_have_enough_arguments_p (struct cgraph_node *node)
4288 {
4289 struct cgraph_edge *cs;
4290 for (cs = node->callers; cs; cs = cs->next_caller)
4291 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4292 return false;
4293
4294 return true;
4295 }
4296
4297
4298 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4299
4300 static void
4301 convert_callers (struct cgraph_node *node, tree old_decl,
4302 ipa_parm_adjustment_vec adjustments)
4303 {
4304 tree old_cur_fndecl = current_function_decl;
4305 struct cgraph_edge *cs;
4306 basic_block this_block;
4307 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4308
4309 for (cs = node->callers; cs; cs = cs->next_caller)
4310 {
4311 current_function_decl = cs->caller->decl;
4312 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4313
4314 if (dump_file)
4315 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4316 cs->caller->uid, cs->callee->uid,
4317 cgraph_node_name (cs->caller),
4318 cgraph_node_name (cs->callee));
4319
4320 ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4321
4322 pop_cfun ();
4323 }
4324
4325 for (cs = node->callers; cs; cs = cs->next_caller)
4326 if (bitmap_set_bit (recomputed_callers, cs->caller->uid))
4327 compute_inline_parameters (cs->caller);
4328 BITMAP_FREE (recomputed_callers);
4329
4330 current_function_decl = old_cur_fndecl;
4331
4332 if (!encountered_recursive_call)
4333 return;
4334
4335 FOR_EACH_BB (this_block)
4336 {
4337 gimple_stmt_iterator gsi;
4338
4339 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4340 {
4341 gimple stmt = gsi_stmt (gsi);
4342 tree call_fndecl;
4343 if (gimple_code (stmt) != GIMPLE_CALL)
4344 continue;
4345 call_fndecl = gimple_call_fndecl (stmt);
4346 if (call_fndecl == old_decl)
4347 {
4348 if (dump_file)
4349 fprintf (dump_file, "Adjusting recursive call");
4350 gimple_call_set_fndecl (stmt, node->decl);
4351 ipa_modify_call_arguments (NULL, stmt, adjustments);
4352 }
4353 }
4354 }
4355
4356 return;
4357 }
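
/* Putting it together on a hypothetical example: if foo took a struct S *
   and only ever read s->a and s->b, a call such as

     foo (&local);

   is rewritten by ipa_modify_call_arguments into something like

     foo.isra.0 (local.a, local.b);

   where "isra" is the suffix passed to cgraph_function_versioning in
   modify_function below.  Recursive calls inside the new body itself are
   redirected to the new node by the loop above.  */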
4358
4359 /* Perform all the modifications required by IPA-SRA for NODE to have
4360 parameters as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4361
4362 static bool
4363 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4364 {
4365 struct cgraph_node *new_node;
4366 struct cgraph_edge *cs;
4367 bool cfg_changed;
4368 VEC (cgraph_edge_p, heap) * redirect_callers;
4369 int node_callers;
4370
4371 node_callers = 0;
4372 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
4373 node_callers++;
4374 redirect_callers = VEC_alloc (cgraph_edge_p, heap, node_callers);
4375 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
4376 VEC_quick_push (cgraph_edge_p, redirect_callers, cs);
4377
4378 rebuild_cgraph_edges ();
4379 pop_cfun ();
4380 current_function_decl = NULL_TREE;
4381
4382 new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
4383 NULL, NULL, "isra");
4384 current_function_decl = new_node->decl;
4385 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4386
4387 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4388 cfg_changed = ipa_sra_modify_function_body (adjustments);
4389 sra_ipa_reset_debug_stmts (adjustments);
4390 convert_callers (new_node, node->decl, adjustments);
4391 cgraph_make_node_local (new_node);
4392 return cfg_changed;
4393 }
4394
4395 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4396 its attributes, return true otherwise. NODE is the cgraph node of the current
4397 function. */
4398
4399 static bool
4400 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4401 {
4402 if (!cgraph_node_can_be_local_p (node))
4403 {
4404 if (dump_file)
4405 fprintf (dump_file, "Function not local to this compilation unit.\n");
4406 return false;
4407 }
4408
4409 if (!tree_versionable_function_p (node->decl))
4410 {
4411 if (dump_file)
4412 fprintf (dump_file, "Function is not versionable.\n");
4413 return false;
4414 }
4415
4416 if (DECL_VIRTUAL_P (current_function_decl))
4417 {
4418 if (dump_file)
4419 fprintf (dump_file, "Function is a virtual method.\n");
4420 return false;
4421 }
4422
4423 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4424 && node->global.size >= MAX_INLINE_INSNS_AUTO)
4425 {
4426 if (dump_file)
4427 fprintf (dump_file, "Function too big to be made truly local.\n");
4428 return false;
4429 }
4430
4431 if (!node->callers)
4432 {
4433 if (dump_file)
4434 fprintf (dump_file,
4435 "Function has no callers in this compilation unit.\n");
4436 return false;
4437 }
4438
4439 if (cfun->stdarg)
4440 {
4441 if (dump_file)
4442 fprintf (dump_file, "Function uses stdarg. \n");
4443 return false;
4444 }
4445
4446 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
4447 return false;
4448
4449 return true;
4450 }
4451
4452 /* Perform early interprocedural SRA. */
4453
4454 static unsigned int
4455 ipa_early_sra (void)
4456 {
4457 struct cgraph_node *node = cgraph_node (current_function_decl);
4458 ipa_parm_adjustment_vec adjustments;
4459 int ret = 0;
4460
4461 if (!ipa_sra_preliminary_function_checks (node))
4462 return 0;
4463
4464 sra_initialize ();
4465 sra_mode = SRA_MODE_EARLY_IPA;
4466
4467 if (!find_param_candidates ())
4468 {
4469 if (dump_file)
4470 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4471 goto simple_out;
4472 }
4473
4474 if (!all_callers_have_enough_arguments_p (node))
4475 {
4476 if (dump_file)
4477 fprintf (dump_file, "There are callers with insufficient number of "
4478 "arguments.\n");
4479 goto simple_out;
4480 }
4481
4482 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4483 func_param_count
4484 * last_basic_block_for_function (cfun));
4485 final_bbs = BITMAP_ALLOC (NULL);
4486
4487 scan_function ();
4488 if (encountered_apply_args)
4489 {
4490 if (dump_file)
4491 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4492 goto out;
4493 }
4494
4495 if (encountered_unchangable_recursive_call)
4496 {
4497 if (dump_file)
4498 fprintf (dump_file, "Function calls itself with insufficient "
4499 "number of arguments.\n");
4500 goto out;
4501 }
4502
4503 adjustments = analyze_all_param_acesses ();
4504 if (!adjustments)
4505 goto out;
4506 if (dump_file)
4507 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4508
4509 if (modify_function (node, adjustments))
4510 ret = TODO_update_ssa | TODO_cleanup_cfg;
4511 else
4512 ret = TODO_update_ssa;
4513 VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4514
4515 statistics_counter_event (cfun, "Unused parameters deleted",
4516 sra_stats.deleted_unused_parameters);
4517 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4518 sra_stats.scalar_by_ref_to_by_val);
4519 statistics_counter_event (cfun, "Aggregate parameters broken up",
4520 sra_stats.aggregate_params_reduced);
4521 statistics_counter_event (cfun, "Aggregate parameter components created",
4522 sra_stats.param_reductions_created);
4523
4524 out:
4525 BITMAP_FREE (final_bbs);
4526 free (bb_dereferences);
4527 simple_out:
4528 sra_deinitialize ();
4529 return ret;
4530 }
4531
4532 /* Return true iff early IPA-SRA shall be performed. */
4533 static bool
4534 ipa_early_sra_gate (void)
4535 {
4536 return flag_ipa_sra && dbg_cnt (eipa_sra);
4537 }
4538
4539 struct gimple_opt_pass pass_early_ipa_sra =
4540 {
4541 {
4542 GIMPLE_PASS,
4543 "eipa_sra", /* name */
4544 ipa_early_sra_gate, /* gate */
4545 ipa_early_sra, /* execute */
4546 NULL, /* sub */
4547 NULL, /* next */
4548 0, /* static_pass_number */
4549 TV_IPA_SRA, /* tv_id */
4550 0, /* properties_required */
4551 0, /* properties_provided */
4552 0, /* properties_destroyed */
4553 0, /* todo_flags_start */
4554 TODO_dump_func | TODO_dump_cgraph /* todo_flags_finish */
4555 }
4556 };
4557
4558