gcc/tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2003, 2004, 2005, 2006, 2007
5 Free Software Foundation, Inc.
6 Contributed by Diego Novillo <dnovillo@redhat.com>
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by the
12 Free Software Foundation; either version 3, or (at your option) any
13 later version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT
16 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "ggc.h"
29 #include "tree.h"
30
31 /* These RTL headers are needed for basic-block.h. */
32 #include "rtl.h"
33 #include "tm_p.h"
34 #include "hard-reg-set.h"
35 #include "basic-block.h"
36 #include "diagnostic.h"
37 #include "langhooks.h"
38 #include "tree-inline.h"
39 #include "tree-flow.h"
40 #include "tree-gimple.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "timevar.h"
44 #include "flags.h"
45 #include "bitmap.h"
46 #include "obstack.h"
47 #include "target.h"
48 /* expr.h is needed for MOVE_RATIO. */
49 #include "expr.h"
50 #include "params.h"
51
52
53 /* The object of this pass is to replace a non-addressable aggregate with a
54 set of independent variables. Most of the time, all of these variables
55 will be scalars. But a secondary objective is to break up larger
56 aggregates into smaller aggregates. In the process we may find that some
57 bits of the larger aggregate can be deleted as unreferenced.
58
59 This substitution is done globally. More localized substitutions would
60 be the purview of a load-store motion pass.
61
62 The optimization proceeds in phases:
63
64 (1) Identify variables that have types that are candidates for
65 decomposition.
66
67 (2) Scan the function looking for the ways these variables are used.
68 In particular we're interested in the number of times a variable
69 (or member) is needed as a complete unit, and the number of times
70 a variable (or member) is copied.
71
72 (3) Based on the usage profile, instantiate substitution variables.
73
74 (4) Scan the function making replacements.
75 */
76
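/* Editorial illustration, not part of the original source: for a function
   such as

     struct pt { int x; int y; };

     int
     f (void)
     {
       struct pt p;
       p.x = 3;
       p.y = 4;
       return p.x * p.y;
     }

   phase 1 selects P as a candidate, phase 2 records one use of each field,
   phase 3 creates scalar replacements (named p$x and p$y in the dumps),
   and phase 4 rewrites the body to use only those scalars:

     p$x = 3;
     p$y = 4;
     return p$x * p$y;

   after which the aggregate P is no longer referenced.  */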
77
78 /* True if this is the "early" pass, before inlining. */
79 static bool early_sra;
80
81 /* The set of todo flags to return from tree_sra. */
82 static unsigned int todoflags;
83
84 /* The set of aggregate variables that are candidates for scalarization. */
85 static bitmap sra_candidates;
86
87 /* Set of scalarizable PARM_DECLs that need copy-in operations at the
88 beginning of the function. */
89 static bitmap needs_copy_in;
90
91 /* Sets of bit pairs that cache type decomposition and instantiation. */
92 static bitmap sra_type_decomp_cache;
93 static bitmap sra_type_inst_cache;
94
95 /* One of these structures is created for each candidate aggregate and
96 each (accessed) member or group of members of such an aggregate. */
97 struct sra_elt
98 {
99 /* A tree of the elements. Used when we want to traverse everything. */
100 struct sra_elt *parent;
101 struct sra_elt *groups;
102 struct sra_elt *children;
103 struct sra_elt *sibling;
104
105 /* If this element is a root, then this is the VAR_DECL. If this is
106 a sub-element, this is some token used to identify the reference.
107 In the case of COMPONENT_REF, this is the FIELD_DECL. In the case
108 of an ARRAY_REF, this is the (constant) index. In the case of an
109 ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR. In the case
110 of a complex number, this is a zero or one. */
111 tree element;
112
113 /* The type of the element. */
114 tree type;
115
116 /* A VAR_DECL, for any sub-element we've decided to replace. */
117 tree replacement;
118
119 /* The number of times the element is referenced as a whole. I.e.
120 given "a.b.c", this would be incremented for C, but not for A or B. */
121 unsigned int n_uses;
122
123 /* The number of times the element is copied to or from another
124 scalarizable element. */
125 unsigned int n_copies;
126
127 /* True if TYPE is scalar. */
128 bool is_scalar;
129
130 /* True if this element is a group of members of its parent. */
131 bool is_group;
132
133 /* True if we saw something about this element that prevents scalarization,
134 such as non-constant indexing. */
135 bool cannot_scalarize;
136
137 /* True if we've decided that structure-to-structure assignment
138 should happen via memcpy and not per-element. */
139 bool use_block_copy;
140
141 /* True if everything under this element has been marked TREE_NO_WARNING. */
142 bool all_no_warning;
143
144 /* A flag for use with/after random access traversals. */
145 bool visited;
146
147 /* True if there is a BIT_FIELD_REF of this vector-typed element on the LHS. */
148 bool is_vector_lhs;
149 };
150
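/* Editorial sketch of the tree rooted at a candidate, assuming a variable

     struct { int i; _Complex float c; } v;

   The root sra_elt has ELEMENT equal to the VAR_DECL for V.  It has one
   child keyed by the FIELD_DECL for I and one keyed by the FIELD_DECL for
   C; the latter in turn has two children keyed by integer_zero_node (the
   real part) and integer_one_node (the imaginary part).  */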
151 #define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)
152
153 #define FOR_EACH_ACTUAL_CHILD(CHILD, ELT) \
154 for ((CHILD) = (ELT)->is_group \
155 ? next_child_for_group (NULL, (ELT)) \
156 : (ELT)->children; \
157 (CHILD); \
158 (CHILD) = (ELT)->is_group \
159 ? next_child_for_group ((CHILD), (ELT)) \
160 : (CHILD)->sibling)
161
162 /* Helper function for above macro. Return next child in group. */
163 static struct sra_elt *
164 next_child_for_group (struct sra_elt *child, struct sra_elt *group)
165 {
166 gcc_assert (group->is_group);
167
168 /* Find the next child in the parent. */
169 if (child)
170 child = child->sibling;
171 else
172 child = group->parent->children;
173
174 /* Skip siblings that do not belong to the group. */
175 while (child)
176 {
177 tree g_elt = group->element;
178 if (TREE_CODE (g_elt) == RANGE_EXPR)
179 {
180 if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
181 && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
182 break;
183 }
184 else
185 gcc_unreachable ();
186
187 child = child->sibling;
188 }
189
190 return child;
191 }
192
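/* Editorial example of group iteration: an ARRAY_RANGE_REF covering
   indices 2 through 4 of an array A produces a group element whose
   ELEMENT is the RANGE_EXPR [2, 4]; FOR_EACH_ACTUAL_CHILD applied to that
   group walks the parent's children whose constant indices fall within
   the range, e.g. the elements for a[2], a[3] and a[4] if they exist.  */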
193 /* Random access to the child of a parent is performed by hashing.
194 This prevents quadratic behavior, and allows SRA to function
195 reasonably on larger records. */
196 static htab_t sra_map;
197
198 /* All structures are allocated out of the following obstack. */
199 static struct obstack sra_obstack;
200
201 /* Debugging functions. */
202 static void dump_sra_elt_name (FILE *, struct sra_elt *);
203 extern void debug_sra_elt_name (struct sra_elt *);
204
205 /* Forward declarations. */
206 static tree generate_element_ref (struct sra_elt *);
207 \f
208 /* Return true if DECL is an SRA candidate. */
209
210 static bool
211 is_sra_candidate_decl (tree decl)
212 {
213 return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
214 }
215
216 /* Return true if TYPE is a scalar type. */
217
218 static bool
219 is_sra_scalar_type (tree type)
220 {
221 enum tree_code code = TREE_CODE (type);
222 return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
223 || code == FIXED_POINT_TYPE
224 || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
225 || code == POINTER_TYPE || code == OFFSET_TYPE
226 || code == REFERENCE_TYPE);
227 }
228
229 /* Return true if TYPE can be decomposed into a set of independent variables.
230
231 Note that this doesn't imply that all elements of TYPE can be
232 instantiated, just that if we decide to break up the type into
233 separate pieces that it can be done. */
234
235 bool
236 sra_type_can_be_decomposed_p (tree type)
237 {
238 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
239 tree t;
240
241 /* Avoid searching the same type twice. */
242 if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
243 return true;
244 if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
245 return false;
246
247 /* The type must have a definite nonzero size. */
248 if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
249 || integer_zerop (TYPE_SIZE (type)))
250 goto fail;
251
252 /* The type must be a non-union aggregate. */
253 switch (TREE_CODE (type))
254 {
255 case RECORD_TYPE:
256 {
257 bool saw_one_field = false;
258
259 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
260 if (TREE_CODE (t) == FIELD_DECL)
261 {
262 /* Reject incorrectly represented bit fields. */
263 if (DECL_BIT_FIELD (t)
264 && (tree_low_cst (DECL_SIZE (t), 1)
265 != TYPE_PRECISION (TREE_TYPE (t))))
266 goto fail;
267
268 saw_one_field = true;
269 }
270
271 /* Record types must have at least one field. */
272 if (!saw_one_field)
273 goto fail;
274 }
275 break;
276
277 case ARRAY_TYPE:
278 /* Array types must have a fixed lower and upper bound. */
279 t = TYPE_DOMAIN (type);
280 if (t == NULL)
281 goto fail;
282 if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
283 goto fail;
284 if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
285 goto fail;
286 break;
287
288 case COMPLEX_TYPE:
289 break;
290
291 default:
292 goto fail;
293 }
294
295 bitmap_set_bit (sra_type_decomp_cache, cache+0);
296 return true;
297
298 fail:
299 bitmap_set_bit (sra_type_decomp_cache, cache+1);
300 return false;
301 }
302
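/* Editorial examples for the predicate above, following the rules it
   encodes:

     struct s { int a; float b; };    decomposable (non-union record)
     int a[4];                        decomposable (fixed bounds)
     _Complex double c;               decomposable
     union u { int a; float b; };     not decomposable (union)
     int a[n];                        not decomposable (variable size)
     struct e {};                     not decomposable (no FIELD_DECL)  */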
303 /* Return true if DECL can be decomposed into a set of independent
304 (though not necessarily scalar) variables. */
305
306 static bool
307 decl_can_be_decomposed_p (tree var)
308 {
309 /* Early out for scalars. */
310 if (is_sra_scalar_type (TREE_TYPE (var)))
311 return false;
312
313 /* The variable must not be aliased. */
314 if (!is_gimple_non_addressable (var))
315 {
316 if (dump_file && (dump_flags & TDF_DETAILS))
317 {
318 fprintf (dump_file, "Cannot scalarize variable ");
319 print_generic_expr (dump_file, var, dump_flags);
320 fprintf (dump_file, " because it must live in memory\n");
321 }
322 return false;
323 }
324
325 /* The variable must not be volatile. */
326 if (TREE_THIS_VOLATILE (var))
327 {
328 if (dump_file && (dump_flags & TDF_DETAILS))
329 {
330 fprintf (dump_file, "Cannot scalarize variable ");
331 print_generic_expr (dump_file, var, dump_flags);
332 fprintf (dump_file, " because it is declared volatile\n");
333 }
334 return false;
335 }
336
337 /* We must be able to decompose the variable's type. */
338 if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
339 {
340 if (dump_file && (dump_flags & TDF_DETAILS))
341 {
342 fprintf (dump_file, "Cannot scalarize variable ");
343 print_generic_expr (dump_file, var, dump_flags);
344 fprintf (dump_file, " because its type cannot be decomposed\n");
345 }
346 return false;
347 }
348
349 /* HACK: if we decompose a va_list_type_node before inlining, then we'll
350 confuse tree-stdarg.c, and we won't be able to figure out which and
351 how many arguments are accessed. This really should be improved in
352 tree-stdarg.c, as the decomposition is truly a win. This could also
353 be fixed if the stdarg pass ran early, but this can't be done until
354 we have aliasing information early too. See PR 30791. */
355 if (early_sra
356 && TYPE_MAIN_VARIANT (TREE_TYPE (var))
357 == TYPE_MAIN_VARIANT (va_list_type_node))
358 return false;
359
360 return true;
361 }
362
363 /* Return true if TYPE can be *completely* decomposed into scalars. */
364
365 static bool
366 type_can_instantiate_all_elements (tree type)
367 {
368 if (is_sra_scalar_type (type))
369 return true;
370 if (!sra_type_can_be_decomposed_p (type))
371 return false;
372
373 switch (TREE_CODE (type))
374 {
375 case RECORD_TYPE:
376 {
377 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
378 tree f;
379
380 if (bitmap_bit_p (sra_type_inst_cache, cache+0))
381 return true;
382 if (bitmap_bit_p (sra_type_inst_cache, cache+1))
383 return false;
384
385 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
386 if (TREE_CODE (f) == FIELD_DECL)
387 {
388 if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
389 {
390 bitmap_set_bit (sra_type_inst_cache, cache+1);
391 return false;
392 }
393 }
394
395 bitmap_set_bit (sra_type_inst_cache, cache+0);
396 return true;
397 }
398
399 case ARRAY_TYPE:
400 return type_can_instantiate_all_elements (TREE_TYPE (type));
401
402 case COMPLEX_TYPE:
403 return true;
404
405 default:
406 gcc_unreachable ();
407 }
408 }
409
410 /* Test whether ELT or some sub-element cannot be scalarized. */
411
412 static bool
413 can_completely_scalarize_p (struct sra_elt *elt)
414 {
415 struct sra_elt *c;
416
417 if (elt->cannot_scalarize)
418 return false;
419
420 for (c = elt->children; c; c = c->sibling)
421 if (!can_completely_scalarize_p (c))
422 return false;
423
424 for (c = elt->groups; c; c = c->sibling)
425 if (!can_completely_scalarize_p (c))
426 return false;
427
428 return true;
429 }
430
431 \f
432 /* A simplified tree hashing algorithm that only handles the types of
433 trees we expect to find in sra_elt->element. */
434
435 static hashval_t
436 sra_hash_tree (tree t)
437 {
438 hashval_t h;
439
440 switch (TREE_CODE (t))
441 {
442 case VAR_DECL:
443 case PARM_DECL:
444 case RESULT_DECL:
445 h = DECL_UID (t);
446 break;
447
448 case INTEGER_CST:
449 h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
450 break;
451
452 case RANGE_EXPR:
453 h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
454 h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
455 break;
456
457 case FIELD_DECL:
458 /* We can have types that are compatible, but have different member
459 lists, so we can't hash fields by ID. Use offsets instead. */
460 h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
461 h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
462 break;
463
464 default:
465 gcc_unreachable ();
466 }
467
468 return h;
469 }
470
471 /* Hash function for struct sra_elt. */
472
473 static hashval_t
474 sra_elt_hash (const void *x)
475 {
476 const struct sra_elt *e = x;
477 const struct sra_elt *p;
478 hashval_t h;
479
480 h = sra_hash_tree (e->element);
481
482 /* Take into account everything back up the chain. Given that chain
483 lengths are rarely very long, this should be acceptable. If we
484 truly identify this as a performance problem, it should work to
485 hash the pointer value "e->parent". */
486 for (p = e->parent; p ; p = p->parent)
487 h = (h * 65521) ^ sra_hash_tree (p->element);
488
489 return h;
490 }
491
492 /* Equality function for struct sra_elt. */
493
494 static int
495 sra_elt_eq (const void *x, const void *y)
496 {
497 const struct sra_elt *a = x;
498 const struct sra_elt *b = y;
499 tree ae, be;
500
501 if (a->parent != b->parent)
502 return false;
503
504 ae = a->element;
505 be = b->element;
506
507 if (ae == be)
508 return true;
509 if (TREE_CODE (ae) != TREE_CODE (be))
510 return false;
511
512 switch (TREE_CODE (ae))
513 {
514 case VAR_DECL:
515 case PARM_DECL:
516 case RESULT_DECL:
517 /* These are all pointer unique. */
518 return false;
519
520 case INTEGER_CST:
521 /* Integers are not pointer unique, so compare their values. */
522 return tree_int_cst_equal (ae, be);
523
524 case RANGE_EXPR:
525 return
526 tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
527 && tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));
528
529 case FIELD_DECL:
530 /* Fields are unique within a record, but not between
531 compatible records. */
532 if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
533 return false;
534 return fields_compatible_p (ae, be);
535
536 default:
537 gcc_unreachable ();
538 }
539 }
540
541 /* Create or return the SRA_ELT structure for CHILD in PARENT. PARENT
542 may be null, in which case CHILD must be a DECL. */
543
544 static struct sra_elt *
545 lookup_element (struct sra_elt *parent, tree child, tree type,
546 enum insert_option insert)
547 {
548 struct sra_elt dummy;
549 struct sra_elt **slot;
550 struct sra_elt *elt;
551
552 if (parent)
553 dummy.parent = parent->is_group ? parent->parent : parent;
554 else
555 dummy.parent = NULL;
556 dummy.element = child;
557
558 slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
559 if (!slot && insert == NO_INSERT)
560 return NULL;
561
562 elt = *slot;
563 if (!elt && insert == INSERT)
564 {
565 *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
566 memset (elt, 0, sizeof (*elt));
567
568 elt->parent = parent;
569 elt->element = child;
570 elt->type = type;
571 elt->is_scalar = is_sra_scalar_type (type);
572
573 if (parent)
574 {
575 if (IS_ELEMENT_FOR_GROUP (elt->element))
576 {
577 elt->is_group = true;
578 elt->sibling = parent->groups;
579 parent->groups = elt;
580 }
581 else
582 {
583 elt->sibling = parent->children;
584 parent->children = elt;
585 }
586 }
587
588 /* If this is a parameter, then if we want to scalarize, we have
589 one copy from the true function parameter. Count it now. */
590 if (TREE_CODE (child) == PARM_DECL)
591 {
592 elt->n_copies = 1;
593 bitmap_set_bit (needs_copy_in, DECL_UID (child));
594 }
595 }
596
597 return elt;
598 }
599
600 /* Create or return the SRA_ELT structure for EXPR if the expression
601 refers to a scalarizable variable. */
602
603 static struct sra_elt *
604 maybe_lookup_element_for_expr (tree expr)
605 {
606 struct sra_elt *elt;
607 tree child;
608
609 switch (TREE_CODE (expr))
610 {
611 case VAR_DECL:
612 case PARM_DECL:
613 case RESULT_DECL:
614 if (is_sra_candidate_decl (expr))
615 return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
616 return NULL;
617
618 case ARRAY_REF:
619 /* We can't scalarize variable array indices. */
620 if (in_array_bounds_p (expr))
621 child = TREE_OPERAND (expr, 1);
622 else
623 return NULL;
624 break;
625
626 case ARRAY_RANGE_REF:
627 /* We can't scalarize variable array indices. */
628 if (range_in_array_bounds_p (expr))
629 {
630 tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
631 child = build2 (RANGE_EXPR, integer_type_node,
632 TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
633 }
634 else
635 return NULL;
636 break;
637
638 case COMPONENT_REF:
639 {
640 tree type = TREE_TYPE (TREE_OPERAND (expr, 0));
641 /* Don't look through unions. */
642 if (TREE_CODE (type) != RECORD_TYPE)
643 return NULL;
644 /* Nor through variable-sized records. */
645 if (TYPE_SIZE (type) == NULL_TREE
646 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
647 return NULL;
648 child = TREE_OPERAND (expr, 1);
649 }
650 break;
651
652 case REALPART_EXPR:
653 child = integer_zero_node;
654 break;
655 case IMAGPART_EXPR:
656 child = integer_one_node;
657 break;
658
659 default:
660 return NULL;
661 }
662
663 elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
664 if (elt)
665 return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
666 return NULL;
667 }
668
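/* Editorial example for the function above: given a candidate P of type
   struct { _Complex float c; }, the reference __real__ p.c is resolved by
   recursing down to P's VAR_DECL, then looking up the child keyed by the
   FIELD_DECL for C, then the child keyed by integer_zero_node for the
   real part, creating any missing sra_elt nodes along the way (INSERT).  */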
669 \f
670 /* Functions to walk just enough of the tree to see all scalarizable
671 references, and categorize them. */
672
673 /* A set of callbacks for phases 2 and 4. They'll be invoked for the
674 various kinds of references seen. In all cases, *BSI is an iterator
675 pointing to the statement being processed. */
676 struct sra_walk_fns
677 {
678 /* Invoked when ELT is required as a unit. Note that ELT might refer to
679 a leaf node, in which case this is a simple scalar reference. *EXPR_P
680 points to the location of the expression. IS_OUTPUT is true if this
681 is a left-hand-side reference. USE_ALL is true if we saw something we
682 couldn't quite identify and had to force the use of the entire object. */
683 void (*use) (struct sra_elt *elt, tree *expr_p,
684 block_stmt_iterator *bsi, bool is_output, bool use_all);
685
686 /* Invoked when we have a copy between two scalarizable references. */
687 void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
688 block_stmt_iterator *bsi);
689
690 /* Invoked when ELT is initialized from a constant. VALUE may be NULL,
691 in which case it should be treated as an empty CONSTRUCTOR. */
692 void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);
693
694 /* Invoked when we have a copy between one scalarizable reference ELT
695 and one non-scalarizable reference OTHER without side-effects.
696 IS_OUTPUT is true if ELT is on the left-hand side. */
697 void (*ldst) (struct sra_elt *elt, tree other,
698 block_stmt_iterator *bsi, bool is_output);
699
700 /* True during phase 2, false during phase 4. */
701 /* ??? This is a hack. */
702 bool initial_scan;
703 };
704
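/* Editorial examples of how statements map onto these callbacks during
   the walk, assuming A and B are scalarizable aggregates and X is not:

     a.f = 1;     USE of the element for a.f, with IS_OUTPUT true
     a = b;       COPY between the elements for A and B
     a = {};      INIT of A (constant or CONSTRUCTOR right-hand side)
     a = x;       LDST with A as the output side
     foo (a);     USE of A as a whole, as a call argument  */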
705 #ifdef ENABLE_CHECKING
706 /* Invoked via walk_tree, if *TP contains a candidate decl, return it. */
707
708 static tree
709 sra_find_candidate_decl (tree *tp, int *walk_subtrees,
710 void *data ATTRIBUTE_UNUSED)
711 {
712 tree t = *tp;
713 enum tree_code code = TREE_CODE (t);
714
715 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
716 {
717 *walk_subtrees = 0;
718 if (is_sra_candidate_decl (t))
719 return t;
720 }
721 else if (TYPE_P (t))
722 *walk_subtrees = 0;
723
724 return NULL;
725 }
726 #endif
727
728 /* Walk most expressions looking for a scalarizable aggregate.
729 If we find one, invoke FNS->USE. */
730
731 static void
732 sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
733 const struct sra_walk_fns *fns)
734 {
735 tree expr = *expr_p;
736 tree inner = expr;
737 bool disable_scalarization = false;
738 bool use_all_p = false;
739
740 /* We're looking to collect a reference expression between EXPR and INNER,
741 such that INNER is a scalarizable decl and all other nodes through EXPR
742 are references that we can scalarize. If we come across something that
743 we can't scalarize, we reset EXPR. This has the effect of making it
744 appear that we're referring to the larger expression as a whole. */
745
746 while (1)
747 switch (TREE_CODE (inner))
748 {
749 case VAR_DECL:
750 case PARM_DECL:
751 case RESULT_DECL:
752 /* If there is a scalarizable decl at the bottom, then process it. */
753 if (is_sra_candidate_decl (inner))
754 {
755 struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
756 if (disable_scalarization)
757 elt->cannot_scalarize = true;
758 else
759 fns->use (elt, expr_p, bsi, is_output, use_all_p);
760 }
761 return;
762
763 case ARRAY_REF:
764 /* Non-constant index means any member may be accessed. Prevent the
765 expression from being scalarized. If we were to treat this as a
766 reference to the whole array, we can wind up with a single dynamic
767 index reference inside a loop being overridden by several constant
768 index references during loop setup. It's possible that this could
769 be avoided by using dynamic usage counts based on BB trip counts
770 (based on loop analysis or profiling), but that hardly seems worth
771 the effort. */
772 /* ??? Hack. Figure out how to push this into the scan routines
773 without duplicating too much code. */
774 if (!in_array_bounds_p (inner))
775 {
776 disable_scalarization = true;
777 goto use_all;
778 }
779 /* ??? Are we assured that non-constant bounds and stride will have
780 the same value everywhere? I don't think Fortran will... */
781 if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
782 goto use_all;
783 inner = TREE_OPERAND (inner, 0);
784 break;
785
786 case ARRAY_RANGE_REF:
787 if (!range_in_array_bounds_p (inner))
788 {
789 disable_scalarization = true;
790 goto use_all;
791 }
792 /* ??? See the note above about non-constant bounds and stride. */
793 if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
794 goto use_all;
795 inner = TREE_OPERAND (inner, 0);
796 break;
797
798 case COMPONENT_REF:
799 {
800 tree type = TREE_TYPE (TREE_OPERAND (inner, 0));
801 /* Don't look through unions. */
802 if (TREE_CODE (type) != RECORD_TYPE)
803 goto use_all;
804 /* Nor through variable-sized records. */
805 if (TYPE_SIZE (type) == NULL_TREE
806 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
807 goto use_all;
808 inner = TREE_OPERAND (inner, 0);
809 }
810 break;
811
812 case REALPART_EXPR:
813 case IMAGPART_EXPR:
814 inner = TREE_OPERAND (inner, 0);
815 break;
816
817 case BIT_FIELD_REF:
818 /* A bit field reference to a specific vector is scalarized, but ones
819 used as outputs need to be marked as used on the left-hand side so
820 that when we scalarize it, we can mark that variable as not renamable. */
821 if (is_output
822 && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
823 {
824 struct sra_elt *elt
825 = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
826 if (elt)
827 elt->is_vector_lhs = true;
828 }
829 /* A bit field reference (access to *multiple* fields simultaneously)
830 is not currently scalarized. Consider this an access to the
831 complete outer element, to which walk_tree will bring us next. */
832
833 goto use_all;
834
835 case VIEW_CONVERT_EXPR:
836 case NOP_EXPR:
837 /* Similarly, a view/nop explicitly wants to look at an object in a
838 type other than the one we've scalarized. */
839 goto use_all;
840
841 case WITH_SIZE_EXPR:
842 /* This is a transparent wrapper. The entire inner expression really
843 is being used. */
844 goto use_all;
845
846 use_all:
847 expr_p = &TREE_OPERAND (inner, 0);
848 inner = expr = *expr_p;
849 use_all_p = true;
850 break;
851
852 default:
853 #ifdef ENABLE_CHECKING
854 /* Validate that we're not missing any references. */
855 gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
856 #endif
857 return;
858 }
859 }
860
861 /* Walk a TREE_LIST of values looking for scalarizable aggregates.
862 If we find one, invoke FNS->USE. */
863
864 static void
865 sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
866 const struct sra_walk_fns *fns)
867 {
868 tree op;
869 for (op = list; op ; op = TREE_CHAIN (op))
870 sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
871 }
872
873 /* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
874 If we find one, invoke FNS->USE. */
875
876 static void
877 sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
878 const struct sra_walk_fns *fns)
879 {
880 int i;
881 int nargs = call_expr_nargs (expr);
882 for (i = 0; i < nargs; i++)
883 sra_walk_expr (&CALL_EXPR_ARG (expr, i), bsi, false, fns);
884 }
885
886 /* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
887 aggregates. If we find one, invoke FNS->USE. */
888
889 static void
890 sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
891 const struct sra_walk_fns *fns)
892 {
893 sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
894 sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
895 }
896
897 /* Walk a GIMPLE_MODIFY_STMT and categorize the assignment appropriately. */
898
899 static void
900 sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
901 const struct sra_walk_fns *fns)
902 {
903 struct sra_elt *lhs_elt, *rhs_elt;
904 tree lhs, rhs;
905
906 lhs = GIMPLE_STMT_OPERAND (expr, 0);
907 rhs = GIMPLE_STMT_OPERAND (expr, 1);
908 lhs_elt = maybe_lookup_element_for_expr (lhs);
909 rhs_elt = maybe_lookup_element_for_expr (rhs);
910
911 /* If both sides are scalarizable, this is a COPY operation. */
912 if (lhs_elt && rhs_elt)
913 {
914 fns->copy (lhs_elt, rhs_elt, bsi);
915 return;
916 }
917
918 /* If the RHS is scalarizable, handle it. There are only two cases. */
919 if (rhs_elt)
920 {
921 if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
922 fns->ldst (rhs_elt, lhs, bsi, false);
923 else
924 fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false, false);
925 }
926
927 /* If it isn't scalarizable, there may be scalarizable variables within, so
928 check for a call or else walk the RHS to see if we need to do any
929 copy-in operations. We need to do it before the LHS is scalarized so
930 that the statements get inserted in the proper place, before any
931 copy-out operations. */
932 else
933 {
934 tree call = get_call_expr_in (rhs);
935 if (call)
936 sra_walk_call_expr (call, bsi, fns);
937 else
938 sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 1), bsi, false, fns);
939 }
940
941 /* Likewise, handle the LHS being scalarizable. We have cases similar
942 to those above, but also want to handle RHS being constant. */
943 if (lhs_elt)
944 {
945 /* If this is an assignment from a constant, or constructor, then
946 we have access to all of the elements individually. Invoke INIT. */
947 if (TREE_CODE (rhs) == COMPLEX_EXPR
948 || TREE_CODE (rhs) == COMPLEX_CST
949 || TREE_CODE (rhs) == CONSTRUCTOR)
950 fns->init (lhs_elt, rhs, bsi);
951
952 /* If this is an assignment from read-only memory, treat this as if
953 we'd been passed the constructor directly. Invoke INIT. */
954 else if (TREE_CODE (rhs) == VAR_DECL
955 && TREE_STATIC (rhs)
956 && TREE_READONLY (rhs)
957 && targetm.binds_local_p (rhs))
958 fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);
959
960 /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
961 The lvalue requirement prevents us from trying to directly scalarize
962 the result of a function call. Which would result in trying to call
963 the function multiple times, and other evil things. */
964 else if (!lhs_elt->is_scalar
965 && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
966 fns->ldst (lhs_elt, rhs, bsi, true);
967
968 /* Otherwise we're being used in some context that requires the
969 aggregate to be seen as a whole. Invoke USE. */
970 else
971 fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true, false);
972 }
973
974 /* Similarly to above, LHS_ELT being null only means that the LHS as a
975 whole is not a scalarizable reference. There may be occurrences of
976 scalarizable variables within, which implies a USE. */
977 else
978 sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 0), bsi, true, fns);
979 }
980
981 /* Entry point to the walk functions. Search the entire function,
982 invoking the callbacks in FNS on each of the references to
983 scalarizable variables. */
984
985 static void
986 sra_walk_function (const struct sra_walk_fns *fns)
987 {
988 basic_block bb;
989 block_stmt_iterator si, ni;
990
991 /* ??? Phase 4 could derive some benefit from walking the function in
992 dominator tree order. */
993
994 FOR_EACH_BB (bb)
995 for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
996 {
997 tree stmt, t;
998 stmt_ann_t ann;
999
1000 stmt = bsi_stmt (si);
1001 ann = stmt_ann (stmt);
1002
1003 ni = si;
1004 bsi_next (&ni);
1005
1006 /* If the statement has no virtual operands, then it doesn't
1007 make any structure references that we care about. */
1008 if (gimple_aliases_computed_p (cfun)
1009 && ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
1010 continue;
1011
1012 switch (TREE_CODE (stmt))
1013 {
1014 case RETURN_EXPR:
1015 /* If we have "return <retval>" then the return value is
1016 already exposed for our pleasure. Walk it as a USE to
1017 force all the components back in place for the return.
1018
1019 If we have an embedded assignment, then <retval> is of
1020 a type that gets returned in registers in this ABI, and
1021 we do not wish to extend their lifetimes. Treat this
1022 as a USE of the variable on the RHS of this assignment. */
1023
1024 t = TREE_OPERAND (stmt, 0);
1025 if (t == NULL_TREE)
1026 ;
1027 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
1028 sra_walk_expr (&GIMPLE_STMT_OPERAND (t, 1), &si, false, fns);
1029 else
1030 sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
1031 break;
1032
1033 case GIMPLE_MODIFY_STMT:
1034 sra_walk_gimple_modify_stmt (stmt, &si, fns);
1035 break;
1036 case CALL_EXPR:
1037 sra_walk_call_expr (stmt, &si, fns);
1038 break;
1039 case ASM_EXPR:
1040 sra_walk_asm_expr (stmt, &si, fns);
1041 break;
1042
1043 default:
1044 break;
1045 }
1046 }
1047 }
1048 \f
1049 /* Phase One: Scan all referenced variables in the program looking for
1050 structures that could be decomposed. */
1051
1052 static bool
1053 find_candidates_for_sra (void)
1054 {
1055 bool any_set = false;
1056 tree var;
1057 referenced_var_iterator rvi;
1058
1059 FOR_EACH_REFERENCED_VAR (var, rvi)
1060 {
1061 if (decl_can_be_decomposed_p (var))
1062 {
1063 bitmap_set_bit (sra_candidates, DECL_UID (var));
1064 any_set = true;
1065 }
1066 }
1067
1068 return any_set;
1069 }
1070
1071 \f
1072 /* Phase Two: Scan all references to scalarizable variables. Count the
1073 number of times they are used or copied respectively. */
1074
1075 /* Callbacks to fill in SRA_WALK_FNS. Everything but USE is
1076 considered a copy, because we can decompose the reference such that
1077 the sub-elements needn't be contiguous. */
1078
1079 static void
1080 scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
1081 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
1082 bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
1083 {
1084 elt->n_uses += 1;
1085 }
1086
1087 static void
1088 scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
1089 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
1090 {
1091 lhs_elt->n_copies += 1;
1092 rhs_elt->n_copies += 1;
1093 }
1094
1095 static void
1096 scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
1097 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
1098 {
1099 lhs_elt->n_copies += 1;
1100 }
1101
1102 static void
1103 scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
1104 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
1105 bool is_output ATTRIBUTE_UNUSED)
1106 {
1107 elt->n_copies += 1;
1108 }
1109
1110 /* Dump the values we collected during the scanning phase. */
1111
1112 static void
1113 scan_dump (struct sra_elt *elt)
1114 {
1115 struct sra_elt *c;
1116
1117 dump_sra_elt_name (dump_file, elt);
1118 fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);
1119
1120 for (c = elt->children; c ; c = c->sibling)
1121 scan_dump (c);
1122
1123 for (c = elt->groups; c ; c = c->sibling)
1124 scan_dump (c);
1125 }
1126
1127 /* Entry point to phase 2. Scan the entire function, building up
1128 scalarization data structures, recording copies and uses. */
1129
1130 static void
1131 scan_function (void)
1132 {
1133 static const struct sra_walk_fns fns = {
1134 scan_use, scan_copy, scan_init, scan_ldst, true
1135 };
1136 bitmap_iterator bi;
1137
1138 sra_walk_function (&fns);
1139
1140 if (dump_file && (dump_flags & TDF_DETAILS))
1141 {
1142 unsigned i;
1143
1144 fputs ("\nScan results:\n", dump_file);
1145 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
1146 {
1147 tree var = referenced_var (i);
1148 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1149 if (elt)
1150 scan_dump (elt);
1151 }
1152 fputc ('\n', dump_file);
1153 }
1154 }
1155 \f
1156 /* Phase Three: Make decisions about which variables to scalarize, if any.
1157 All elements to be scalarized have replacement variables made for them. */
1158
1159 /* A subroutine of build_element_name. Recursively build the element
1160 name on the obstack. */
1161
1162 static void
1163 build_element_name_1 (struct sra_elt *elt)
1164 {
1165 tree t;
1166 char buffer[32];
1167
1168 if (elt->parent)
1169 {
1170 build_element_name_1 (elt->parent);
1171 obstack_1grow (&sra_obstack, '$');
1172
1173 if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
1174 {
1175 if (elt->element == integer_zero_node)
1176 obstack_grow (&sra_obstack, "real", 4);
1177 else
1178 obstack_grow (&sra_obstack, "imag", 4);
1179 return;
1180 }
1181 }
1182
1183 t = elt->element;
1184 if (TREE_CODE (t) == INTEGER_CST)
1185 {
1186 /* ??? Eh. Don't bother doing double-wide printing. */
1187 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
1188 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1189 }
1190 else
1191 {
1192 tree name = DECL_NAME (t);
1193 if (name)
1194 obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
1195 IDENTIFIER_LENGTH (name));
1196 else
1197 {
1198 sprintf (buffer, "D%u", DECL_UID (t));
1199 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1200 }
1201 }
1202 }
1203
1204 /* Construct a pretty variable name for an element's replacement variable.
1205 The name is built on the obstack. */
1206
1207 static char *
1208 build_element_name (struct sra_elt *elt)
1209 {
1210 build_element_name_1 (elt);
1211 obstack_1grow (&sra_obstack, '\0');
1212 return XOBFINISH (&sra_obstack, char *);
1213 }
1214
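/* Editorial example of the generated names, assuming a candidate

     struct { struct { int n; } inner[2]; } s;

   The replacement for s.inner[1].n is given the pretty name
   "s$inner$1$n", built by the routines above with '$' separating the
   components.  */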
1215 /* Instantiate an element as an independent variable. */
1216
1217 static void
1218 instantiate_element (struct sra_elt *elt)
1219 {
1220 struct sra_elt *base_elt;
1221 tree var, base;
1222
1223 for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
1224 continue;
1225 base = base_elt->element;
1226
1227 elt->replacement = var = make_rename_temp (elt->type, "SR");
1228
1229 /* For vectors, if used on the left-hand side with a BIT_FIELD_REF,
1230 the replacement is not a gimple register. */
1231 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
1232 DECL_GIMPLE_REG_P (var) = 0;
1233
1234 DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
1235 DECL_ARTIFICIAL (var) = 1;
1236
1237 if (TREE_THIS_VOLATILE (elt->type))
1238 {
1239 TREE_THIS_VOLATILE (var) = 1;
1240 TREE_SIDE_EFFECTS (var) = 1;
1241 }
1242
1243 if (DECL_NAME (base) && !DECL_IGNORED_P (base))
1244 {
1245 char *pretty_name = build_element_name (elt);
1246 DECL_NAME (var) = get_identifier (pretty_name);
1247 obstack_free (&sra_obstack, pretty_name);
1248
1249 SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
1250 DECL_DEBUG_EXPR_IS_FROM (var) = 1;
1251
1252 DECL_IGNORED_P (var) = 0;
1253 TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
1254 if (elt->element && TREE_NO_WARNING (elt->element))
1255 TREE_NO_WARNING (var) = 1;
1256 }
1257 else
1258 {
1259 DECL_IGNORED_P (var) = 1;
1260 /* ??? We can't generate any warning that would be meaningful. */
1261 TREE_NO_WARNING (var) = 1;
1262 }
1263
1264 if (dump_file)
1265 {
1266 fputs (" ", dump_file);
1267 dump_sra_elt_name (dump_file, elt);
1268 fputs (" -> ", dump_file);
1269 print_generic_expr (dump_file, var, dump_flags);
1270 fputc ('\n', dump_file);
1271 }
1272 }
1273
1274 /* Make one pass across an element tree deciding whether or not it's
1275 profitable to instantiate individual leaf scalars.
1276
1277 PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
1278 fields all the way up the tree. */
1279
1280 static void
1281 decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
1282 unsigned int parent_copies)
1283 {
1284 if (dump_file && !elt->parent)
1285 {
1286 fputs ("Initial instantiation for ", dump_file);
1287 dump_sra_elt_name (dump_file, elt);
1288 fputc ('\n', dump_file);
1289 }
1290
1291 if (elt->cannot_scalarize)
1292 return;
1293
1294 if (elt->is_scalar)
1295 {
1296 /* The decision is simple: instantiate if we're used more frequently
1297 than the parent needs to be seen as a complete unit. */
1298 if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
1299 instantiate_element (elt);
1300 }
1301 else
1302 {
1303 struct sra_elt *c, *group;
1304 unsigned int this_uses = elt->n_uses + parent_uses;
1305 unsigned int this_copies = elt->n_copies + parent_copies;
1306
1307 /* Consider groups of sub-elements as weighing in favour of
1308 instantiation whatever their size. */
1309 for (group = elt->groups; group ; group = group->sibling)
1310 FOR_EACH_ACTUAL_CHILD (c, group)
1311 {
1312 c->n_uses += group->n_uses;
1313 c->n_copies += group->n_copies;
1314 }
1315
1316 for (c = elt->children; c ; c = c->sibling)
1317 decide_instantiation_1 (c, this_uses, this_copies);
1318 }
1319 }
1320
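/* Editorial worked example of the test above: suppose the aggregate A is
   used as a whole twice (n_uses == 2) and copied once (n_copies == 1).
   For the leaf a.x, parent_uses == 2 and parent_copies == 1; if a.x is
   itself used three times, 3 + 0 + 1 > 2 holds and a.x is instantiated,
   whereas a field that is never accessed directly (0 + 0 + 1 > 2 is
   false) gets no replacement at this stage.  */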
1321 /* Compute the size and number of all instantiated elements below ELT.
1322 We will only care about this if the size of the complete structure
1323 fits in a HOST_WIDE_INT, so we don't have to worry about overflow. */
1324
1325 static unsigned int
1326 sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
1327 {
1328 if (elt->replacement)
1329 {
1330 *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
1331 return 1;
1332 }
1333 else
1334 {
1335 struct sra_elt *c;
1336 unsigned int count = 0;
1337
1338 for (c = elt->children; c ; c = c->sibling)
1339 count += sum_instantiated_sizes (c, sizep);
1340
1341 return count;
1342 }
1343 }
1344
1345 /* Instantiate fields in ELT->TYPE that are not currently present as
1346 children of ELT. */
1347
1348 static void instantiate_missing_elements (struct sra_elt *elt);
1349
1350 static void
1351 instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
1352 {
1353 struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
1354 if (sub->is_scalar)
1355 {
1356 if (sub->replacement == NULL)
1357 instantiate_element (sub);
1358 }
1359 else
1360 instantiate_missing_elements (sub);
1361 }
1362
1363 static void
1364 instantiate_missing_elements (struct sra_elt *elt)
1365 {
1366 tree type = elt->type;
1367
1368 switch (TREE_CODE (type))
1369 {
1370 case RECORD_TYPE:
1371 {
1372 tree f;
1373 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
1374 if (TREE_CODE (f) == FIELD_DECL)
1375 {
1376 tree field_type = TREE_TYPE (f);
1377
1378 /* canonicalize_component_ref() unwidens some bit-field
1379 types (not marked as DECL_BIT_FIELD in C++), so we
1380 must do the same, lest we may introduce type
1381 mismatches. */
1382 if (INTEGRAL_TYPE_P (field_type)
1383 && DECL_MODE (f) != TYPE_MODE (field_type))
1384 field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
1385 field_type,
1386 elt->element,
1387 f, NULL_TREE),
1388 NULL_TREE));
1389
1390 instantiate_missing_elements_1 (elt, f, field_type);
1391 }
1392 break;
1393 }
1394
1395 case ARRAY_TYPE:
1396 {
1397 tree i, max, subtype;
1398
1399 i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1400 max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1401 subtype = TREE_TYPE (type);
1402
1403 while (1)
1404 {
1405 instantiate_missing_elements_1 (elt, i, subtype);
1406 if (tree_int_cst_equal (i, max))
1407 break;
1408 i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
1409 }
1410
1411 break;
1412 }
1413
1414 case COMPLEX_TYPE:
1415 type = TREE_TYPE (type);
1416 instantiate_missing_elements_1 (elt, integer_zero_node, type);
1417 instantiate_missing_elements_1 (elt, integer_one_node, type);
1418 break;
1419
1420 default:
1421 gcc_unreachable ();
1422 }
1423 }
1424
1425 /* Return true if there is only one non-aggregate field in the record TYPE.
1426 Return false otherwise. */
1427
1428 static bool
1429 single_scalar_field_in_record_p (tree type)
1430 {
1431 int num_fields = 0;
1432 tree field;
1433 if (TREE_CODE (type) != RECORD_TYPE)
1434 return false;
1435
1436 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1437 if (TREE_CODE (field) == FIELD_DECL)
1438 {
1439 num_fields++;
1440
1441 if (num_fields == 2)
1442 return false;
1443
1444 if (AGGREGATE_TYPE_P (TREE_TYPE (field)))
1445 return false;
1446 }
1447
1448 return true;
1449 }
1450
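/* Editorial examples for the predicate above:
     struct a { int i; };           true  (one scalar field)
     struct b { int i; int j; };    false (two fields)
     struct c { struct a x; };      false (aggregate field)  */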
1451 /* Make one pass across an element tree deciding whether to perform block
1452 or element copies. If we decide on element copies, instantiate all
1453 elements. Return true if there are any instantiated sub-elements. */
1454
1455 static bool
1456 decide_block_copy (struct sra_elt *elt)
1457 {
1458 struct sra_elt *c;
1459 bool any_inst;
1460
1461 /* We shouldn't be invoked on groups of sub-elements as they must
1462 behave like their parent as far as block copy is concerned. */
1463 gcc_assert (!elt->is_group);
1464
1465 /* If scalarization is disabled, respect it. */
1466 if (elt->cannot_scalarize)
1467 {
1468 elt->use_block_copy = 1;
1469
1470 if (dump_file)
1471 {
1472 fputs ("Scalarization disabled for ", dump_file);
1473 dump_sra_elt_name (dump_file, elt);
1474 fputc ('\n', dump_file);
1475 }
1476
1477 /* Disable scalarization of sub-elements. */
1478 for (c = elt->children; c; c = c->sibling)
1479 {
1480 c->cannot_scalarize = 1;
1481 decide_block_copy (c);
1482 }
1483
1484 /* Groups behave like their parent. */
1485 for (c = elt->groups; c; c = c->sibling)
1486 {
1487 c->cannot_scalarize = 1;
1488 c->use_block_copy = 1;
1489 }
1490
1491 return false;
1492 }
1493
1494 /* Don't decide if we have no uses, no copies and no groups. */
1495 if (elt->n_uses == 0 && elt->n_copies == 0 && elt->groups == NULL)
1496 ;
1497
1498 else if (!elt->is_scalar)
1499 {
1500 tree size_tree = TYPE_SIZE_UNIT (elt->type);
1501 bool use_block_copy = true;
1502
1503 /* Tradeoffs for COMPLEX types pretty much always make it better
1504 to go ahead and split the components. */
1505 if (TREE_CODE (elt->type) == COMPLEX_TYPE)
1506 use_block_copy = false;
1507
1508 /* Don't bother trying to figure out the rest if the structure is
1509 so large we can't do easy arithmetic. This also forces block
1510 copies for variable sized structures. */
1511 else if (host_integerp (size_tree, 1))
1512 {
1513 unsigned HOST_WIDE_INT full_size, inst_size = 0;
1514 unsigned int max_size, max_count, inst_count, full_count;
1515
1516 /* If the sra-max-structure-size parameter is 0, then the
1517 user has not overridden the parameter and we can choose a
1518 sensible default. */
1519 max_size = SRA_MAX_STRUCTURE_SIZE
1520 ? SRA_MAX_STRUCTURE_SIZE
1521 : MOVE_RATIO * UNITS_PER_WORD;
1522 max_count = SRA_MAX_STRUCTURE_COUNT
1523 ? SRA_MAX_STRUCTURE_COUNT
1524 : MOVE_RATIO;
1525
1526 full_size = tree_low_cst (size_tree, 1);
1527 full_count = count_type_elements (elt->type, false);
1528 inst_count = sum_instantiated_sizes (elt, &inst_size);
1529
1530 /* If there is only one scalar field in the record, don't block copy. */
1531 if (single_scalar_field_in_record_p (elt->type))
1532 use_block_copy = false;
1533
1534 /* ??? What to do here. If there are two fields, and we've only
1535 instantiated one, then instantiating the other is clearly a win.
1536 If there are a large number of fields then the size of the copy
1537 is much more of a factor. */
1538
1539 /* If the structure is small, and we've made copies, go ahead
1540 and instantiate, hoping that the copies will go away. */
1541 if (full_size <= max_size
1542 && (full_count - inst_count) <= max_count
1543 && elt->n_copies > elt->n_uses)
1544 use_block_copy = false;
1545 else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
1546 && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
1547 use_block_copy = false;
1548
1549 /* In order to avoid block copy, we have to be able to instantiate
1550 all elements of the type. See if this is possible. */
1551 if (!use_block_copy
1552 && (!can_completely_scalarize_p (elt)
1553 || !type_can_instantiate_all_elements (elt->type)))
1554 use_block_copy = true;
1555 }
1556
1557 elt->use_block_copy = use_block_copy;
1558
1559 /* Groups behave like their parent. */
1560 for (c = elt->groups; c; c = c->sibling)
1561 c->use_block_copy = use_block_copy;
1562
1563 if (dump_file)
1564 {
1565 fprintf (dump_file, "Using %s for ",
1566 use_block_copy ? "block-copy" : "element-copy");
1567 dump_sra_elt_name (dump_file, elt);
1568 fputc ('\n', dump_file);
1569 }
1570
1571 if (!use_block_copy)
1572 {
1573 instantiate_missing_elements (elt);
1574 return true;
1575 }
1576 }
1577
1578 any_inst = elt->replacement != NULL;
1579
1580 for (c = elt->children; c ; c = c->sibling)
1581 any_inst |= decide_block_copy (c);
1582
1583 return any_inst;
1584 }
1585
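/* Editorial worked example of the heuristic above, assuming the default
   limits max_size = MOVE_RATIO * UNITS_PER_WORD and max_count = MOVE_RATIO,
   and a target where MOVE_RATIO == 4 and UNITS_PER_WORD == 8 (both are
   target-dependent): a 24-byte, 3-field struct that is copied more often
   than it is used satisfies full_size <= 32 and full_count - inst_count
   <= 4, so it is element-copied and all of its fields are instantiated;
   a 4096-byte array fails the size test and stays a block copy unless the
   fraction already instantiated reaches SRA_FIELD_STRUCTURE_RATIO.  */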
1586 /* Entry point to phase 3. Instantiate scalar replacement variables. */
1587
1588 static void
1589 decide_instantiations (void)
1590 {
1591 unsigned int i;
1592 bool cleared_any;
1593 bitmap_head done_head;
1594 bitmap_iterator bi;
1595
1596 /* We cannot clear bits from a bitmap we're iterating over,
1597 so save up all the bits to clear until the end. */
1598 bitmap_initialize (&done_head, &bitmap_default_obstack);
1599 cleared_any = false;
1600
1601 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
1602 {
1603 tree var = referenced_var (i);
1604 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1605 if (elt)
1606 {
1607 decide_instantiation_1 (elt, 0, 0);
1608 if (!decide_block_copy (elt))
1609 elt = NULL;
1610 }
1611 if (!elt)
1612 {
1613 bitmap_set_bit (&done_head, i);
1614 cleared_any = true;
1615 }
1616 }
1617
1618 if (cleared_any)
1619 {
1620 bitmap_and_compl_into (sra_candidates, &done_head);
1621 bitmap_and_compl_into (needs_copy_in, &done_head);
1622 }
1623 bitmap_clear (&done_head);
1624
1625 mark_set_for_renaming (sra_candidates);
1626
1627 if (dump_file)
1628 fputc ('\n', dump_file);
1629 }
1630
1631 \f
1632 /* Phase Four: Update the function to match the replacements created. */
1633
1634 /* Mark all the variables in VDEF/VUSE operators for STMT for
1635 renaming. This becomes necessary when we modify all of a
1636 non-scalar. */
1637
1638 static void
1639 mark_all_v_defs_1 (tree stmt)
1640 {
1641 tree sym;
1642 ssa_op_iter iter;
1643
1644 update_stmt_if_modified (stmt);
1645
1646 FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
1647 {
1648 if (TREE_CODE (sym) == SSA_NAME)
1649 sym = SSA_NAME_VAR (sym);
1650 mark_sym_for_renaming (sym);
1651 }
1652 }
1653
1654
1655 /* Mark all the variables in virtual operands in all the statements in
1656 LIST for renaming. */
1657
1658 static void
1659 mark_all_v_defs (tree list)
1660 {
1661 if (TREE_CODE (list) != STATEMENT_LIST)
1662 mark_all_v_defs_1 (list);
1663 else
1664 {
1665 tree_stmt_iterator i;
1666 for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
1667 mark_all_v_defs_1 (tsi_stmt (i));
1668 }
1669 }
1670
1671
1672 /* Mark every replacement under ELT with TREE_NO_WARNING. */
1673
1674 static void
1675 mark_no_warning (struct sra_elt *elt)
1676 {
1677 if (!elt->all_no_warning)
1678 {
1679 if (elt->replacement)
1680 TREE_NO_WARNING (elt->replacement) = 1;
1681 else
1682 {
1683 struct sra_elt *c;
1684 FOR_EACH_ACTUAL_CHILD (c, elt)
1685 mark_no_warning (c);
1686 }
1687 elt->all_no_warning = true;
1688 }
1689 }
1690
1691 /* Build a single level component reference to ELT rooted at BASE. */
1692
1693 static tree
1694 generate_one_element_ref (struct sra_elt *elt, tree base)
1695 {
1696 switch (TREE_CODE (TREE_TYPE (base)))
1697 {
1698 case RECORD_TYPE:
1699 {
1700 tree field = elt->element;
1701
1702 /* Watch out for compatible records with differing field lists. */
1703 if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
1704 field = find_compatible_field (TREE_TYPE (base), field);
1705
1706 return build3 (COMPONENT_REF, elt->type, base, field, NULL);
1707 }
1708
1709 case ARRAY_TYPE:
1710 if (TREE_CODE (elt->element) == RANGE_EXPR)
1711 return build4 (ARRAY_RANGE_REF, elt->type, base,
1712 TREE_OPERAND (elt->element, 0), NULL, NULL);
1713 else
1714 return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
1715
1716 case COMPLEX_TYPE:
1717 if (elt->element == integer_zero_node)
1718 return build1 (REALPART_EXPR, elt->type, base);
1719 else
1720 return build1 (IMAGPART_EXPR, elt->type, base);
1721
1722 default:
1723 gcc_unreachable ();
1724 }
1725 }
1726
1727 /* Build a full component reference to ELT rooted at its native variable. */
1728
1729 static tree
1730 generate_element_ref (struct sra_elt *elt)
1731 {
1732 if (elt->parent)
1733 return generate_one_element_ref (elt, generate_element_ref (elt->parent));
1734 else
1735 return elt->element;
1736 }
1737
1738 /* Create an assignment statement from SRC to DST. */
1739
1740 static tree
1741 sra_build_assignment (tree dst, tree src)
1742 {
1743 /* It was hoped that we could perform some type sanity checking
1744 here, but since front-ends can emit accesses of fields in types
1745 different from their nominal types and copy structures containing
1746 them as a whole, we'd have to handle such differences here.
1747 Since such accesses under different types require compatibility
1748 anyway, there's little point in making tests and/or adding
1749 conversions to ensure the types of src and dst are the same.
1750 So we just assume type differences at this point are ok. */
1751 return build_gimple_modify_stmt (dst, src);
1752 }
1753
1754 /* Generate a set of assignment statements in *LIST_P to copy all
1755 instantiated elements under ELT to or from the equivalent structure
1756 rooted at EXPR. COPY_OUT controls the direction of the copy, with
1757 true meaning to copy out of EXPR into ELT. */
1758
1759 static void
1760 generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
1761 tree *list_p)
1762 {
1763 struct sra_elt *c;
1764 tree t;
1765
1766 if (!copy_out && TREE_CODE (expr) == SSA_NAME
1767 && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
1768 {
1769 tree r, i;
1770
1771 c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
1772 r = c->replacement;
1773 c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
1774 i = c->replacement;
1775
1776 t = build2 (COMPLEX_EXPR, elt->type, r, i);
1777 t = sra_build_assignment (expr, t);
1778 SSA_NAME_DEF_STMT (expr) = t;
1779 append_to_statement_list (t, list_p);
1780 }
1781 else if (elt->replacement)
1782 {
1783 if (copy_out)
1784 t = sra_build_assignment (elt->replacement, expr);
1785 else
1786 t = sra_build_assignment (expr, elt->replacement);
1787 append_to_statement_list (t, list_p);
1788 }
1789 else
1790 {
1791 FOR_EACH_ACTUAL_CHILD (c, elt)
1792 {
1793 t = generate_one_element_ref (c, unshare_expr (expr));
1794 generate_copy_inout (c, copy_out, t, list_p);
1795 }
1796 }
1797 }
1798
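/* Editorial example for the function above, assuming a scalarized
   PARM_DECL P of type struct { int x; int y; } with replacements p$x and
   p$y: with COPY_OUT true it appends

     p$x = p.x;
     p$y = p.y;

   (the form a copy-in at function entry takes), and with COPY_OUT false
   it emits the reverse assignments p.x = p$x; p.y = p$y;.  */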
1799 /* Generate a set of assignment statements in *LIST_P to copy all instantiated
1800 elements under SRC to their counterparts under DST. There must be a 1-1
1801 correspondence of instantiated elements. */
1802
1803 static void
1804 generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
1805 {
1806 struct sra_elt *dc, *sc;
1807
1808 FOR_EACH_ACTUAL_CHILD (dc, dst)
1809 {
1810 sc = lookup_element (src, dc->element, NULL, NO_INSERT);
1811 gcc_assert (sc);
1812 generate_element_copy (dc, sc, list_p);
1813 }
1814
1815 if (dst->replacement)
1816 {
1817 tree t;
1818
1819 gcc_assert (src->replacement);
1820
1821 t = sra_build_assignment (dst->replacement, src->replacement);
1822 append_to_statement_list (t, list_p);
1823 }
1824 }
1825
1826 /* Generate a set of assignment statements in *LIST_P to zero all instantiated
1827 elements under ELT. In addition, do not assign to elements that have been
1828 marked VISITED but do reset the visited flag; this allows easy coordination
1829 with generate_element_init. */
1830
1831 static void
1832 generate_element_zero (struct sra_elt *elt, tree *list_p)
1833 {
1834 struct sra_elt *c;
1835
1836 if (elt->visited)
1837 {
1838 elt->visited = false;
1839 return;
1840 }
1841
1842 FOR_EACH_ACTUAL_CHILD (c, elt)
1843 generate_element_zero (c, list_p);
1844
1845 if (elt->replacement)
1846 {
1847 tree t;
1848
1849 gcc_assert (elt->is_scalar);
1850 t = fold_convert (elt->type, integer_zero_node);
1851
1852 t = sra_build_assignment (elt->replacement, t);
1853 append_to_statement_list (t, list_p);
1854 }
1855 }
1856
1857 /* Generate an assignment VAR = INIT, where INIT may need gimplification.
1858 Add the result to *LIST_P. */
1859
1860 static void
1861 generate_one_element_init (tree var, tree init, tree *list_p)
1862 {
1863 /* The replacement can be almost arbitrarily complex. Gimplify. */
1864 tree stmt = sra_build_assignment (var, init);
1865 gimplify_and_add (stmt, list_p);
1866 }
1867
1868 /* Generate a set of assignment statements in *LIST_P to set all instantiated
1869 elements under ELT with the contents of the initializer INIT. In addition,
1870 mark all assigned elements VISITED; this allows easy coordination with
1871 generate_element_zero. Return false if we found a case we couldn't
1872 handle. */
1873
1874 static bool
1875 generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
1876 {
1877 bool result = true;
1878 enum tree_code init_code;
1879 struct sra_elt *sub;
1880 tree t;
1881 unsigned HOST_WIDE_INT idx;
1882 tree value, purpose;
1883
1884 /* We can be passed DECL_INITIAL of a static variable. It might have a
1885 conversion, which we strip off here. */
1886 STRIP_USELESS_TYPE_CONVERSION (init);
1887 init_code = TREE_CODE (init);
1888
1889 if (elt->is_scalar)
1890 {
1891 if (elt->replacement)
1892 {
1893 generate_one_element_init (elt->replacement, init, list_p);
1894 elt->visited = true;
1895 }
1896 return result;
1897 }
1898
1899 switch (init_code)
1900 {
1901 case COMPLEX_CST:
1902 case COMPLEX_EXPR:
1903 FOR_EACH_ACTUAL_CHILD (sub, elt)
1904 {
1905 if (sub->element == integer_zero_node)
1906 t = (init_code == COMPLEX_EXPR
1907 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
1908 else
1909 t = (init_code == COMPLEX_EXPR
1910 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
1911 result &= generate_element_init_1 (sub, t, list_p);
1912 }
1913 break;
1914
1915 case CONSTRUCTOR:
1916 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
1917 {
1918 if (TREE_CODE (purpose) == RANGE_EXPR)
1919 {
1920 tree lower = TREE_OPERAND (purpose, 0);
1921 tree upper = TREE_OPERAND (purpose, 1);
1922
1923 while (1)
1924 {
1925 sub = lookup_element (elt, lower, NULL, NO_INSERT);
1926 if (sub != NULL)
1927 result &= generate_element_init_1 (sub, value, list_p);
1928 if (tree_int_cst_equal (lower, upper))
1929 break;
1930 lower = int_const_binop (PLUS_EXPR, lower,
1931 integer_one_node, true);
1932 }
1933 }
1934 else
1935 {
1936 sub = lookup_element (elt, purpose, NULL, NO_INSERT);
1937 if (sub != NULL)
1938 result &= generate_element_init_1 (sub, value, list_p);
1939 }
1940 }
1941 break;
1942
1943 default:
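/* Mark the element visited so that generate_element_zero does not zero
   it (or its children); since initialization failed, the values will be
   loaded from the structure by the copy-out emitted in scalarize_init. */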
1944 elt->visited = true;
1945 result = false;
1946 }
1947
1948 return result;
1949 }
1950
1951 /* A wrapper function for generate_element_init_1 that handles cleanup after
1952 gimplification. */
1953
1954 static bool
1955 generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
1956 {
1957 bool ret;
1958
1959 push_gimplify_context ();
1960 ret = generate_element_init_1 (elt, init, list_p);
1961 pop_gimplify_context (NULL);
1962
1963 /* The replacement can expose previously unreferenced variables. */
1964 if (ret && *list_p)
1965 {
1966 tree_stmt_iterator i;
1967
1968 for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
1969 find_new_referenced_vars (tsi_stmt_ptr (i));
1970 }
1971
1972 return ret;
1973 }
1974
1975 /* Insert STMT on all outgoing edges of BB. Note that if BB has more than
1976 one outgoing edge, STMT will be replicated for each one. Also,
1977 abnormal edges will be ignored. */
1978
1979 void
1980 insert_edge_copies (tree stmt, basic_block bb)
1981 {
1982 edge e;
1983 edge_iterator ei;
1984 bool first_copy;
1985
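/* The first suitable edge receives STMT itself; every additional edge
   needs its own unshared copy, created by unsave_expr_now. */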
1986 first_copy = true;
1987 FOR_EACH_EDGE (e, ei, bb->succs)
1988 {
1989 /* We don't need to insert copies on abnormal edges. The
1990 value of the scalar replacement is not guaranteed to
1991 be valid through an abnormal edge. */
1992 if (!(e->flags & EDGE_ABNORMAL))
1993 {
1994 if (first_copy)
1995 {
1996 bsi_insert_on_edge (e, stmt);
1997 first_copy = false;
1998 }
1999 else
2000 bsi_insert_on_edge (e, unsave_expr_now (stmt));
2001 }
2002 }
2003 }
2004
2005 /* Helper function to insert LIST before BSI, and set up line number info. */
2006
2007 void
2008 sra_insert_before (block_stmt_iterator *bsi, tree list)
2009 {
2010 tree stmt = bsi_stmt (*bsi);
2011
2012 if (EXPR_HAS_LOCATION (stmt))
2013 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
2014 bsi_insert_before (bsi, list, BSI_SAME_STMT);
2015 }
2016
2017 /* Similarly, but insert after BSI. Handles insertion onto edges as well. */
2018
2019 void
2020 sra_insert_after (block_stmt_iterator *bsi, tree list)
2021 {
2022 tree stmt = bsi_stmt (*bsi);
2023
2024 if (EXPR_HAS_LOCATION (stmt))
2025 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
2026
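/* If STMT ends its basic block we cannot insert after it within the
   block, so put LIST on the outgoing edges instead. */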
2027 if (stmt_ends_bb_p (stmt))
2028 insert_edge_copies (list, bsi->bb);
2029 else
2030 bsi_insert_after (bsi, list, BSI_SAME_STMT);
2031 }
2032
2033 /* Similarly, but replace the statement at BSI. */
2034
2035 static void
2036 sra_replace (block_stmt_iterator *bsi, tree list)
2037 {
2038 sra_insert_before (bsi, list);
2039 bsi_remove (bsi, false);
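/* bsi_remove leaves BSI pointing past the removed statement; back it up
   to the last statement of the inserted LIST so that walking forward
   from BSI resumes with the statement that followed the original. */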
2040 if (bsi_end_p (*bsi))
2041 *bsi = bsi_last (bsi->bb);
2042 else
2043 bsi_prev (bsi);
2044 }
2045
2046 /* Scalarize a USE. To recap, this is either a simple reference to ELT,
2047 if ELT is scalar, or some occurrence of ELT that requires a complete
2048 aggregate. IS_OUTPUT is true if ELT is being modified. */
2049
2050 static void
2051 scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
2052 bool is_output, bool use_all)
2053 {
2054 tree list = NULL, stmt = bsi_stmt (*bsi);
2055
2056 if (elt->replacement)
2057 {
2058 /* If we have a replacement, then updating the reference is as
2059 simple as modifying the existing statement in place. */
2060 if (is_output)
2061 mark_all_v_defs (stmt);
2062 *expr_p = elt->replacement;
2063 update_stmt (stmt);
2064 }
2065 else
2066 {
2067 /* Otherwise we need some copies. If ELT is being read, then we want
2068 to store all (modified) sub-elements back into the structure before
2069 the reference takes place. If ELT is being written, then we want to
2070 load the changed values back into our shadow variables. */
2071 /* ??? We don't check modified for reads, we just always write all of
2072 the values. We should be able to record the SSA number of the VOP
2073 for which the values were last read. If that number matches the
2074 SSA number of the VOP in the current statement, then we needn't
2075 emit an assignment. This would also eliminate double writes when
2076 a structure is passed as more than one argument to a function call.
2077 This optimization would be most effective if sra_walk_function
2078 processed the blocks in dominator order. */
2079
2080 generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
2081 if (list == NULL)
2082 return;
2083 mark_all_v_defs (list);
2084 if (is_output)
2085 sra_insert_after (bsi, list);
2086 else
2087 {
2088 sra_insert_before (bsi, list);
2089 if (use_all)
2090 mark_no_warning (elt);
2091 }
2092 }
2093 }
2094
2095 /* Scalarize a COPY. To recap, this is an assignment statement between
2096 two scalarizable references, LHS_ELT and RHS_ELT. */
2097
2098 static void
2099 scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
2100 block_stmt_iterator *bsi)
2101 {
2102 tree list, stmt;
2103
2104 if (lhs_elt->replacement && rhs_elt->replacement)
2105 {
2106 /* If we have two scalar operands, modify the existing statement. */
2107 stmt = bsi_stmt (*bsi);
2108
2109 /* See the commentary in sra_walk_function concerning
2110 RETURN_EXPR, and why we should never see one here. */
2111 gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
2112
2113 GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
2114 GIMPLE_STMT_OPERAND (stmt, 1) = rhs_elt->replacement;
2115 update_stmt (stmt);
2116 }
2117 else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
2118 {
2119 /* If either side requires a block copy, then sync the RHS back
2120 to the original structure, leave the original assignment
2121 statement (which will perform the block copy), then load the
2122 LHS values out of its now-updated original structure. */
2123 /* ??? Could perform a modified pair-wise element copy. That
2124 would at least allow those elements that are instantiated in
2125 both structures to be optimized well. */
2126
2127 list = NULL;
2128 generate_copy_inout (rhs_elt, false,
2129 generate_element_ref (rhs_elt), &list);
2130 if (list)
2131 {
2132 mark_all_v_defs (list);
2133 sra_insert_before (bsi, list);
2134 }
2135
2136 list = NULL;
2137 generate_copy_inout (lhs_elt, true,
2138 generate_element_ref (lhs_elt), &list);
2139 if (list)
2140 {
2141 mark_all_v_defs (list);
2142 sra_insert_after (bsi, list);
2143 }
2144 }
2145 else
2146 {
2147 /* Otherwise both sides must be fully instantiated. In which
2148 case perform pair-wise element assignments and replace the
2149 original block copy statement. */
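/* E.g. `a = b' for a structure with fields x and y becomes two scalar
   assignments between the replacement variables for a.x/b.x and a.y/b.y. */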
2150
2151 stmt = bsi_stmt (*bsi);
2152 mark_all_v_defs (stmt);
2153
2154 list = NULL;
2155 generate_element_copy (lhs_elt, rhs_elt, &list);
2156 gcc_assert (list);
2157 mark_all_v_defs (list);
2158 sra_replace (bsi, list);
2159 }
2160 }
2161
2162 /* Scalarize an INIT. To recap, this is an assignment to a scalarizable
2163 reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
2164 COMPLEX_EXPR. If RHS is NULL, it should be treated as an empty
2165 CONSTRUCTOR. */
2166
2167 static void
2168 scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
2169 {
2170 bool result = true;
2171 tree list = NULL;
2172
2173 /* Generate initialization statements for all members extant in the RHS. */
2174 if (rhs)
2175 {
2176 /* Unshare the expression just in case this is from a decl's initial. */
2177 rhs = unshare_expr (rhs);
2178 result = generate_element_init (lhs_elt, rhs, &list);
2179 }
2180
2181 /* CONSTRUCTOR is defined such that any member not mentioned is assigned
2182 a zero value. Initialize the rest of the instantiated elements. */
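/* E.g. for `struct { int x, y; } s = { 1 };' the CONSTRUCTOR mentions
   only s.x; the replacement for s.y is zeroed here. */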
2183 generate_element_zero (lhs_elt, &list);
2184
2185 if (!result)
2186 {
2187 /* If we failed to convert the entire initializer, then we must
2188 leave the structure assignment in place and must load values
2189 from the structure into the slots for which we did not find
2190 constants. The easiest way to do this is to generate a complete
2191 copy-out, and then follow that with the constant assignments
2192 that we were able to build. DCE will clean things up. */
2193 tree list0 = NULL;
2194 generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
2195 &list0);
2196 append_to_statement_list (list, &list0);
2197 list = list0;
2198 }
2199
2200 if (lhs_elt->use_block_copy || !result)
2201 {
2202 /* Since LHS is not fully instantiated, we must leave the structure
2203 assignment in place. Treating this case differently from a USE
2204 exposes constants to later optimizations. */
2205 if (list)
2206 {
2207 mark_all_v_defs (list);
2208 sra_insert_after (bsi, list);
2209 }
2210 }
2211 else
2212 {
2213 /* The LHS is fully instantiated. The list of initializations
2214 replaces the original structure assignment. */
2215 gcc_assert (list);
2216 mark_all_v_defs (bsi_stmt (*bsi));
2217 mark_all_v_defs (list);
2218 sra_replace (bsi, list);
2219 }
2220 }
2221
2222 /* A subroutine of scalarize_ldst called via walk_tree. Set TREE_THIS_NOTRAP
2223 on all INDIRECT_REFs. */
2224
2225 static tree
2226 mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2227 {
2228 tree t = *tp;
2229
2230 if (TREE_CODE (t) == INDIRECT_REF)
2231 {
2232 TREE_THIS_NOTRAP (t) = 1;
2233 *walk_subtrees = 0;
2234 }
2235 else if (IS_TYPE_OR_DECL_P (t))
2236 *walk_subtrees = 0;
2237
2238 return NULL;
2239 }
2240
2241 /* Scalarize a LDST. To recap, this is an assignment between one scalarizable
2242 reference ELT and one non-scalarizable reference OTHER. IS_OUTPUT is true
2243 if ELT is on the left-hand side. */
2244
2245 static void
2246 scalarize_ldst (struct sra_elt *elt, tree other,
2247 block_stmt_iterator *bsi, bool is_output)
2248 {
2249 /* Shouldn't have gotten called for a scalar. */
2250 gcc_assert (!elt->replacement);
2251
2252 if (elt->use_block_copy)
2253 {
2254 /* Since ELT is not fully instantiated, we have to leave the
2255 block copy in place. Treat this as a USE. */
2256 scalarize_use (elt, NULL, bsi, is_output, false);
2257 }
2258 else
2259 {
2260 /* The interesting case is when ELT is fully instantiated. In this
2261 case we can have each element stored/loaded directly to/from the
2262 corresponding slot in OTHER. This avoids a block copy. */
2263
2264 tree list = NULL, stmt = bsi_stmt (*bsi);
2265
2266 mark_all_v_defs (stmt);
2267 generate_copy_inout (elt, is_output, other, &list);
2268 gcc_assert (list);
2269 mark_all_v_defs (list);
2270
2271 /* Preserve EH semantics. */
2272 if (stmt_ends_bb_p (stmt))
2273 {
2274 tree_stmt_iterator tsi;
2275 tree first;
2276
2277 /* Extract the first statement from LIST. */
2278 tsi = tsi_start (list);
2279 first = tsi_stmt (tsi);
2280 tsi_delink (&tsi);
2281
2282 /* Replace the old statement with this new representative. */
2283 bsi_replace (bsi, first, true);
2284
2285 if (!tsi_end_p (tsi))
2286 {
2287 /* If any reference would trap, then they all would. And more
2288 to the point, the first would. Therefore none of the rest
2289 will trap since the first didn't. Indicate this by
2290 iterating over the remaining statements and set
2291 TREE_THIS_NOTRAP in all INDIRECT_REFs. */
2292 do
2293 {
2294 walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
2295 tsi_next (&tsi);
2296 }
2297 while (!tsi_end_p (tsi));
2298
2299 insert_edge_copies (list, bsi->bb);
2300 }
2301 }
2302 else
2303 sra_replace (bsi, list);
2304 }
2305 }
2306
2307 /* Generate initializations for all scalarizable parameters. */
2308
2309 static void
2310 scalarize_parms (void)
2311 {
2312 tree list = NULL;
2313 unsigned i;
2314 bitmap_iterator bi;
2315
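/* needs_copy_in is indexed by variable UID and holds the scalarized
   PARM_DECLs; the copy-in assignments are placed on the edges leaving the
   entry block so that the replacements start out with the incoming
   parameter values. */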
2316 EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
2317 {
2318 tree var = referenced_var (i);
2319 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
2320 generate_copy_inout (elt, true, var, &list);
2321 }
2322
2323 if (list)
2324 {
2325 insert_edge_copies (list, ENTRY_BLOCK_PTR);
2326 mark_all_v_defs (list);
2327 }
2328 }
2329
2330 /* Entry point to phase 4. Update the function to match replacements. */
2331
2332 static void
2333 scalarize_function (void)
2334 {
2335 static const struct sra_walk_fns fns = {
2336 scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
2337 };
2338
2339 sra_walk_function (&fns);
2340 scalarize_parms ();
2341 bsi_commit_edge_inserts ();
2342 }
2343
2344 \f
2345 /* Debug helper function. Print ELT in a nice human-readable format. */
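/* Field accesses print as "a.x", array elements as "a[3]", ranges as
   "a[2..5]", and complex parts as "__real__ a" or "__imag__ a". */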
2346
2347 static void
2348 dump_sra_elt_name (FILE *f, struct sra_elt *elt)
2349 {
2350 if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
2351 {
2352 fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
2353 dump_sra_elt_name (f, elt->parent);
2354 }
2355 else
2356 {
2357 if (elt->parent)
2358 dump_sra_elt_name (f, elt->parent);
2359 if (DECL_P (elt->element))
2360 {
2361 if (TREE_CODE (elt->element) == FIELD_DECL)
2362 fputc ('.', f);
2363 print_generic_expr (f, elt->element, dump_flags);
2364 }
2365 else if (TREE_CODE (elt->element) == RANGE_EXPR)
2366 fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC ".." HOST_WIDE_INT_PRINT_DEC "]",
2367 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
2368 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
2369 else
2370 fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
2371 TREE_INT_CST_LOW (elt->element));
2372 }
2373 }
2374
2375 /* Likewise, but callable from the debugger. */
2376
2377 void
2378 debug_sra_elt_name (struct sra_elt *elt)
2379 {
2380 dump_sra_elt_name (stderr, elt);
2381 fputc ('\n', stderr);
2382 }
2383
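/* Allocate the type decomposition and instantiation caches, if this has
   not been done already. */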
2384 void
2385 sra_init_cache (void)
2386 {
2387 if (sra_type_decomp_cache)
2388 return;
2389
2390 sra_type_decomp_cache = BITMAP_ALLOC (NULL);
2391 sra_type_inst_cache = BITMAP_ALLOC (NULL);
2392 }
2393
2394 /* Main entry point. */
2395
2396 static unsigned int
2397 tree_sra (void)
2398 {
2399 /* Initialize local variables. */
2400 todoflags = 0;
2401 gcc_obstack_init (&sra_obstack);
2402 sra_candidates = BITMAP_ALLOC (NULL);
2403 needs_copy_in = BITMAP_ALLOC (NULL);
2404 sra_init_cache ();
2405 sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);
2406
2407 /* Scan. If we find anything, instantiate and scalarize. */
2408 if (find_candidates_for_sra ())
2409 {
2410 scan_function ();
2411 decide_instantiations ();
2412 scalarize_function ();
2413 if (!bitmap_empty_p (sra_candidates))
2414 todoflags |= TODO_rebuild_alias;
2415 }
2416
2417 /* Free allocated memory. */
2418 htab_delete (sra_map);
2419 sra_map = NULL;
2420 BITMAP_FREE (sra_candidates);
2421 BITMAP_FREE (needs_copy_in);
2422 BITMAP_FREE (sra_type_decomp_cache);
2423 BITMAP_FREE (sra_type_inst_cache);
2424 obstack_free (&sra_obstack, NULL);
2425 return todoflags;
2426 }
2427
2428 static unsigned int
2429 tree_sra_early (void)
2430 {
2431 unsigned int ret;
2432
2433 early_sra = true;
2434 ret = tree_sra ();
2435 early_sra = false;
2436
2437 return ret & ~TODO_rebuild_alias;
2438 }
2439
2440 static bool
2441 gate_sra (void)
2442 {
2443 return flag_tree_sra != 0;
2444 }
2445
2446 struct tree_opt_pass pass_sra_early =
2447 {
2448 "esra", /* name */
2449 gate_sra, /* gate */
2450 tree_sra_early, /* execute */
2451 NULL, /* sub */
2452 NULL, /* next */
2453 0, /* static_pass_number */
2454 TV_TREE_SRA, /* tv_id */
2455 PROP_cfg | PROP_ssa, /* properties_required */
2456 0, /* properties_provided */
2457 0, /* properties_destroyed */
2458 0, /* todo_flags_start */
2459 TODO_dump_func
2460 | TODO_update_ssa
2461 | TODO_ggc_collect
2462 | TODO_verify_ssa, /* todo_flags_finish */
2463 0 /* letter */
2464 };
2465
2466 struct tree_opt_pass pass_sra =
2467 {
2468 "sra", /* name */
2469 gate_sra, /* gate */
2470 tree_sra, /* execute */
2471 NULL, /* sub */
2472 NULL, /* next */
2473 0, /* static_pass_number */
2474 TV_TREE_SRA, /* tv_id */
2475 PROP_cfg | PROP_ssa, /* properties_required */
2476 0, /* properties_provided */
2477 0, /* properties_destroyed */
2478 0, /* todo_flags_start */
2479 TODO_dump_func
2480 | TODO_update_ssa
2481 | TODO_ggc_collect
2482 | TODO_verify_ssa, /* todo_flags_finish */
2483 0 /* letter */
2484 };