[gcc.git] / gcc / tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2003, 2004, 2005, 2006, 2007
5 Free Software Foundation, Inc.
6 Contributed by Diego Novillo <dnovillo@redhat.com>
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by the
12 Free Software Foundation; either version 2, or (at your option) any
13 later version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT
16 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to the Free
22 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 02110-1301, USA. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "ggc.h"
30 #include "tree.h"
31
32 /* These RTL headers are needed for basic-block.h. */
33 #include "rtl.h"
34 #include "tm_p.h"
35 #include "hard-reg-set.h"
36 #include "basic-block.h"
37 #include "diagnostic.h"
38 #include "langhooks.h"
39 #include "tree-inline.h"
40 #include "tree-flow.h"
41 #include "tree-gimple.h"
42 #include "tree-dump.h"
43 #include "tree-pass.h"
44 #include "timevar.h"
45 #include "flags.h"
46 #include "bitmap.h"
47 #include "obstack.h"
48 #include "target.h"
49 /* expr.h is needed for MOVE_RATIO. */
50 #include "expr.h"
51 #include "params.h"
52
53
54 /* The objective of this pass is to replace a non-addressable aggregate with a
55 set of independent variables. Most of the time, all of these variables
56 will be scalars. But a secondary objective is to break up larger
57 aggregates into smaller aggregates. In the process we may find that some
58 bits of the larger aggregate can be deleted as unreferenced.
59
60 This substitution is done globally. More localized substitutions would
61 be the purview of a load-store motion pass.
62
63 The optimization proceeds in phases:
64
65 (1) Identify variables that have types that are candidates for
66 decomposition.
67
68 (2) Scan the function looking for the ways these variables are used.
69 In particular we're interested in the number of times a variable
70 (or member) is needed as a complete unit, and the number of times
71 a variable (or member) is copied.
72
73 (3) Based on the usage profile, instantiate substitution variables.
74
75 (4) Scan the function making replacements.
76 */
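/* As a rough illustration (not taken from the sources themselves): given

     struct pair { int x; int y; } p;
     p.x = a;
     p.y = b;
     return p.x + p.y;

   and assuming P is never address-taken, the pass conceptually introduces
   two scalar replacements and rewrites the references

     int p$x, p$y;
     p$x = a;
     p$y = b;
     return p$x + p$y;

   after which P itself may become dead and be removed.  */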
77
78
79 /* True if this is the "early" pass, before inlining. */
80 static bool early_sra;
81
82 /* The set of todo flags to return from tree_sra. */
83 static unsigned int todoflags;
84
85 /* The set of aggregate variables that are candidates for scalarization. */
86 static bitmap sra_candidates;
87
88 /* Set of scalarizable PARM_DECLs that need copy-in operations at the
89 beginning of the function. */
90 static bitmap needs_copy_in;
91
92 /* Sets of bit pairs that cache type decomposition and instantiation. */
93 static bitmap sra_type_decomp_cache;
94 static bitmap sra_type_inst_cache;
95
96 /* One of these structures is created for each candidate aggregate and
97 each (accessed) member or group of members of such an aggregate. */
98 struct sra_elt
99 {
100 /* A tree of the elements. Used when we want to traverse everything. */
101 struct sra_elt *parent;
102 struct sra_elt *groups;
103 struct sra_elt *children;
104 struct sra_elt *sibling;
105
106 /* If this element is a root, then this is the VAR_DECL. If this is
107 a sub-element, this is some token used to identify the reference.
108 In the case of COMPONENT_REF, this is the FIELD_DECL. In the case
109 of an ARRAY_REF, this is the (constant) index. In the case of an
110 ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR. In the case
111 of a complex number, this is a zero or one. */
112 tree element;
113
114 /* The type of the element. */
115 tree type;
116
117 /* A VAR_DECL, for any sub-element we've decided to replace. */
118 tree replacement;
119
120 /* The number of times the element is referenced as a whole. I.e.
121 given "a.b.c", this would be incremented for C, but not for A or B. */
122 unsigned int n_uses;
123
124 /* The number of times the element is copied to or from another
125 scalarizable element. */
126 unsigned int n_copies;
127
128 /* True if TYPE is scalar. */
129 bool is_scalar;
130
131 /* True if this element is a group of members of its parent. */
132 bool is_group;
133
134 /* True if we saw something about this element that prevents scalarization,
135 such as non-constant indexing. */
136 bool cannot_scalarize;
137
138 /* True if we've decided that structure-to-structure assignment
139 should happen via memcpy and not per-element. */
140 bool use_block_copy;
141
142 /* True if everything under this element has been marked TREE_NO_WARNING. */
143 bool all_no_warning;
144
145 /* A flag for use with/after random access traversals. */
146 bool visited;
147
148 /* True if a BIT_FIELD_REF of a vector appears on the LHS for this element. */
149 bool is_vector_lhs;
150 };
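/* For example (a sketch, not compiler output): a reference such as "a.b[2]"
   is represented by a chain of three sra_elt nodes: a root whose ELEMENT is
   the VAR_DECL for "a", a child whose ELEMENT is the FIELD_DECL for "b",
   and a grandchild whose ELEMENT is the INTEGER_CST 2.  Children of the
   same parent are linked through their SIBLING fields.  */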
151
152 #define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)
153
154 #define FOR_EACH_ACTUAL_CHILD(CHILD, ELT) \
155 for ((CHILD) = (ELT)->is_group \
156 ? next_child_for_group (NULL, (ELT)) \
157 : (ELT)->children; \
158 (CHILD); \
159 (CHILD) = (ELT)->is_group \
160 ? next_child_for_group ((CHILD), (ELT)) \
161 : (CHILD)->sibling)
162
163 /* Helper function for above macro. Return next child in group. */
164 static struct sra_elt *
165 next_child_for_group (struct sra_elt *child, struct sra_elt *group)
166 {
167 gcc_assert (group->is_group);
168
169 /* Find the next child in the parent. */
170 if (child)
171 child = child->sibling;
172 else
173 child = group->parent->children;
174
175 /* Skip siblings that do not belong to the group. */
176 while (child)
177 {
178 tree g_elt = group->element;
179 if (TREE_CODE (g_elt) == RANGE_EXPR)
180 {
181 if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
182 && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
183 break;
184 }
185 else
186 gcc_unreachable ();
187
188 child = child->sibling;
189 }
190
191 return child;
192 }
193
194 /* Random access to the child of a parent is performed by hashing.
195 This prevents quadratic behavior, and allows SRA to function
196 reasonably on larger records. */
197 static htab_t sra_map;
198
199 /* All structures are allocated out of the following obstack. */
200 static struct obstack sra_obstack;
201
202 /* Debugging functions. */
203 static void dump_sra_elt_name (FILE *, struct sra_elt *);
204 extern void debug_sra_elt_name (struct sra_elt *);
205
206 /* Forward declarations. */
207 static tree generate_element_ref (struct sra_elt *);
208 \f
209 /* Return true if DECL is an SRA candidate. */
210
211 static bool
212 is_sra_candidate_decl (tree decl)
213 {
214 return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
215 }
216
217 /* Return true if TYPE is a scalar type. */
218
219 static bool
220 is_sra_scalar_type (tree type)
221 {
222 enum tree_code code = TREE_CODE (type);
223 return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
224 || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
225 || code == POINTER_TYPE || code == OFFSET_TYPE
226 || code == REFERENCE_TYPE);
227 }
228
229 /* Return true if TYPE can be decomposed into a set of independent variables.
230
231 Note that this doesn't imply that all elements of TYPE can be
232 instantiated, just that if we decide to break up the type into
233 separate pieces that it can be done. */
234
235 bool
236 sra_type_can_be_decomposed_p (tree type)
237 {
238 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
239 tree t;
240
241 /* Avoid searching the same type twice. */
242 if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
243 return true;
244 if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
245 return false;
246
247 /* The type must have a definite nonzero size. */
248 if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
249 || integer_zerop (TYPE_SIZE (type)))
250 goto fail;
251
252 /* The type must be a non-union aggregate. */
253 switch (TREE_CODE (type))
254 {
255 case RECORD_TYPE:
256 {
257 bool saw_one_field = false;
258
259 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
260 if (TREE_CODE (t) == FIELD_DECL)
261 {
262 /* Reject incorrectly represented bit fields. */
263 if (DECL_BIT_FIELD (t)
264 && (tree_low_cst (DECL_SIZE (t), 1)
265 != TYPE_PRECISION (TREE_TYPE (t))))
266 goto fail;
267
268 saw_one_field = true;
269 }
270
271 /* Record types must have at least one field. */
272 if (!saw_one_field)
273 goto fail;
274 }
275 break;
276
277 case ARRAY_TYPE:
278 /* Array types must have a fixed lower and upper bound. */
279 t = TYPE_DOMAIN (type);
280 if (t == NULL)
281 goto fail;
282 if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
283 goto fail;
284 if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
285 goto fail;
286 break;
287
288 case COMPLEX_TYPE:
289 break;
290
291 default:
292 goto fail;
293 }
294
295 bitmap_set_bit (sra_type_decomp_cache, cache+0);
296 return true;
297
298 fail:
299 bitmap_set_bit (sra_type_decomp_cache, cache+1);
300 return false;
301 }
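/* Informally, and not exhaustively: a type such as
   "struct { int i; double d[4]; }" passes the checks above, while a union,
   a struct of zero size, or an array whose bounds are not compile-time
   constants takes the "fail" path.  */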
302
303 /* Return true if DECL can be decomposed into a set of independent
304 (though not necessarily scalar) variables. */
305
306 static bool
307 decl_can_be_decomposed_p (tree var)
308 {
309 /* Early out for scalars. */
310 if (is_sra_scalar_type (TREE_TYPE (var)))
311 return false;
312
313 /* The variable must not be aliased. */
314 if (!is_gimple_non_addressable (var))
315 {
316 if (dump_file && (dump_flags & TDF_DETAILS))
317 {
318 fprintf (dump_file, "Cannot scalarize variable ");
319 print_generic_expr (dump_file, var, dump_flags);
320 fprintf (dump_file, " because it must live in memory\n");
321 }
322 return false;
323 }
324
325 /* The variable must not be volatile. */
326 if (TREE_THIS_VOLATILE (var))
327 {
328 if (dump_file && (dump_flags & TDF_DETAILS))
329 {
330 fprintf (dump_file, "Cannot scalarize variable ");
331 print_generic_expr (dump_file, var, dump_flags);
332 fprintf (dump_file, " because it is declared volatile\n");
333 }
334 return false;
335 }
336
337 /* We must be able to decompose the variable's type. */
338 if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
339 {
340 if (dump_file && (dump_flags & TDF_DETAILS))
341 {
342 fprintf (dump_file, "Cannot scalarize variable ");
343 print_generic_expr (dump_file, var, dump_flags);
344 fprintf (dump_file, " because its type cannot be decomposed\n");
345 }
346 return false;
347 }
348
349 /* HACK: if we decompose a va_list_type_node before inlining, then we'll
350 confuse tree-stdarg.c, and we won't be able to figure out which and
351 how many arguments are accessed. This really should be improved in
352 tree-stdarg.c, as the decomposition is truly a win. This could also
353 be fixed if the stdarg pass ran early, but that can't be done until
354 we have aliasing information early too. See PR 30791. */
355 if (early_sra
356 && TYPE_MAIN_VARIANT (TREE_TYPE (var))
357 == TYPE_MAIN_VARIANT (va_list_type_node))
358 return false;
359
360 return true;
361 }
362
363 /* Return true if TYPE can be *completely* decomposed into scalars. */
364
365 static bool
366 type_can_instantiate_all_elements (tree type)
367 {
368 if (is_sra_scalar_type (type))
369 return true;
370 if (!sra_type_can_be_decomposed_p (type))
371 return false;
372
373 switch (TREE_CODE (type))
374 {
375 case RECORD_TYPE:
376 {
377 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
378 tree f;
379
380 if (bitmap_bit_p (sra_type_inst_cache, cache+0))
381 return true;
382 if (bitmap_bit_p (sra_type_inst_cache, cache+1))
383 return false;
384
385 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
386 if (TREE_CODE (f) == FIELD_DECL)
387 {
388 if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
389 {
390 bitmap_set_bit (sra_type_inst_cache, cache+1);
391 return false;
392 }
393 }
394
395 bitmap_set_bit (sra_type_inst_cache, cache+0);
396 return true;
397 }
398
399 case ARRAY_TYPE:
400 return type_can_instantiate_all_elements (TREE_TYPE (type));
401
402 case COMPLEX_TYPE:
403 return true;
404
405 default:
406 gcc_unreachable ();
407 }
408 }
409
410 /* Test whether ELT or some sub-element cannot be scalarized. */
411
412 static bool
413 can_completely_scalarize_p (struct sra_elt *elt)
414 {
415 struct sra_elt *c;
416
417 if (elt->cannot_scalarize)
418 return false;
419
420 for (c = elt->children; c; c = c->sibling)
421 if (!can_completely_scalarize_p (c))
422 return false;
423
424 for (c = elt->groups; c; c = c->sibling)
425 if (!can_completely_scalarize_p (c))
426 return false;
427
428 return true;
429 }
430
431 \f
432 /* A simplified tree hashing algorithm that only handles the types of
433 trees we expect to find in sra_elt->element. */
434
435 static hashval_t
436 sra_hash_tree (tree t)
437 {
438 hashval_t h;
439
440 switch (TREE_CODE (t))
441 {
442 case VAR_DECL:
443 case PARM_DECL:
444 case RESULT_DECL:
445 h = DECL_UID (t);
446 break;
447
448 case INTEGER_CST:
449 h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
450 break;
451
452 case RANGE_EXPR:
453 h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
454 h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
455 break;
456
457 case FIELD_DECL:
458 /* We can have types that are compatible, but have different member
459 lists, so we can't hash fields by ID. Use offsets instead. */
460 h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
461 h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
462 break;
463
464 default:
465 gcc_unreachable ();
466 }
467
468 return h;
469 }
470
471 /* Hash function for type SRA_PAIR. */
472
473 static hashval_t
474 sra_elt_hash (const void *x)
475 {
476 const struct sra_elt *e = x;
477 const struct sra_elt *p;
478 hashval_t h;
479
480 h = sra_hash_tree (e->element);
481
482 /* Take into account everything back up the chain. Given that chain
483 lengths are rarely very long, this should be acceptable. If we
484 truly identify this as a performance problem, it should work to
485 hash the pointer value "e->parent". */
486 for (p = e->parent; p ; p = p->parent)
487 h = (h * 65521) ^ sra_hash_tree (p->element);
488
489 return h;
490 }
491
492 /* Equality function for type SRA_PAIR. */
493
494 static int
495 sra_elt_eq (const void *x, const void *y)
496 {
497 const struct sra_elt *a = x;
498 const struct sra_elt *b = y;
499 tree ae, be;
500
501 if (a->parent != b->parent)
502 return false;
503
504 ae = a->element;
505 be = b->element;
506
507 if (ae == be)
508 return true;
509 if (TREE_CODE (ae) != TREE_CODE (be))
510 return false;
511
512 switch (TREE_CODE (ae))
513 {
514 case VAR_DECL:
515 case PARM_DECL:
516 case RESULT_DECL:
517 /* These are all pointer unique. */
518 return false;
519
520 case INTEGER_CST:
521 /* Integers are not pointer unique, so compare their values. */
522 return tree_int_cst_equal (ae, be);
523
524 case RANGE_EXPR:
525 return
526 tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
527 && tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));
528
529 case FIELD_DECL:
530 /* Fields are unique within a record, but not between
531 compatible records. */
532 if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
533 return false;
534 return fields_compatible_p (ae, be);
535
536 default:
537 gcc_unreachable ();
538 }
539 }
540
541 /* Create or return the SRA_ELT structure for CHILD in PARENT. PARENT
542 may be null, in which case CHILD must be a DECL. */
543
544 static struct sra_elt *
545 lookup_element (struct sra_elt *parent, tree child, tree type,
546 enum insert_option insert)
547 {
548 struct sra_elt dummy;
549 struct sra_elt **slot;
550 struct sra_elt *elt;
551
552 if (parent)
553 dummy.parent = parent->is_group ? parent->parent : parent;
554 else
555 dummy.parent = NULL;
556 dummy.element = child;
557
558 slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
559 if (!slot && insert == NO_INSERT)
560 return NULL;
561
562 elt = *slot;
563 if (!elt && insert == INSERT)
564 {
565 *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
566 memset (elt, 0, sizeof (*elt));
567
568 elt->parent = parent;
569 elt->element = child;
570 elt->type = type;
571 elt->is_scalar = is_sra_scalar_type (type);
572
573 if (parent)
574 {
575 if (IS_ELEMENT_FOR_GROUP (elt->element))
576 {
577 elt->is_group = true;
578 elt->sibling = parent->groups;
579 parent->groups = elt;
580 }
581 else
582 {
583 elt->sibling = parent->children;
584 parent->children = elt;
585 }
586 }
587
588 /* If this is a parameter and we want to scalarize it, we will have
589 one copy in from the true function parameter. Count it now. */
590 if (TREE_CODE (child) == PARM_DECL)
591 {
592 elt->n_copies = 1;
593 bitmap_set_bit (needs_copy_in, DECL_UID (child));
594 }
595 }
596
597 return elt;
598 }
599
600 /* Create or return the SRA_ELT structure for EXPR if the expression
601 refers to a scalarizable variable. */
602
603 static struct sra_elt *
604 maybe_lookup_element_for_expr (tree expr)
605 {
606 struct sra_elt *elt;
607 tree child;
608
609 switch (TREE_CODE (expr))
610 {
611 case VAR_DECL:
612 case PARM_DECL:
613 case RESULT_DECL:
614 if (is_sra_candidate_decl (expr))
615 return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
616 return NULL;
617
618 case ARRAY_REF:
619 /* We can't scalarize variable array indices. */
620 if (in_array_bounds_p (expr))
621 child = TREE_OPERAND (expr, 1);
622 else
623 return NULL;
624 break;
625
626 case ARRAY_RANGE_REF:
627 /* We can't scalarize variable array indices. */
628 if (range_in_array_bounds_p (expr))
629 {
630 tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
631 child = build2 (RANGE_EXPR, integer_type_node,
632 TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
633 }
634 else
635 return NULL;
636 break;
637
638 case COMPONENT_REF:
639 /* Don't look through unions. */
640 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
641 return NULL;
642 child = TREE_OPERAND (expr, 1);
643 break;
644
645 case REALPART_EXPR:
646 child = integer_zero_node;
647 break;
648 case IMAGPART_EXPR:
649 child = integer_one_node;
650 break;
651
652 default:
653 return NULL;
654 }
655
656 elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
657 if (elt)
658 return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
659 return NULL;
660 }
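/* To sketch the recursion above with a hypothetical expression: for
   "s.f[3]", where S is a candidate and the index is constant and in
   bounds, the ARRAY_REF recurses on "s.f", which recurses on "s"; the
   unwinding then performs lookup_element (elt_for_s, f, ...) followed by
   lookup_element (elt_for_s_f, 3, ...), building the element chain on
   demand via INSERT.  */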
661
662 \f
663 /* Functions to walk just enough of the tree to see all scalarizable
664 references, and categorize them. */
665
666 /* A set of callbacks for phases 2 and 4. They'll be invoked for the
667 various kinds of references seen. In all cases, *BSI is an iterator
668 pointing to the statement being processed. */
669 struct sra_walk_fns
670 {
671 /* Invoked when ELT is required as a unit. Note that ELT might refer to
672 a leaf node, in which case this is a simple scalar reference. *EXPR_P
673 points to the location of the expression. IS_OUTPUT is true if this
674 is a left-hand-side reference. USE_ALL is true if we saw something we
675 couldn't quite identify and had to force the use of the entire object. */
676 void (*use) (struct sra_elt *elt, tree *expr_p,
677 block_stmt_iterator *bsi, bool is_output, bool use_all);
678
679 /* Invoked when we have a copy between two scalarizable references. */
680 void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
681 block_stmt_iterator *bsi);
682
683 /* Invoked when ELT is initialized from a constant. VALUE may be NULL,
684 in which case it should be treated as an empty CONSTRUCTOR. */
685 void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);
686
687 /* Invoked when we have a copy between one scalarizable reference ELT
688 and one non-scalarizable reference OTHER without side-effects.
689 IS_OUTPUT is true if ELT is on the left-hand side. */
690 void (*ldst) (struct sra_elt *elt, tree other,
691 block_stmt_iterator *bsi, bool is_output);
692
693 /* True during phase 2, false during phase 4. */
694 /* ??? This is a hack. */
695 bool initial_scan;
696 };
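/* As an informal guide to how the walker below dispatches these callbacks
   (variable names invented): for "a.x = b.y" with both sides scalarizable
   we get COPY; for "a = {}" or any COMPLEX_EXPR/COMPLEX_CST/CONSTRUCTOR
   source we get INIT; for "a = *p" or "*p = a", where only one side is a
   scalarizable aggregate, we get LDST; any other appearance of a
   candidate, e.g. "a.x" passed as a call argument, is reported through
   USE.  */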
697
698 #ifdef ENABLE_CHECKING
699 /* Invoked via walk_tree, if *TP contains a candidate decl, return it. */
700
701 static tree
702 sra_find_candidate_decl (tree *tp, int *walk_subtrees,
703 void *data ATTRIBUTE_UNUSED)
704 {
705 tree t = *tp;
706 enum tree_code code = TREE_CODE (t);
707
708 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
709 {
710 *walk_subtrees = 0;
711 if (is_sra_candidate_decl (t))
712 return t;
713 }
714 else if (TYPE_P (t))
715 *walk_subtrees = 0;
716
717 return NULL;
718 }
719 #endif
720
721 /* Walk most expressions looking for a scalarizable aggregate.
722 If we find one, invoke FNS->USE. */
723
724 static void
725 sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
726 const struct sra_walk_fns *fns)
727 {
728 tree expr = *expr_p;
729 tree inner = expr;
730 bool disable_scalarization = false;
731 bool use_all_p = false;
732
733 /* We're looking to collect a reference expression between EXPR and INNER,
734 such that INNER is a scalarizable decl and all other nodes through EXPR
735 are references that we can scalarize. If we come across something that
736 we can't scalarize, we reset EXPR. This has the effect of making it
737 appear that we're referring to the larger expression as a whole. */
738
739 while (1)
740 switch (TREE_CODE (inner))
741 {
742 case VAR_DECL:
743 case PARM_DECL:
744 case RESULT_DECL:
745 /* If there is a scalarizable decl at the bottom, then process it. */
746 if (is_sra_candidate_decl (inner))
747 {
748 struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
749 if (disable_scalarization)
750 elt->cannot_scalarize = true;
751 else
752 fns->use (elt, expr_p, bsi, is_output, use_all_p);
753 }
754 return;
755
756 case ARRAY_REF:
757 /* Non-constant index means any member may be accessed. Prevent the
758 expression from being scalarized. If we were to treat this as a
759 reference to the whole array, we can wind up with a single dynamic
760 index reference inside a loop being overridden by several constant
761 index references during loop setup. It's possible that this could
762 be avoided by using dynamic usage counts based on BB trip counts
763 (based on loop analysis or profiling), but that hardly seems worth
764 the effort. */
765 /* ??? Hack. Figure out how to push this into the scan routines
766 without duplicating too much code. */
767 if (!in_array_bounds_p (inner))
768 {
769 disable_scalarization = true;
770 goto use_all;
771 }
772 /* ??? Are we assured that non-constant bounds and stride will have
773 the same value everywhere? I don't think Fortran will... */
774 if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
775 goto use_all;
776 inner = TREE_OPERAND (inner, 0);
777 break;
778
779 case ARRAY_RANGE_REF:
780 if (!range_in_array_bounds_p (inner))
781 {
782 disable_scalarization = true;
783 goto use_all;
784 }
785 /* ??? See above re non-constant bounds and stride. */
786 if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
787 goto use_all;
788 inner = TREE_OPERAND (inner, 0);
789 break;
790
791 case COMPONENT_REF:
792 /* A reference to a union member constitutes a reference to the
793 entire union. */
794 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
795 goto use_all;
796 /* ??? See above re non-constant stride. */
797 if (TREE_OPERAND (inner, 2))
798 goto use_all;
799 inner = TREE_OPERAND (inner, 0);
800 break;
801
802 case REALPART_EXPR:
803 case IMAGPART_EXPR:
804 inner = TREE_OPERAND (inner, 0);
805 break;
806
807 case BIT_FIELD_REF:
808 /* A bit field reference to a specific vector is scalarized, but ones
809 that are outputs need to be marked as used on the left-hand side so
810 that when we scalarize the vector, we can mark it as non-renamable. */
811 if (is_output
812 && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
813 {
814 struct sra_elt *elt
815 = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
816 if (elt)
817 elt->is_vector_lhs = true;
818 }
819 /* A bit field reference (access to *multiple* fields simultaneously)
820 is not currently scalarized. Consider this an access to the
821 complete outer element, to which walk_tree will bring us next. */
822
823 goto use_all;
824
825 case VIEW_CONVERT_EXPR:
826 case NOP_EXPR:
827 /* Similarly, a view/nop explicitly wants to look at an object in a
828 type other than the one we've scalarized. */
829 goto use_all;
830
831 case WITH_SIZE_EXPR:
832 /* This is a transparent wrapper. The entire inner expression really
833 is being used. */
834 goto use_all;
835
836 use_all:
837 expr_p = &TREE_OPERAND (inner, 0);
838 inner = expr = *expr_p;
839 use_all_p = true;
840 break;
841
842 default:
843 #ifdef ENABLE_CHECKING
844 /* Validate that we're not missing any references. */
845 gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
846 #endif
847 return;
848 }
849 }
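/* For instance (informal): in "VIEW_CONVERT_EXPR<T>(s.f)" the walk above
   lands in the use_all case, so FNS->USE is invoked on the element for
   "s.f" (the operand of the conversion) with USE_ALL set, instead of
   trying to scalarize through the conversion.  */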
850
851 /* Walk a TREE_LIST of values looking for scalarizable aggregates.
852 If we find one, invoke FNS->USE. */
853
854 static void
855 sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
856 const struct sra_walk_fns *fns)
857 {
858 tree op;
859 for (op = list; op ; op = TREE_CHAIN (op))
860 sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
861 }
862
863 /* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
864 If we find one, invoke FNS->USE. */
865
866 static void
867 sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
868 const struct sra_walk_fns *fns)
869 {
870 int i;
871 int nargs = call_expr_nargs (expr);
872 for (i = 0; i < nargs; i++)
873 sra_walk_expr (&CALL_EXPR_ARG (expr, i), bsi, false, fns);
874 }
875
876 /* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
877 aggregates. If we find one, invoke FNS->USE. */
878
879 static void
880 sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
881 const struct sra_walk_fns *fns)
882 {
883 sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
884 sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
885 }
886
887 /* Walk a GIMPLE_MODIFY_STMT and categorize the assignment appropriately. */
888
889 static void
890 sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
891 const struct sra_walk_fns *fns)
892 {
893 struct sra_elt *lhs_elt, *rhs_elt;
894 tree lhs, rhs;
895
896 lhs = GIMPLE_STMT_OPERAND (expr, 0);
897 rhs = GIMPLE_STMT_OPERAND (expr, 1);
898 lhs_elt = maybe_lookup_element_for_expr (lhs);
899 rhs_elt = maybe_lookup_element_for_expr (rhs);
900
901 /* If both sides are scalarizable, this is a COPY operation. */
902 if (lhs_elt && rhs_elt)
903 {
904 fns->copy (lhs_elt, rhs_elt, bsi);
905 return;
906 }
907
908 /* If the RHS is scalarizable, handle it. There are only two cases. */
909 if (rhs_elt)
910 {
911 if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
912 fns->ldst (rhs_elt, lhs, bsi, false);
913 else
914 fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false, false);
915 }
916
917 /* If it isn't scalarizable, there may be scalarizable variables within, so
918 check for a call or else walk the RHS to see if we need to do any
919 copy-in operations. We need to do it before the LHS is scalarized so
920 that the statements get inserted in the proper place, before any
921 copy-out operations. */
922 else
923 {
924 tree call = get_call_expr_in (rhs);
925 if (call)
926 sra_walk_call_expr (call, bsi, fns);
927 else
928 sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 1), bsi, false, fns);
929 }
930
931 /* Likewise, handle the LHS being scalarizable. We have cases similar
932 to those above, but also want to handle RHS being constant. */
933 if (lhs_elt)
934 {
935 /* If this is an assignment from a constant, or constructor, then
936 we have access to all of the elements individually. Invoke INIT. */
937 if (TREE_CODE (rhs) == COMPLEX_EXPR
938 || TREE_CODE (rhs) == COMPLEX_CST
939 || TREE_CODE (rhs) == CONSTRUCTOR)
940 fns->init (lhs_elt, rhs, bsi);
941
942 /* If this is an assignment from read-only memory, treat this as if
943 we'd been passed the constructor directly. Invoke INIT. */
944 else if (TREE_CODE (rhs) == VAR_DECL
945 && TREE_STATIC (rhs)
946 && TREE_READONLY (rhs)
947 && targetm.binds_local_p (rhs))
948 fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);
949
950 /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
951 The lvalue requirement prevents us from trying to directly scalarize
952 the result of a function call, which would result in trying to call
953 the function multiple times, and other evil things. */
954 else if (!lhs_elt->is_scalar
955 && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
956 fns->ldst (lhs_elt, rhs, bsi, true);
957
958 /* Otherwise we're being used in some context that requires the
959 aggregate to be seen as a whole. Invoke USE. */
960 else
961 fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true, false);
962 }
963
964 /* Similarly to above, LHS_ELT being null only means that the LHS as a
965 whole is not a scalarizable reference. There may be occurrences of
966 scalarizable variables within, which implies a USE. */
967 else
968 sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 0), bsi, true, fns);
969 }
970
971 /* Entry point to the walk functions. Search the entire function,
972 invoking the callbacks in FNS on each of the references to
973 scalarizable variables. */
974
975 static void
976 sra_walk_function (const struct sra_walk_fns *fns)
977 {
978 basic_block bb;
979 block_stmt_iterator si, ni;
980
981 /* ??? Phase 4 could derive some benefit from walking the function in
982 dominator tree order. */
983
984 FOR_EACH_BB (bb)
985 for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
986 {
987 tree stmt, t;
988 stmt_ann_t ann;
989
990 stmt = bsi_stmt (si);
991 ann = stmt_ann (stmt);
992
993 ni = si;
994 bsi_next (&ni);
995
996 /* If the statement has no virtual operands, then it doesn't
997 make any structure references that we care about. */
998 if (gimple_aliases_computed_p (cfun)
999 && ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
1000 continue;
1001
1002 switch (TREE_CODE (stmt))
1003 {
1004 case RETURN_EXPR:
1005 /* If we have "return <retval>" then the return value is
1006 already exposed for our pleasure. Walk it as a USE to
1007 force all the components back in place for the return.
1008
1009 If we have an embedded assignment, then <retval> is of
1010 a type that gets returned in registers in this ABI, and
1011 we do not wish to extend their lifetimes. Treat this
1012 as a USE of the variable on the RHS of this assignment. */
1013
1014 t = TREE_OPERAND (stmt, 0);
1015 if (t == NULL_TREE)
1016 ;
1017 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
1018 sra_walk_expr (&GIMPLE_STMT_OPERAND (t, 1), &si, false, fns);
1019 else
1020 sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
1021 break;
1022
1023 case GIMPLE_MODIFY_STMT:
1024 sra_walk_gimple_modify_stmt (stmt, &si, fns);
1025 break;
1026 case CALL_EXPR:
1027 sra_walk_call_expr (stmt, &si, fns);
1028 break;
1029 case ASM_EXPR:
1030 sra_walk_asm_expr (stmt, &si, fns);
1031 break;
1032
1033 default:
1034 break;
1035 }
1036 }
1037 }
1038 \f
1039 /* Phase One: Scan all referenced variables in the program looking for
1040 structures that could be decomposed. */
1041
1042 static bool
1043 find_candidates_for_sra (void)
1044 {
1045 bool any_set = false;
1046 tree var;
1047 referenced_var_iterator rvi;
1048
1049 FOR_EACH_REFERENCED_VAR (var, rvi)
1050 {
1051 if (decl_can_be_decomposed_p (var))
1052 {
1053 bitmap_set_bit (sra_candidates, DECL_UID (var));
1054 any_set = true;
1055 }
1056 }
1057
1058 return any_set;
1059 }
1060
1061 \f
1062 /* Phase Two: Scan all references to scalarizable variables. Count the
1063 number of times they are used or copied respectively. */
1064
1065 /* Callbacks to fill in SRA_WALK_FNS. Everything but USE is
1066 considered a copy, because we can decompose the reference such that
1067 the sub-elements needn't be contiguous. */
1068
1069 static void
1070 scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
1071 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
1072 bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
1073 {
1074 elt->n_uses += 1;
1075 }
1076
1077 static void
1078 scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
1079 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
1080 {
1081 lhs_elt->n_copies += 1;
1082 rhs_elt->n_copies += 1;
1083 }
1084
1085 static void
1086 scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
1087 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
1088 {
1089 lhs_elt->n_copies += 1;
1090 }
1091
1092 static void
1093 scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
1094 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
1095 bool is_output ATTRIBUTE_UNUSED)
1096 {
1097 elt->n_copies += 1;
1098 }
1099
1100 /* Dump the values we collected during the scanning phase. */
1101
1102 static void
1103 scan_dump (struct sra_elt *elt)
1104 {
1105 struct sra_elt *c;
1106
1107 dump_sra_elt_name (dump_file, elt);
1108 fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);
1109
1110 for (c = elt->children; c ; c = c->sibling)
1111 scan_dump (c);
1112
1113 for (c = elt->groups; c ; c = c->sibling)
1114 scan_dump (c);
1115 }
1116
1117 /* Entry point to phase 2. Scan the entire function, building up
1118 scalarization data structures, recording copies and uses. */
1119
1120 static void
1121 scan_function (void)
1122 {
1123 static const struct sra_walk_fns fns = {
1124 scan_use, scan_copy, scan_init, scan_ldst, true
1125 };
1126 bitmap_iterator bi;
1127
1128 sra_walk_function (&fns);
1129
1130 if (dump_file && (dump_flags & TDF_DETAILS))
1131 {
1132 unsigned i;
1133
1134 fputs ("\nScan results:\n", dump_file);
1135 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
1136 {
1137 tree var = referenced_var (i);
1138 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1139 if (elt)
1140 scan_dump (elt);
1141 }
1142 fputc ('\n', dump_file);
1143 }
1144 }
1145 \f
1146 /* Phase Three: Make decisions about which variables to scalarize, if any.
1147 All elements to be scalarized have replacement variables made for them. */
1148
1149 /* A subroutine of build_element_name. Recursively build the element
1150 name on the obstack. */
1151
1152 static void
1153 build_element_name_1 (struct sra_elt *elt)
1154 {
1155 tree t;
1156 char buffer[32];
1157
1158 if (elt->parent)
1159 {
1160 build_element_name_1 (elt->parent);
1161 obstack_1grow (&sra_obstack, '$');
1162
1163 if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
1164 {
1165 if (elt->element == integer_zero_node)
1166 obstack_grow (&sra_obstack, "real", 4);
1167 else
1168 obstack_grow (&sra_obstack, "imag", 4);
1169 return;
1170 }
1171 }
1172
1173 t = elt->element;
1174 if (TREE_CODE (t) == INTEGER_CST)
1175 {
1176 /* ??? Eh. Don't bother doing double-wide printing. */
1177 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
1178 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1179 }
1180 else
1181 {
1182 tree name = DECL_NAME (t);
1183 if (name)
1184 obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
1185 IDENTIFIER_LENGTH (name));
1186 else
1187 {
1188 sprintf (buffer, "D%u", DECL_UID (t));
1189 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1190 }
1191 }
1192 }
1193
1194 /* Construct a pretty variable name for an element's replacement variable.
1195 The name is built on the obstack. */
1196
1197 static char *
1198 build_element_name (struct sra_elt *elt)
1199 {
1200 build_element_name_1 (elt);
1201 obstack_1grow (&sra_obstack, '\0');
1202 return XOBFINISH (&sra_obstack, char *);
1203 }
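/* For instance, if ELT stands for the imaginary part of a complex field
   "c" of a variable "v", the name built here is "v$c$imag"; an array
   element uses its constant index, e.g. "v$a$3".  (Names are purely
   illustrative.)  */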
1204
1205 /* Instantiate an element as an independent variable. */
1206
1207 static void
1208 instantiate_element (struct sra_elt *elt)
1209 {
1210 struct sra_elt *base_elt;
1211 tree var, base;
1212
1213 for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
1214 continue;
1215 base = base_elt->element;
1216
1217 elt->replacement = var = make_rename_temp (elt->type, "SR");
1218
1219 /* For vectors, if used on the left hand side with BIT_FIELD_REF,
1220 they are not a gimple register. */
1221 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
1222 DECL_GIMPLE_REG_P (var) = 0;
1223
1224 DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
1225 DECL_ARTIFICIAL (var) = 1;
1226
1227 if (TREE_THIS_VOLATILE (elt->type))
1228 {
1229 TREE_THIS_VOLATILE (var) = 1;
1230 TREE_SIDE_EFFECTS (var) = 1;
1231 }
1232
1233 if (DECL_NAME (base) && !DECL_IGNORED_P (base))
1234 {
1235 char *pretty_name = build_element_name (elt);
1236 DECL_NAME (var) = get_identifier (pretty_name);
1237 obstack_free (&sra_obstack, pretty_name);
1238
1239 SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
1240 DECL_DEBUG_EXPR_IS_FROM (var) = 1;
1241
1242 DECL_IGNORED_P (var) = 0;
1243 TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
1244 if (elt->element && TREE_NO_WARNING (elt->element))
1245 TREE_NO_WARNING (var) = 1;
1246 }
1247 else
1248 {
1249 DECL_IGNORED_P (var) = 1;
1250 /* ??? We can't generate any warning that would be meaningful. */
1251 TREE_NO_WARNING (var) = 1;
1252 }
1253
1254 if (dump_file)
1255 {
1256 fputs (" ", dump_file);
1257 dump_sra_elt_name (dump_file, elt);
1258 fputs (" -> ", dump_file);
1259 print_generic_expr (dump_file, var, dump_flags);
1260 fputc ('\n', dump_file);
1261 }
1262 }
1263
1264 /* Make one pass across an element tree deciding whether or not it's
1265 profitable to instantiate individual leaf scalars.
1266
1267 PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
1268 fields all the way up the tree. */
1269
1270 static void
1271 decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
1272 unsigned int parent_copies)
1273 {
1274 if (dump_file && !elt->parent)
1275 {
1276 fputs ("Initial instantiation for ", dump_file);
1277 dump_sra_elt_name (dump_file, elt);
1278 fputc ('\n', dump_file);
1279 }
1280
1281 if (elt->cannot_scalarize)
1282 return;
1283
1284 if (elt->is_scalar)
1285 {
1286 /* The decision is simple: instantiate if we're used more frequently
1287 than the parent needs to be seen as a complete unit. */
1288 if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
1289 instantiate_element (elt);
1290 }
1291 else
1292 {
1293 struct sra_elt *c, *group;
1294 unsigned int this_uses = elt->n_uses + parent_uses;
1295 unsigned int this_copies = elt->n_copies + parent_copies;
1296
1297 /* Consider groups of sub-elements as weighing in favour of
1298 instantiation whatever their size. */
1299 for (group = elt->groups; group ; group = group->sibling)
1300 FOR_EACH_ACTUAL_CHILD (c, group)
1301 {
1302 c->n_uses += group->n_uses;
1303 c->n_copies += group->n_copies;
1304 }
1305
1306 for (c = elt->children; c ; c = c->sibling)
1307 decide_instantiation_1 (c, this_uses, this_copies);
1308 }
1309 }
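/* A small worked example of the comparison above (numbers invented): if a
   leaf "a.x" has n_uses == 3 and n_copies == 1 while the enclosing "a" is
   used as a whole twice and never copied (parent_uses == 2,
   parent_copies == 0), then 3 + 1 + 0 > 2 holds and the leaf is
   instantiated; had "a" been used whole five times instead, the test
   would fail and "a.x" would not be replaced.  */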
1310
1311 /* Compute the size and number of all instantiated elements below ELT.
1312 We will only care about this if the size of the complete structure
1313 fits in a HOST_WIDE_INT, so we don't have to worry about overflow. */
1314
1315 static unsigned int
1316 sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
1317 {
1318 if (elt->replacement)
1319 {
1320 *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
1321 return 1;
1322 }
1323 else
1324 {
1325 struct sra_elt *c;
1326 unsigned int count = 0;
1327
1328 for (c = elt->children; c ; c = c->sibling)
1329 count += sum_instantiated_sizes (c, sizep);
1330
1331 return count;
1332 }
1333 }
1334
1335 /* Instantiate fields in ELT->TYPE that are not currently present as
1336 children of ELT. */
1337
1338 static void instantiate_missing_elements (struct sra_elt *elt);
1339
1340 static void
1341 instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
1342 {
1343 struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
1344 if (sub->is_scalar)
1345 {
1346 if (sub->replacement == NULL)
1347 instantiate_element (sub);
1348 }
1349 else
1350 instantiate_missing_elements (sub);
1351 }
1352
1353 static void
1354 instantiate_missing_elements (struct sra_elt *elt)
1355 {
1356 tree type = elt->type;
1357
1358 switch (TREE_CODE (type))
1359 {
1360 case RECORD_TYPE:
1361 {
1362 tree f;
1363 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
1364 if (TREE_CODE (f) == FIELD_DECL)
1365 {
1366 tree field_type = TREE_TYPE (f);
1367
1368 /* canonicalize_component_ref() unwidens some bit-field
1369 types (not marked as DECL_BIT_FIELD in C++), so we
1370 must do the same, lest we may introduce type
1371 mismatches. */
1372 if (INTEGRAL_TYPE_P (field_type)
1373 && DECL_MODE (f) != TYPE_MODE (field_type))
1374 field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
1375 field_type,
1376 elt->element,
1377 f, NULL_TREE),
1378 NULL_TREE));
1379
1380 instantiate_missing_elements_1 (elt, f, field_type);
1381 }
1382 break;
1383 }
1384
1385 case ARRAY_TYPE:
1386 {
1387 tree i, max, subtype;
1388
1389 i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1390 max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1391 subtype = TREE_TYPE (type);
1392
1393 while (1)
1394 {
1395 instantiate_missing_elements_1 (elt, i, subtype);
1396 if (tree_int_cst_equal (i, max))
1397 break;
1398 i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
1399 }
1400
1401 break;
1402 }
1403
1404 case COMPLEX_TYPE:
1405 type = TREE_TYPE (type);
1406 instantiate_missing_elements_1 (elt, integer_zero_node, type);
1407 instantiate_missing_elements_1 (elt, integer_one_node, type);
1408 break;
1409
1410 default:
1411 gcc_unreachable ();
1412 }
1413 }
1414
1415 /* Return true if there is only one non-aggregate field in the record, TYPE.
1416 Return false otherwise. */
1417
1418 static bool
1419 single_scalar_field_in_record_p (tree type)
1420 {
1421 int num_fields = 0;
1422 tree field;
1423 if (TREE_CODE (type) != RECORD_TYPE)
1424 return false;
1425
1426 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1427 if (TREE_CODE (field) == FIELD_DECL)
1428 {
1429 num_fields++;
1430
1431 if (num_fields == 2)
1432 return false;
1433
1434 if (AGGREGATE_TYPE_P (TREE_TYPE (field)))
1435 return false;
1436 }
1437
1438 return true;
1439 }
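/* E.g. "struct { float f; }" satisfies this predicate, whereas
   "struct { float f; int i; }" (two fields) and "struct { struct s t; }"
   (an aggregate field) do not.  (Illustrative types only.)  */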
1440
1441 /* Make one pass across an element tree deciding whether to perform block
1442 or element copies. If we decide on element copies, instantiate all
1443 elements. Return true if there are any instantiated sub-elements. */
1444
1445 static bool
1446 decide_block_copy (struct sra_elt *elt)
1447 {
1448 struct sra_elt *c;
1449 bool any_inst;
1450
1451 /* We shouldn't be invoked on groups of sub-elements as they must
1452 behave like their parent as far as block copy is concerned. */
1453 gcc_assert (!elt->is_group);
1454
1455 /* If scalarization is disabled, respect it. */
1456 if (elt->cannot_scalarize)
1457 {
1458 elt->use_block_copy = 1;
1459
1460 if (dump_file)
1461 {
1462 fputs ("Scalarization disabled for ", dump_file);
1463 dump_sra_elt_name (dump_file, elt);
1464 fputc ('\n', dump_file);
1465 }
1466
1467 /* Disable scalarization of sub-elements. */
1468 for (c = elt->children; c; c = c->sibling)
1469 {
1470 c->cannot_scalarize = 1;
1471 decide_block_copy (c);
1472 }
1473
1474 /* Groups behave like their parent. */
1475 for (c = elt->groups; c; c = c->sibling)
1476 {
1477 c->cannot_scalarize = 1;
1478 c->use_block_copy = 1;
1479 }
1480
1481 return false;
1482 }
1483
1484 /* Don't decide if we've no uses. */
1485 if (elt->n_uses == 0 && elt->n_copies == 0)
1486 ;
1487
1488 else if (!elt->is_scalar)
1489 {
1490 tree size_tree = TYPE_SIZE_UNIT (elt->type);
1491 bool use_block_copy = true;
1492
1493 /* Tradeoffs for COMPLEX types pretty much always make it better
1494 to go ahead and split the components. */
1495 if (TREE_CODE (elt->type) == COMPLEX_TYPE)
1496 use_block_copy = false;
1497
1498 /* Don't bother trying to figure out the rest if the structure is
1499 so large we can't do easy arithmetic. This also forces block
1500 copies for variable sized structures. */
1501 else if (host_integerp (size_tree, 1))
1502 {
1503 unsigned HOST_WIDE_INT full_size, inst_size = 0;
1504 unsigned int max_size, max_count, inst_count, full_count;
1505
1506 /* If the sra-max-structure-size parameter is 0, then the
1507 user has not overridden the parameter and we can choose a
1508 sensible default. */
1509 max_size = SRA_MAX_STRUCTURE_SIZE
1510 ? SRA_MAX_STRUCTURE_SIZE
1511 : MOVE_RATIO * UNITS_PER_WORD;
1512 max_count = SRA_MAX_STRUCTURE_COUNT
1513 ? SRA_MAX_STRUCTURE_COUNT
1514 : MOVE_RATIO;
1515
1516 full_size = tree_low_cst (size_tree, 1);
1517 full_count = count_type_elements (elt->type, false);
1518 inst_count = sum_instantiated_sizes (elt, &inst_size);
1519
1520 /* If there is only one scalar field in the record, don't block copy. */
1521 if (single_scalar_field_in_record_p (elt->type))
1522 use_block_copy = false;
1523
1524 /* ??? What to do here. If there are two fields, and we've only
1525 instantiated one, then instantiating the other is clearly a win.
1526 If there are a large number of fields then the size of the copy
1527 is much more of a factor. */
1528
1529 /* If the structure is small, and we've made copies, go ahead
1530 and instantiate, hoping that the copies will go away. */
1531 if (full_size <= max_size
1532 && (full_count - inst_count) <= max_count
1533 && elt->n_copies > elt->n_uses)
1534 use_block_copy = false;
1535 else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
1536 && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
1537 use_block_copy = false;
1538
1539 /* In order to avoid block copy, we have to be able to instantiate
1540 all elements of the type. See if this is possible. */
1541 if (!use_block_copy
1542 && (!can_completely_scalarize_p (elt)
1543 || !type_can_instantiate_all_elements (elt->type)))
1544 use_block_copy = true;
1545 }
1546
1547 elt->use_block_copy = use_block_copy;
1548
1549 /* Groups behave like their parent. */
1550 for (c = elt->groups; c; c = c->sibling)
1551 c->use_block_copy = use_block_copy;
1552
1553 if (dump_file)
1554 {
1555 fprintf (dump_file, "Using %s for ",
1556 use_block_copy ? "block-copy" : "element-copy");
1557 dump_sra_elt_name (dump_file, elt);
1558 fputc ('\n', dump_file);
1559 }
1560
1561 if (!use_block_copy)
1562 {
1563 instantiate_missing_elements (elt);
1564 return true;
1565 }
1566 }
1567
1568 any_inst = elt->replacement != NULL;
1569
1570 for (c = elt->children; c ; c = c->sibling)
1571 any_inst |= decide_block_copy (c);
1572
1573 return any_inst;
1574 }
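/* To make the thresholds above concrete (the exact values are target and
   parameter dependent): with the default parameters, MAX_SIZE falls back
   to MOVE_RATIO * UNITS_PER_WORD bytes and MAX_COUNT to MOVE_RATIO, so a
   small structure that is mostly copied rather than used as a whole tends
   to be split into element copies, while a large or only partially
   instantiated structure keeps the block copy.  */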
1575
1576 /* Entry point to phase 3. Instantiate scalar replacement variables. */
1577
1578 static void
1579 decide_instantiations (void)
1580 {
1581 unsigned int i;
1582 bool cleared_any;
1583 bitmap_head done_head;
1584 bitmap_iterator bi;
1585
1586 /* We cannot clear bits from a bitmap we're iterating over,
1587 so save up all the bits to clear until the end. */
1588 bitmap_initialize (&done_head, &bitmap_default_obstack);
1589 cleared_any = false;
1590
1591 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
1592 {
1593 tree var = referenced_var (i);
1594 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1595 if (elt)
1596 {
1597 decide_instantiation_1 (elt, 0, 0);
1598 if (!decide_block_copy (elt))
1599 elt = NULL;
1600 }
1601 if (!elt)
1602 {
1603 bitmap_set_bit (&done_head, i);
1604 cleared_any = true;
1605 }
1606 }
1607
1608 if (cleared_any)
1609 {
1610 bitmap_and_compl_into (sra_candidates, &done_head);
1611 bitmap_and_compl_into (needs_copy_in, &done_head);
1612 }
1613 bitmap_clear (&done_head);
1614
1615 if (!bitmap_empty_p (sra_candidates))
1616 todoflags |= TODO_update_smt_usage;
1617
1618 mark_set_for_renaming (sra_candidates);
1619
1620 if (dump_file)
1621 fputc ('\n', dump_file);
1622 }
1623
1624 \f
1625 /* Phase Four: Update the function to match the replacements created. */
1626
1627 /* Mark all the variables in VDEF/VUSE operands for STMT for
1628 renaming. This becomes necessary when we modify all of a
1629 non-scalar. */
1630
1631 static void
1632 mark_all_v_defs_1 (tree stmt)
1633 {
1634 tree sym;
1635 ssa_op_iter iter;
1636
1637 update_stmt_if_modified (stmt);
1638
1639 FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
1640 {
1641 if (TREE_CODE (sym) == SSA_NAME)
1642 sym = SSA_NAME_VAR (sym);
1643 mark_sym_for_renaming (sym);
1644 }
1645 }
1646
1647
1648 /* Mark all the variables in virtual operands in all the statements in
1649 LIST for renaming. */
1650
1651 static void
1652 mark_all_v_defs (tree list)
1653 {
1654 if (TREE_CODE (list) != STATEMENT_LIST)
1655 mark_all_v_defs_1 (list);
1656 else
1657 {
1658 tree_stmt_iterator i;
1659 for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
1660 mark_all_v_defs_1 (tsi_stmt (i));
1661 }
1662 }
1663
1664
1665 /* Mark every replacement under ELT with TREE_NO_WARNING. */
1666
1667 static void
1668 mark_no_warning (struct sra_elt *elt)
1669 {
1670 if (!elt->all_no_warning)
1671 {
1672 if (elt->replacement)
1673 TREE_NO_WARNING (elt->replacement) = 1;
1674 else
1675 {
1676 struct sra_elt *c;
1677 FOR_EACH_ACTUAL_CHILD (c, elt)
1678 mark_no_warning (c);
1679 }
1680 elt->all_no_warning = true;
1681 }
1682 }
1683
1684 /* Build a single level component reference to ELT rooted at BASE. */
1685
1686 static tree
1687 generate_one_element_ref (struct sra_elt *elt, tree base)
1688 {
1689 switch (TREE_CODE (TREE_TYPE (base)))
1690 {
1691 case RECORD_TYPE:
1692 {
1693 tree field = elt->element;
1694
1695 /* Watch out for compatible records with differing field lists. */
1696 if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
1697 field = find_compatible_field (TREE_TYPE (base), field);
1698
1699 return build3 (COMPONENT_REF, elt->type, base, field, NULL);
1700 }
1701
1702 case ARRAY_TYPE:
1703 todoflags |= TODO_update_smt_usage;
1704 if (TREE_CODE (elt->element) == RANGE_EXPR)
1705 return build4 (ARRAY_RANGE_REF, elt->type, base,
1706 TREE_OPERAND (elt->element, 0), NULL, NULL);
1707 else
1708 return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
1709
1710 case COMPLEX_TYPE:
1711 if (elt->element == integer_zero_node)
1712 return build1 (REALPART_EXPR, elt->type, base);
1713 else
1714 return build1 (IMAGPART_EXPR, elt->type, base);
1715
1716 default:
1717 gcc_unreachable ();
1718 }
1719 }
1720
1721 /* Build a full component reference to ELT rooted at its native variable. */
1722
1723 static tree
1724 generate_element_ref (struct sra_elt *elt)
1725 {
1726 if (elt->parent)
1727 return generate_one_element_ref (elt, generate_element_ref (elt->parent));
1728 else
1729 return elt->element;
1730 }
1731
1732 /* Create an assignment statement from SRC to DST. */
1733
1734 static tree
1735 sra_build_assignment (tree dst, tree src)
1736 {
1737 /* It was hoped that we could perform some type sanity checking
1738 here, but since front-ends can emit accesses of fields in types
1739 different from their nominal types and copy structures containing
1740 them as a whole, we'd have to handle such differences here.
1741 Since such accesses under different types require compatibility
1742 anyway, there's little point in making tests and/or adding
1743 conversions to ensure the types of src and dst are the same.
1744 So we just assume type differences at this point are ok. */
1745 return build_gimple_modify_stmt (dst, src);
1746 }
1747
1748 /* Generate a set of assignment statements in *LIST_P to copy all
1749 instantiated elements under ELT to or from the equivalent structure
1750 rooted at EXPR. COPY_OUT controls the direction of the copy, with
1751 true meaning to copy out of EXPR into ELT. */
1752
1753 static void
1754 generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
1755 tree *list_p)
1756 {
1757 struct sra_elt *c;
1758 tree t;
1759
1760 if (!copy_out && TREE_CODE (expr) == SSA_NAME
1761 && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
1762 {
1763 tree r, i;
1764
1765 c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
1766 r = c->replacement;
1767 c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
1768 i = c->replacement;
1769
1770 t = build2 (COMPLEX_EXPR, elt->type, r, i);
1771 t = sra_build_assignment (expr, t);
1772 SSA_NAME_DEF_STMT (expr) = t;
1773 append_to_statement_list (t, list_p);
1774 }
1775 else if (elt->replacement)
1776 {
1777 if (copy_out)
1778 t = sra_build_assignment (elt->replacement, expr);
1779 else
1780 t = sra_build_assignment (expr, elt->replacement);
1781 append_to_statement_list (t, list_p);
1782 }
1783 else
1784 {
1785 FOR_EACH_ACTUAL_CHILD (c, elt)
1786 {
1787 t = generate_one_element_ref (c, unshare_expr (expr));
1788 generate_copy_inout (c, copy_out, t, list_p);
1789 }
1790 }
1791 }
1792
1793 /* Generate a set of assignment statements in *LIST_P to copy all instantiated
1794 elements under SRC to their counterparts under DST. There must be a 1-1
1795 correspondence of instantiated elements. */
1796
1797 static void
1798 generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
1799 {
1800 struct sra_elt *dc, *sc;
1801
1802 FOR_EACH_ACTUAL_CHILD (dc, dst)
1803 {
1804 sc = lookup_element (src, dc->element, NULL, NO_INSERT);
1805 gcc_assert (sc);
1806 generate_element_copy (dc, sc, list_p);
1807 }
1808
1809 if (dst->replacement)
1810 {
1811 tree t;
1812
1813 gcc_assert (src->replacement);
1814
1815 t = sra_build_assignment (dst->replacement, src->replacement);
1816 append_to_statement_list (t, list_p);
1817 }
1818 }
1819
1820 /* Generate a set of assignment statements in *LIST_P to zero all instantiated
1821 elements under ELT. In addition, do not assign to elements that have been
1822 marked VISITED but do reset the visited flag; this allows easy coordination
1823 with generate_element_init. */
1824
1825 static void
1826 generate_element_zero (struct sra_elt *elt, tree *list_p)
1827 {
1828 struct sra_elt *c;
1829
1830 if (elt->visited)
1831 {
1832 elt->visited = false;
1833 return;
1834 }
1835
1836 FOR_EACH_ACTUAL_CHILD (c, elt)
1837 generate_element_zero (c, list_p);
1838
1839 if (elt->replacement)
1840 {
1841 tree t;
1842
1843 gcc_assert (elt->is_scalar);
1844 t = fold_convert (elt->type, integer_zero_node);
1845
1846 t = sra_build_assignment (elt->replacement, t);
1847 append_to_statement_list (t, list_p);
1848 }
1849 }
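/* Illustration of the VISITED coordination (hypothetical case): for an
   initializer such as "s = { .a = 1 }" where S has instantiated fields A
   and B, generate_element_init_1 below emits an assignment of 1 to A's
   replacement and marks A's element visited; this function then zeros B's
   replacement but skips A, clearing the flag as it goes.  */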
1850
1851 /* Generate an assignment VAR = INIT, where INIT may need gimplification.
1852 Add the result to *LIST_P. */
1853
1854 static void
1855 generate_one_element_init (tree var, tree init, tree *list_p)
1856 {
1857 /* The replacement can be almost arbitrarily complex. Gimplify. */
1858 tree stmt = sra_build_assignment (var, init);
1859 gimplify_and_add (stmt, list_p);
1860 }
1861
1862 /* Generate a set of assignment statements in *LIST_P to set all instantiated
1863 elements under ELT with the contents of the initializer INIT. In addition,
1864 mark all assigned elements VISITED; this allows easy coordination with
1865 generate_element_zero. Return false if we found a case we couldn't
1866 handle. */
1867
1868 static bool
1869 generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
1870 {
1871 bool result = true;
1872 enum tree_code init_code;
1873 struct sra_elt *sub;
1874 tree t;
1875 unsigned HOST_WIDE_INT idx;
1876 tree value, purpose;
1877
1878 /* We can be passed DECL_INITIAL of a static variable. It might have a
1879 conversion, which we strip off here. */
1880 STRIP_USELESS_TYPE_CONVERSION (init);
1881 init_code = TREE_CODE (init);
1882
1883 if (elt->is_scalar)
1884 {
1885 if (elt->replacement)
1886 {
1887 generate_one_element_init (elt->replacement, init, list_p);
1888 elt->visited = true;
1889 }
1890 return result;
1891 }
1892
1893 switch (init_code)
1894 {
1895 case COMPLEX_CST:
1896 case COMPLEX_EXPR:
1897 FOR_EACH_ACTUAL_CHILD (sub, elt)
1898 {
1899 if (sub->element == integer_zero_node)
1900 t = (init_code == COMPLEX_EXPR
1901 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
1902 else
1903 t = (init_code == COMPLEX_EXPR
1904 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
1905 result &= generate_element_init_1 (sub, t, list_p);
1906 }
1907 break;
1908
1909 case CONSTRUCTOR:
1910 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
1911 {
1912 if (TREE_CODE (purpose) == RANGE_EXPR)
1913 {
1914 tree lower = TREE_OPERAND (purpose, 0);
1915 tree upper = TREE_OPERAND (purpose, 1);
1916
1917 while (1)
1918 {
1919 sub = lookup_element (elt, lower, NULL, NO_INSERT);
1920 if (sub != NULL)
1921 result &= generate_element_init_1 (sub, value, list_p);
1922 if (tree_int_cst_equal (lower, upper))
1923 break;
1924 lower = int_const_binop (PLUS_EXPR, lower,
1925 integer_one_node, true);
1926 }
1927 }
1928 else
1929 {
1930 sub = lookup_element (elt, purpose, NULL, NO_INSERT);
1931 if (sub != NULL)
1932 result &= generate_element_init_1 (sub, value, list_p);
1933 }
1934 }
1935 break;
1936
1937 default:
1938 elt->visited = true;
1939 result = false;
1940 }
1941
1942 return result;
1943 }
1944
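/* Illustrative sketch: for a RANGE_EXPR designator the loop above expands
   the range one index at a time, so a (hypothetical) initializer such as

       int a[4] = { [1 ... 2] = v };

   yields per-element initializations for a[1] and a[2] (when those
   elements are instantiated), advancing LOWER with int_const_binop until
   it equals UPPER.  */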
1945 /* A wrapper function for generate_element_init_1 that handles cleanup after
1946 gimplification. */
1947
1948 static bool
1949 generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
1950 {
1951 bool ret;
1952
1953 push_gimplify_context ();
1954 ret = generate_element_init_1 (elt, init, list_p);
1955 pop_gimplify_context (NULL);
1956
1957 /* The replacement can expose previously unreferenced variables. */
1958 if (ret && *list_p)
1959 {
1960 tree_stmt_iterator i;
1961
1962 for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
1963 find_new_referenced_vars (tsi_stmt_ptr (i));
1964 }
1965
1966 return ret;
1967 }
1968
1969 /* Insert STMT on all the outgoing edges out of BB. Note that if BB
1970 has more than one outgoing edge, STMT will be replicated for each of
1971 them. Abnormal edges are ignored. */
1972
1973 void
1974 insert_edge_copies (tree stmt, basic_block bb)
1975 {
1976 edge e;
1977 edge_iterator ei;
1978 bool first_copy;
1979
1980 first_copy = true;
1981 FOR_EACH_EDGE (e, ei, bb->succs)
1982 {
1983 /* We don't need to insert copies on abnormal edges. The
1984 value of the scalar replacement is not guaranteed to
1985 be valid through an abnormal edge. */
1986 if (!(e->flags & EDGE_ABNORMAL))
1987 {
1988 if (first_copy)
1989 {
1990 bsi_insert_on_edge (e, stmt);
1991 first_copy = false;
1992 }
1993 else
1994 bsi_insert_on_edge (e, unsave_expr_now (stmt));
1995 }
1996 }
1997 }
1998
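/* Illustrative note: if BB ends in a conditional jump with two normal
   successors, the first edge receives STMT itself and the second receives
   unsave_expr_now (STMT), i.e. a deep copy, so the two edge insertions do
   not share trees.  Abnormal successors are skipped entirely.  */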
1999 /* Helper function to insert LIST before BSI, and set up line number info. */
2000
2001 void
2002 sra_insert_before (block_stmt_iterator *bsi, tree list)
2003 {
2004 tree stmt = bsi_stmt (*bsi);
2005
2006 if (EXPR_HAS_LOCATION (stmt))
2007 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
2008 bsi_insert_before (bsi, list, BSI_SAME_STMT);
2009 }
2010
2011 /* Similarly, but insert after BSI. Handles insertion onto edges as well. */
2012
2013 void
2014 sra_insert_after (block_stmt_iterator *bsi, tree list)
2015 {
2016 tree stmt = bsi_stmt (*bsi);
2017
2018 if (EXPR_HAS_LOCATION (stmt))
2019 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
2020
2021 if (stmt_ends_bb_p (stmt))
2022 insert_edge_copies (list, bsi->bb);
2023 else
2024 bsi_insert_after (bsi, list, BSI_SAME_STMT);
2025 }
2026
2027 /* Similarly, but replace the statement at BSI. */
2028
2029 static void
2030 sra_replace (block_stmt_iterator *bsi, tree list)
2031 {
2032 sra_insert_before (bsi, list);
2033 bsi_remove (bsi, false);
2034 if (bsi_end_p (*bsi))
2035 *bsi = bsi_last (bsi->bb);
2036 else
2037 bsi_prev (bsi);
2038 }
2039
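/* Note on sra_replace: after the insertion and removal above, *BSI is left
   pointing at the last statement of LIST, so a caller that then advances
   the iterator visits the statement following the replacement.  */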
2040 /* Scalarize a USE. To recap, this is either a simple reference to ELT,
2041 if ELT is scalar, or some occurrence of ELT that requires a complete
2042 aggregate. IS_OUTPUT is true if ELT is being modified. */
2043
2044 static void
2045 scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
2046 bool is_output, bool use_all)
2047 {
2048 tree list = NULL, stmt = bsi_stmt (*bsi);
2049
2050 if (elt->replacement)
2051 {
2052 /* If we have a replacement, then updating the reference is as
2053 simple as modifying the existing statement in place. */
2054 if (is_output)
2055 mark_all_v_defs (stmt);
2056 *expr_p = elt->replacement;
2057 update_stmt (stmt);
2058 }
2059 else
2060 {
2061 /* Otherwise we need some copies. If ELT is being read, then we want
2062 to store all (modified) sub-elements back into the structure before
2063 the reference takes place. If ELT is being written, then we want to
2064 load the changed values back into our shadow variables. */
2065 /* ??? We don't check modified for reads, we just always write all of
2066 the values. We should be able to record the SSA number of the VOP
2067 for which the values were last read. If that number matches the
2068 SSA number of the VOP in the current statement, then we needn't
2069 emit an assignment. This would also eliminate double writes when
2070 a structure is passed as more than one argument to a function call.
2071 This optimization would be most effective if sra_walk_function
2072 processed the blocks in dominator order. */
2073
2074 generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
2075 if (list == NULL)
2076 return;
2077 mark_all_v_defs (list);
2078 if (is_output)
2079 sra_insert_after (bsi, list);
2080 else
2081 {
2082 sra_insert_before (bsi, list);
2083 if (use_all)
2084 mark_no_warning (elt);
2085 }
2086 }
2087 }
2088
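/* Illustrative sketch of the copy-in/out path above: if X has instantiated
   fields a and b but is passed whole to a call, the pass emits, before the
   call (hypothetical replacement names),

       x.a = x$a;
       x.b = x$b;

   while an output use instead gets the mirror-image loads inserted after
   the statement.  */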
2089 /* Scalarize a COPY. To recap, this is an assignment statement between
2090 two scalarizable references, LHS_ELT and RHS_ELT. */
2091
2092 static void
2093 scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
2094 block_stmt_iterator *bsi)
2095 {
2096 tree list, stmt;
2097
2098 if (lhs_elt->replacement && rhs_elt->replacement)
2099 {
2100 /* If we have two scalar operands, modify the existing statement. */
2101 stmt = bsi_stmt (*bsi);
2102
2103 /* See the commentary in sra_walk_function concerning
2104 RETURN_EXPR, and why we should never see one here. */
2105 gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
2106
2107 GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
2108 GIMPLE_STMT_OPERAND (stmt, 1) = rhs_elt->replacement;
2109 update_stmt (stmt);
2110 }
2111 else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
2112 {
2113 /* If either side requires a block copy, then sync the RHS back
2114 to the original structure, leave the original assignment
2115 statement (which will perform the block copy), then load the
2116 LHS values out of its now-updated original structure. */
2117 /* ??? Could perform a modified pair-wise element copy. That
2118 would at least allow those elements that are instantiated in
2119 both structures to be optimized well. */
2120
2121 list = NULL;
2122 generate_copy_inout (rhs_elt, false,
2123 generate_element_ref (rhs_elt), &list);
2124 if (list)
2125 {
2126 mark_all_v_defs (list);
2127 sra_insert_before (bsi, list);
2128 }
2129
2130 list = NULL;
2131 generate_copy_inout (lhs_elt, true,
2132 generate_element_ref (lhs_elt), &list);
2133 if (list)
2134 {
2135 mark_all_v_defs (list);
2136 sra_insert_after (bsi, list);
2137 }
2138 }
2139 else
2140 {
2141 /* Otherwise both sides must be fully instantiated. In which
2142 case perform pair-wise element assignments and replace the
2143 original block copy statement. */
2144
2145 stmt = bsi_stmt (*bsi);
2146 mark_all_v_defs (stmt);
2147
2148 list = NULL;
2149 generate_element_copy (lhs_elt, rhs_elt, &list);
2150 gcc_assert (list);
2151 mark_all_v_defs (list);
2152 sra_replace (bsi, list);
2153 }
2154 }
2155
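/* Illustrative sketch of the block-copy branch above: if only the RHS of
   "d = s" is scalarized, the pass keeps the aggregate assignment and, with
   hypothetical replacement names, emits

       s.a = s$a;
       s.b = s$b;

   before it; a scalarized LHS would instead cause d$a = d.a and d$b = d.b
   to be inserted after the retained copy.  */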
2156 /* Scalarize an INIT. To recap, this is an assignment to a scalarizable
2157 reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
2158 COMPLEX_EXPR. If RHS is NULL, it should be treated as an empty
2159 CONSTRUCTOR. */
2160
2161 static void
2162 scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
2163 {
2164 bool result = true;
2165 tree list = NULL;
2166
2167 /* Generate initialization statements for all members extant in the RHS. */
2168 if (rhs)
2169 {
2170 /* Unshare the expression just in case this is from a decl's initial. */
2171 rhs = unshare_expr (rhs);
2172 result = generate_element_init (lhs_elt, rhs, &list);
2173 }
2174
2175 /* CONSTRUCTOR is defined such that any member not mentioned is assigned
2176 a zero value. Initialize the rest of the instantiated elements. */
2177 generate_element_zero (lhs_elt, &list);
2178
2179 if (!result)
2180 {
2181 /* If we failed to convert the entire initializer, then we must
2182 leave the structure assignment in place and must load values
2183 from the structure into the slots for which we did not find
2184 constants. The easiest way to do this is to generate a complete
2185 copy-out, and then follow that with the constant assignments
2186 that we were able to build. DCE will clean things up. */
2187 tree list0 = NULL;
2188 generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
2189 &list0);
2190 append_to_statement_list (list, &list0);
2191 list = list0;
2192 }
2193
2194 if (lhs_elt->use_block_copy || !result)
2195 {
2196 /* Since LHS is not fully instantiated, we must leave the structure
2197 assignment in place. Treating this case differently from a USE
2198 exposes constants to later optimizations. */
2199 if (list)
2200 {
2201 mark_all_v_defs (list);
2202 sra_insert_after (bsi, list);
2203 }
2204 }
2205 else
2206 {
2207 /* The LHS is fully instantiated. The list of initializations
2208 replaces the original structure assignment. */
2209 gcc_assert (list);
2210 mark_all_v_defs (bsi_stmt (*bsi));
2211 mark_all_v_defs (list);
2212 sra_replace (bsi, list);
2213 }
2214 }
2215
2216 /* A subroutine of scalarize_ldst called via walk_tree. Set TREE_THIS_NOTRAP
2217 on all INDIRECT_REFs. */
2218
2219 static tree
2220 mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2221 {
2222 tree t = *tp;
2223
2224 if (TREE_CODE (t) == INDIRECT_REF)
2225 {
2226 TREE_THIS_NOTRAP (t) = 1;
2227 *walk_subtrees = 0;
2228 }
2229 else if (IS_TYPE_OR_DECL_P (t))
2230 *walk_subtrees = 0;
2231
2232 return NULL;
2233 }
2234
2235 /* Scalarize a LDST. To recap, this is an assignment between one scalarizable
2236 reference ELT and one non-scalarizable reference OTHER. IS_OUTPUT is true
2237 if ELT is on the left-hand side. */
2238
2239 static void
2240 scalarize_ldst (struct sra_elt *elt, tree other,
2241 block_stmt_iterator *bsi, bool is_output)
2242 {
2243 /* Shouldn't have gotten called for a scalar. */
2244 gcc_assert (!elt->replacement);
2245
2246 if (elt->use_block_copy)
2247 {
2248 /* Since ELT is not fully instantiated, we have to leave the
2249 block copy in place. Treat this as a USE. */
2250 scalarize_use (elt, NULL, bsi, is_output, false);
2251 }
2252 else
2253 {
2254 /* The interesting case is when ELT is fully instantiated. In this
2255 case we can have each element stored/loaded directly to/from the
2256 corresponding slot in OTHER. This avoids a block copy. */
2257
2258 tree list = NULL, stmt = bsi_stmt (*bsi);
2259
2260 mark_all_v_defs (stmt);
2261 generate_copy_inout (elt, is_output, other, &list);
2262 gcc_assert (list);
2263 mark_all_v_defs (list);
2264
2265 /* Preserve EH semantics. */
2266 if (stmt_ends_bb_p (stmt))
2267 {
2268 tree_stmt_iterator tsi;
2269 tree first;
2270
2271 /* Extract the first statement from LIST. */
2272 tsi = tsi_start (list);
2273 first = tsi_stmt (tsi);
2274 tsi_delink (&tsi);
2275
2276 /* Replace the old statement with this new representative. */
2277 bsi_replace (bsi, first, true);
2278
2279 if (!tsi_end_p (tsi))
2280 {
2281 /* If any reference would trap, then they all would. And more
2282 to the point, the first would. Therefore none of the rest
2283 will trap since the first didn't. Indicate this by
2284 iterating over the remaining statements and setting
2285 TREE_THIS_NOTRAP on all INDIRECT_REFs. */
2286 do
2287 {
2288 walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
2289 tsi_next (&tsi);
2290 }
2291 while (!tsi_end_p (tsi));
2292
2293 insert_edge_copies (list, bsi->bb);
2294 }
2295 }
2296 else
2297 sra_replace (bsi, list);
2298 }
2299 }
2300
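/* Illustrative sketch of the fully instantiated LDST case above: a store
   such as "*p = x", with X fully scalarized, becomes (hypothetical names)

       p->a = x$a;
       p->b = x$b;

   avoiding the aggregate block copy.  When the original statement could
   throw, only the first of these replaces it in place; the rest are pushed
   onto the outgoing edges with TREE_THIS_NOTRAP set on their
   INDIRECT_REFs.  */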
2301 /* Generate initializations for all scalarizable parameters. */
2302
2303 static void
2304 scalarize_parms (void)
2305 {
2306 tree list = NULL;
2307 unsigned i;
2308 bitmap_iterator bi;
2309
2310 EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
2311 {
2312 tree var = referenced_var (i);
2313 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
2314 generate_copy_inout (elt, true, var, &list);
2315 }
2316
2317 if (list)
2318 {
2319 insert_edge_copies (list, ENTRY_BLOCK_PTR);
2320 mark_all_v_defs (list);
2321 }
2322 }
2323
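/* Illustrative note: for a scalarized PARM_DECL P recorded in
   needs_copy_in, the generated list contains copy-in assignments of the
   form p$f = p.f (hypothetical names), placed on the edges leaving
   ENTRY_BLOCK_PTR so every path through the function starts with the
   scalar replacements initialized from the incoming parameter.  */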
2324 /* Entry point to phase 4. Update the function to match replacements. */
2325
2326 static void
2327 scalarize_function (void)
2328 {
2329 static const struct sra_walk_fns fns = {
2330 scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
2331 };
2332
2333 sra_walk_function (&fns);
2334 scalarize_parms ();
2335 bsi_commit_edge_inserts ();
2336 }
2337
2338 \f
2339 /* Debug helper function. Print ELT in a nice human-readable format. */
2340
2341 static void
2342 dump_sra_elt_name (FILE *f, struct sra_elt *elt)
2343 {
2344 if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
2345 {
2346 fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
2347 dump_sra_elt_name (f, elt->parent);
2348 }
2349 else
2350 {
2351 if (elt->parent)
2352 dump_sra_elt_name (f, elt->parent);
2353 if (DECL_P (elt->element))
2354 {
2355 if (TREE_CODE (elt->element) == FIELD_DECL)
2356 fputc ('.', f);
2357 print_generic_expr (f, elt->element, dump_flags);
2358 }
2359 else if (TREE_CODE (elt->element) == RANGE_EXPR)
2360 fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC ".." HOST_WIDE_INT_PRINT_DEC "]",
2361 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
2362 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
2363 else
2364 fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
2365 TREE_INT_CST_LOW (elt->element));
2366 }
2367 }
2368
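/* For example, the routine above prints names such as "x.f", "x[0..3]" or
   "__real__ c", depending on whether the element is a FIELD_DECL, a
   RANGE_EXPR index or a part of a COMPLEX_TYPE parent.  */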
2369 /* Likewise, but callable from the debugger. */
2370
2371 void
2372 debug_sra_elt_name (struct sra_elt *elt)
2373 {
2374 dump_sra_elt_name (stderr, elt);
2375 fputc ('\n', stderr);
2376 }
2377
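/* Allocate the bitmaps that cache type decomposition and instantiation
   queries, if they have not been created yet.  */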
2378 void
2379 sra_init_cache (void)
2380 {
2381 if (sra_type_decomp_cache)
2382 return;
2383
2384 sra_type_decomp_cache = BITMAP_ALLOC (NULL);
2385 sra_type_inst_cache = BITMAP_ALLOC (NULL);
2386 }
2387
2388 /* Main entry point. */
2389
2390 static unsigned int
2391 tree_sra (void)
2392 {
2393 /* Initialize local variables. */
2394 todoflags = 0;
2395 gcc_obstack_init (&sra_obstack);
2396 sra_candidates = BITMAP_ALLOC (NULL);
2397 needs_copy_in = BITMAP_ALLOC (NULL);
2398 sra_init_cache ();
2399 sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);
2400
2401 /* Scan. If we find anything, instantiate and scalarize. */
2402 if (find_candidates_for_sra ())
2403 {
2404 scan_function ();
2405 decide_instantiations ();
2406 scalarize_function ();
2407 }
2408
2409 /* Free allocated memory. */
2410 htab_delete (sra_map);
2411 sra_map = NULL;
2412 BITMAP_FREE (sra_candidates);
2413 BITMAP_FREE (needs_copy_in);
2414 BITMAP_FREE (sra_type_decomp_cache);
2415 BITMAP_FREE (sra_type_inst_cache);
2416 obstack_free (&sra_obstack, NULL);
2417 return todoflags;
2418 }
2419
2420 static unsigned int
2421 tree_sra_early (void)
2422 {
2423 unsigned int ret;
2424
2425 early_sra = true;
2426 ret = tree_sra ();
2427 early_sra = false;
2428
2429 return ret;
2430 }
2431
2432 static bool
2433 gate_sra (void)
2434 {
2435 return flag_tree_sra != 0;
2436 }
2437
2438 struct tree_opt_pass pass_sra_early =
2439 {
2440 "esra", /* name */
2441 gate_sra, /* gate */
2442 tree_sra_early, /* execute */
2443 NULL, /* sub */
2444 NULL, /* next */
2445 0, /* static_pass_number */
2446 TV_TREE_SRA, /* tv_id */
2447 PROP_cfg | PROP_ssa, /* properties_required */
2448 0, /* properties_provided */
2449 0, /* properties_destroyed */
2450 0, /* todo_flags_start */
2451 TODO_dump_func
2452 | TODO_update_ssa
2453 | TODO_ggc_collect
2454 | TODO_verify_ssa, /* todo_flags_finish */
2455 0 /* letter */
2456 };
2457
2458 struct tree_opt_pass pass_sra =
2459 {
2460 "sra", /* name */
2461 gate_sra, /* gate */
2462 tree_sra, /* execute */
2463 NULL, /* sub */
2464 NULL, /* next */
2465 0, /* static_pass_number */
2466 TV_TREE_SRA, /* tv_id */
2467 PROP_cfg | PROP_ssa, /* properties_required */
2468 0, /* properties_provided */
2469 0, /* properties_destroyed */
2470 0, /* todo_flags_start */
2471 TODO_dump_func
2472 | TODO_update_ssa
2473 | TODO_ggc_collect
2474 | TODO_verify_ssa, /* todo_flags_finish */
2475 0 /* letter */
2476 };