/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"

/* The objective of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
         decomposition.

     (2) Scan the function looking for the ways these variables are used.
         In particular we're interested in the number of times a variable
         (or member) is needed as a complete unit, and the number of times
         a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.
*/
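
/* As an illustrative sketch (an addition for exposition, using a
   hypothetical function; not a normative description of the pass),
   consider:

     struct point { int x; int y; };

     int
     f (struct point q)
     {
       struct point p;
       p = q;
       return p.x + p.y;
     }

   Assuming P is non-addressable, the pass may replace P with two scalar
   temporaries (named along the lines of p$x and p$y), rewriting the
   aggregate copy per-element:

     p$x = q.x;
     p$y = q.y;
     return p$x + p$y;  */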


/* True if this is the "early" pass, before inlining.  */
static bool early_sra;

/* The set of todo flags to return from tree_sra.  */
static unsigned int todoflags;

/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;

/* One of these structures is created for each candidate aggregate and
   each (accessed) member or group of members of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *groups;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of an
     ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR.  In the case
     of a complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if this element is a group of members of its parent.  */
  bool is_group;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* True if everything under this element has been marked TREE_NO_WARNING.  */
  bool all_no_warning;

  /* A flag for use with/after random access traversals.  */
  bool visited;
  /* True if there is a BIT_FIELD_REF on the LHS with a vector type.  */
  bool is_vector_lhs;

  /* 1 if the element is a field that is part of a block, 2 if the field
     is the block itself, 0 if it's neither.  */
  char in_bitfld_block;
};
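
/* An illustrative sketch of the element tree (hypothetical types, for
   exposition only): given

     struct inner { int a; int b; };
     struct outer { struct inner i; int c; } v;

   the root sra_elt for V has ELEMENT pointing to the VAR_DECL V,
   children for the FIELD_DECLs I and C, and grandchildren under I for
   the FIELD_DECLs A and B.  The reference v.i.a corresponds to the
   leaf whose chain of ELEMENT tokens is V, I, A.  */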

#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)

#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT)                 \
  for ((CHILD) = (ELT)->is_group                          \
                 ? next_child_for_group (NULL, (ELT))     \
                 : (ELT)->children;                       \
       (CHILD);                                           \
       (CHILD) = (ELT)->is_group                          \
                 ? next_child_for_group ((CHILD), (ELT))  \
                 : (CHILD)->sibling)

/* Helper function for above macro.  Return next child in group.  */
static struct sra_elt *
next_child_for_group (struct sra_elt *child, struct sra_elt *group)
{
  gcc_assert (group->is_group);

  /* Find the next child in the parent.  */
  if (child)
    child = child->sibling;
  else
    child = group->parent->children;

  /* Skip siblings that do not belong to the group.  */
  while (child)
    {
      tree g_elt = group->element;
      if (TREE_CODE (g_elt) == RANGE_EXPR)
        {
          if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
              && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
            break;
        }
      else
        gcc_unreachable ();

      child = child->sibling;
    }

  return child;
}

/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);
static tree sra_build_assignment (tree dst, tree src);
static void mark_all_v_defs (tree list);


/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
          || code == FIXED_POINT_TYPE
          || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
          || code == POINTER_TYPE || code == OFFSET_TYPE
          || code == REFERENCE_TYPE);
}

/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

bool
sra_type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
        bool saw_one_field = false;

        for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
          if (TREE_CODE (t) == FIELD_DECL)
            {
              /* Reject incorrectly represented bit fields.  */
              if (DECL_BIT_FIELD (t)
                  && (tree_low_cst (DECL_SIZE (t), 1)
                      != TYPE_PRECISION (TREE_TYPE (t))))
                goto fail;

              saw_one_field = true;
            }

        /* Record types must have at least one field.  */
        if (!saw_one_field)
          goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
        goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
        goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
        goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}
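
/* A few illustrative cases for the predicate above (a sketch, not an
   exhaustive list):

     struct s { int a; float b; };    decomposable
     int a[4];                        decomposable (fixed bounds)
     _Complex double c;               decomposable
     union u { int i; float f; };     not decomposable (union)
     int vla[n];                      not decomposable (bounds not constant)  */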

/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Cannot scalarize variable ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, " because it must live in memory\n");
        }
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Cannot scalarize variable ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, " because it is declared volatile\n");
        }
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Cannot scalarize variable ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, " because its type cannot be decomposed\n");
        }
      return false;
    }

  /* HACK: if we decompose a va_list_type_node before inlining, then we'll
     confuse tree-stdarg.c, and we won't be able to figure out which and
     how many arguments are accessed.  This really should be improved in
     tree-stdarg.c, as the decomposition is truly a win.  This could also
     be fixed if the stdarg pass ran early, but this can't be done until
     we have aliasing information early too.  See PR 30791.  */
  if (early_sra
      && TYPE_MAIN_VARIANT (TREE_TYPE (var))
         == TYPE_MAIN_VARIANT (va_list_type_node))
    return false;

  return true;
}
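
/* By way of a sketch, the checks above reject declarations such as:

     struct s v1; int *p = (int *) &v1;   -- address taken, so V1 must
                                             live in memory
     volatile struct s v2;                -- declared volatile

   while a plain local "struct s v3;" whose address is never taken
   remains a candidate.  */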

/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!sra_type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
        unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
        tree f;

        if (bitmap_bit_p (sra_type_inst_cache, cache+0))
          return true;
        if (bitmap_bit_p (sra_type_inst_cache, cache+1))
          return false;

        for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
                {
                  bitmap_set_bit (sra_type_inst_cache, cache+1);
                  return false;
                }
            }

        bitmap_set_bit (sra_type_inst_cache, cache+0);
        return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Test whether ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  for (c = elt->groups; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}


/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case RANGE_EXPR:
      h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
         lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    case BIT_FIELD_REF:
      /* Don't take operand 0 into account, that's our parent.  */
      h = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 2), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}

/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything except bitfield blocks back up the
     chain.  Given that chain lengths are rarely very long, this
     should be acceptable.  If we truly identify this as a performance
     problem, it should work to hash the pointer value
     "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    if (!p->in_bitfld_block)
      h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}

/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;
  const struct sra_elt *ap = a->parent;
  const struct sra_elt *bp = b->parent;

  if (ap)
    while (ap->in_bitfld_block)
      ap = ap->parent;
  if (bp)
    while (bp->in_bitfld_block)
      bp = bp->parent;

  if (ap != bp)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case RANGE_EXPR:
      return
        tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
        && tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
         compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
        return false;
      return fields_compatible_p (ae, be);

    case BIT_FIELD_REF:
      return
        tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1))
        && tree_int_cst_equal (TREE_OPERAND (ae, 2), TREE_OPERAND (be, 2));

    default:
      gcc_unreachable ();
    }
}

/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
                enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  if (parent)
    dummy.parent = parent->is_group ? parent->parent : parent;
  else
    dummy.parent = NULL;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
        {
          if (IS_ELEMENT_FOR_GROUP (elt->element))
            {
              elt->is_group = true;
              elt->sibling = parent->groups;
              parent->groups = elt;
            }
          else
            {
              elt->sibling = parent->children;
              parent->children = elt;
            }
        }

      /* If this is a parameter, then if we want to scalarize, we have
         one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
        {
          elt->n_copies = 1;
          bitmap_set_bit (needs_copy_in, DECL_UID (child));
        }
    }

  return elt;
}

/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
        return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (in_array_bounds_p (expr))
        child = TREE_OPERAND (expr, 1);
      else
        return NULL;
      break;

    case ARRAY_RANGE_REF:
      /* We can't scalarize variable array indices.  */
      if (range_in_array_bounds_p (expr))
        {
          tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
          child = build2 (RANGE_EXPR, integer_type_node,
                          TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
        }
      else
        return NULL;
      break;

    case COMPONENT_REF:
      {
        tree type = TREE_TYPE (TREE_OPERAND (expr, 0));
        /* Don't look through unions.  */
        if (TREE_CODE (type) != RECORD_TYPE)
          return NULL;
        /* Nor through variable-sized records.  */
        if (TYPE_SIZE (type) == NULL_TREE
            || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
          return NULL;
        child = TREE_OPERAND (expr, 1);
      }
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
  return NULL;
}
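
/* A sketch of the recursion above, using hypothetical references: for
   the expression v.i.a, the recursion bottoms out at the VAR_DECL V and
   returns its root element; unwinding then looks up the child element
   for FIELD_DECL I and finally the one for FIELD_DECL A.  For __real c
   the child token is integer_zero_node, and for __imag c it is
   integer_one_node.  */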


/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  USE_ALL is true if we saw something we
     couldn't quite identify and had to force the use of the entire object.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
               block_stmt_iterator *bsi, bool is_output, bool use_all);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
                block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER without side-effects.
     IS_OUTPUT is true if ELT is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
                block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};

#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
                         void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
        return t;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL;
}
#endif

/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
               const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;
  bool use_all_p = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
        /* If there is a scalarizable decl at the bottom, then process it.  */
        if (is_sra_candidate_decl (inner))
          {
            struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
            if (disable_scalarization)
              elt->cannot_scalarize = true;
            else
              fns->use (elt, expr_p, bsi, is_output, use_all_p);
          }
        return;

      case ARRAY_REF:
        /* Non-constant index means any member may be accessed.  Prevent the
           expression from being scalarized.  If we were to treat this as a
           reference to the whole array, we can wind up with a single dynamic
           index reference inside a loop being overridden by several constant
           index references during loop setup.  It's possible that this could
           be avoided by using dynamic usage counts based on BB trip counts
           (based on loop analysis or profiling), but that hardly seems worth
           the effort.  */
        /* ??? Hack.  Figure out how to push this into the scan routines
           without duplicating too much code.  */
        if (!in_array_bounds_p (inner))
          {
            disable_scalarization = true;
            goto use_all;
          }
        /* ??? Are we assured that non-constant bounds and stride will have
           the same value everywhere?  I don't think Fortran will...  */
        if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
          goto use_all;
        inner = TREE_OPERAND (inner, 0);
        break;

      case ARRAY_RANGE_REF:
        if (!range_in_array_bounds_p (inner))
          {
            disable_scalarization = true;
            goto use_all;
          }
        /* ??? See above regarding non-constant bounds and stride.  */
        if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
          goto use_all;
        inner = TREE_OPERAND (inner, 0);
        break;

      case COMPONENT_REF:
        {
          tree type = TREE_TYPE (TREE_OPERAND (inner, 0));
          /* Don't look through unions.  */
          if (TREE_CODE (type) != RECORD_TYPE)
            goto use_all;
          /* Nor through variable-sized records.  */
          if (TYPE_SIZE (type) == NULL_TREE
              || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
            goto use_all;
          inner = TREE_OPERAND (inner, 0);
        }
        break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
        inner = TREE_OPERAND (inner, 0);
        break;

      case BIT_FIELD_REF:
        /* A bit field reference to a specific vector is scalarized, but
           ones appearing on the left-hand side need to be noted so that
           when we scalarize the vector, we can mark that variable as
           non-renamable.  */
        if (is_output
            && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
          {
            struct sra_elt *elt
              = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
            if (elt)
              elt->is_vector_lhs = true;
          }
        /* A bit field reference (access to *multiple* fields simultaneously)
           is not currently scalarized.  Consider this an access to the
           complete outer element, to which walk_tree will bring us next.  */

        goto use_all;

      case VIEW_CONVERT_EXPR:
      case NOP_EXPR:
        /* Similarly, a view/nop explicitly wants to look at an object in a
           type other than the one we've scalarized.  */
        goto use_all;

      case WITH_SIZE_EXPR:
        /* This is a transparent wrapper.  The entire inner expression really
           is being used.  */
        goto use_all;

      use_all:
        expr_p = &TREE_OPERAND (inner, 0);
        inner = expr = *expr_p;
        use_all_p = true;
        break;

      default:
#ifdef ENABLE_CHECKING
        /* Validate that we're not missing any references.  */
        gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
        return;
      }
}
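
/* A sketch of the walk above, using hypothetical references: for a use
   of v.i.a, INNER descends through the COMPONENT_REFs to the VAR_DECL V
   and FNS->USE is invoked on the element for v.i.a.  For
   VIEW_CONVERT_EXPR <T> (v.i), control reaches "use_all", so the use is
   recorded against v.i as a whole with USE_ALL_P set.  For v.a[i] with
   a non-constant index I, scalarization of V is disabled entirely.  */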

/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
                    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}

/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
                    const struct sra_walk_fns *fns)
{
  int i;
  int nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    sra_walk_expr (&CALL_EXPR_ARG (expr, i), bsi, false, fns);
}

/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
                   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}

/* Walk a GIMPLE_MODIFY_STMT and categorize the assignment appropriately.  */

static void
sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
                             const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt, *rhs_elt;
  tree lhs, rhs;

  lhs = GIMPLE_STMT_OPERAND (expr, 0);
  rhs = GIMPLE_STMT_OPERAND (expr, 1);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, bsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
        fns->ldst (rhs_elt, lhs, bsi, false);
      else
        fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    {
      tree call = get_call_expr_in (rhs);
      if (call)
        sra_walk_call_expr (call, bsi, fns);
      else
        sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 1), bsi, false, fns);
    }

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
         we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
          || TREE_CODE (rhs) == COMPLEX_CST
          || TREE_CODE (rhs) == CONSTRUCTOR)
        fns->init (lhs_elt, rhs, bsi);

      /* If this is an assignment from read-only memory, treat this as if
         we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
               && TREE_STATIC (rhs)
               && TREE_READONLY (rhs)
               && targetm.binds_local_p (rhs))
        fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
         The lvalue requirement prevents us from trying to directly scalarize
         the result of a function call.  Which would result in trying to call
         the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar
               && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
        fns->ldst (lhs_elt, rhs, bsi, true);

      /* Otherwise we're being used in some context that requires the
         aggregate to be seen as a whole.  Invoke USE.  */
      else
        fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true, false);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 0), bsi, true, fns);
}

/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  block_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit from walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
      {
        tree stmt, t;
        stmt_ann_t ann;

        stmt = bsi_stmt (si);
        ann = stmt_ann (stmt);

        ni = si;
        bsi_next (&ni);

        /* If the statement has no virtual operands, then it doesn't
           make any structure references that we care about.  */
        if (gimple_aliases_computed_p (cfun)
            && ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
          continue;

        switch (TREE_CODE (stmt))
          {
          case RETURN_EXPR:
            /* If we have "return <retval>" then the return value is
               already exposed for our pleasure.  Walk it as a USE to
               force all the components back in place for the return.

               If we have an embedded assignment, then <retval> is of
               a type that gets returned in registers in this ABI, and
               we do not wish to extend their lifetimes.  Treat this
               as a USE of the variable on the RHS of this assignment.  */

            t = TREE_OPERAND (stmt, 0);
            if (t == NULL_TREE)
              ;
            else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
              sra_walk_expr (&GIMPLE_STMT_OPERAND (t, 1), &si, false, fns);
            else
              sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
            break;

          case GIMPLE_MODIFY_STMT:
            sra_walk_gimple_modify_stmt (stmt, &si, fns);
            break;
          case CALL_EXPR:
            sra_walk_call_expr (stmt, &si, fns);
            break;
          case ASM_EXPR:
            sra_walk_asm_expr (stmt, &si, fns);
            break;

          default:
            break;
          }
      }
}

/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  bool any_set = false;
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (decl_can_be_decomposed_p (var))
        {
          bitmap_set_bit (sra_candidates, DECL_UID (var));
          any_set = true;
        }
    }

  return any_set;
}


/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
          block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
          bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
           block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
           block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
           block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
           bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}

/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);

  for (c = elt->groups; c ; c = c->sibling)
    scan_dump (c);
}

/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };
  bitmap_iterator bi;

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
        {
          tree var = referenced_var (i);
          struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
          if (elt)
            scan_dump (elt);
        }
      fputc ('\n', dump_file);
    }
}

/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
        {
          if (elt->element == integer_zero_node)
            obstack_grow (&sra_obstack, "real", 4);
          else
            obstack_grow (&sra_obstack, "imag", 4);
          return;
        }
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else if (TREE_CODE (t) == BIT_FIELD_REF)
    {
      sprintf (buffer, "B" HOST_WIDE_INT_PRINT_DEC,
               tree_low_cst (TREE_OPERAND (t, 2), 1));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
      sprintf (buffer, "F" HOST_WIDE_INT_PRINT_DEC,
               tree_low_cst (TREE_OPERAND (t, 1), 1));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
        obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
                      IDENTIFIER_LENGTH (name));
      else
        {
          sprintf (buffer, "D%u", DECL_UID (t));
          obstack_grow (&sra_obstack, buffer, strlen (buffer));
        }
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return XOBFINISH (&sra_obstack, char *);
}
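
/* For instance (a sketch of the resulting names), the replacement for
   v.i.a would be named "v$i$a", the real part of a complex variable C
   becomes "c$real", and an unnamed base declaration falls back to its
   UID, e.g. "D1234$a".  */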

/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;
  bool nowarn = TREE_NO_WARNING (elt->element);

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    if (!nowarn)
      nowarn = TREE_NO_WARNING (base_elt->parent->element);
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");

  if (DECL_P (elt->element)
      && !tree_int_cst_equal (DECL_SIZE (var), DECL_SIZE (elt->element)))
    {
      DECL_SIZE (var) = DECL_SIZE (elt->element);
      DECL_SIZE_UNIT (var) = DECL_SIZE_UNIT (elt->element);

      elt->in_bitfld_block = 1;
      elt->replacement = build3 (BIT_FIELD_REF, elt->type, var,
                                 DECL_SIZE (var),
                                 BYTES_BIG_ENDIAN
                                 ? size_binop (MINUS_EXPR,
                                               TYPE_SIZE (elt->type),
                                               DECL_SIZE (var))
                                 : bitsize_int (0));
      if (!INTEGRAL_TYPE_P (elt->type)
          || TYPE_UNSIGNED (elt->type))
        BIT_FIELD_REF_UNSIGNED (elt->replacement) = 1;
    }

  /* For vectors, if used on the left-hand side with a BIT_FIELD_REF,
     the variable is not a gimple register.  */
  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
    DECL_GIMPLE_REG_P (var) = 0;

  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  DECL_ARTIFICIAL (var) = 1;

  if (TREE_THIS_VOLATILE (elt->type))
    {
      TREE_THIS_VOLATILE (var) = 1;
      TREE_SIDE_EFFECTS (var) = 1;
    }

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
      DECL_DEBUG_EXPR_IS_FROM (var) = 1;

      DECL_IGNORED_P (var) = 0;
      TREE_NO_WARNING (var) = nowarn;
    }
  else
    {
      DECL_IGNORED_P (var) = 1;
      /* ??? We can't generate any warning that would be meaningful.  */
      TREE_NO_WARNING (var) = 1;
    }

  /* Zero-initialize bit-field scalarization variables, to avoid
     triggering undefined behavior.  */
  if (TREE_CODE (elt->element) == BIT_FIELD_REF
      || (var != elt->replacement
          && TREE_CODE (elt->replacement) == BIT_FIELD_REF))
    {
      tree init = sra_build_assignment (var, fold_convert (TREE_TYPE (var),
                                                           integer_zero_node));
      insert_edge_copies (init, ENTRY_BLOCK_PTR);
      mark_all_v_defs (init);
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}

/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
                        unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
         than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
        instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c, *group;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      /* Consider groups of sub-elements as weighing in favour of
         instantiation whatever their size.  */
      for (group = elt->groups; group ; group = group->sibling)
        FOR_EACH_ACTUAL_CHILD (c, group)
          {
            c->n_uses += group->n_uses;
            c->n_copies += group->n_copies;
          }

      for (c = elt->children; c ; c = c->sibling)
        decide_instantiation_1 (c, this_uses, this_copies);
    }
}
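
/* A worked example of the heuristic above, with made-up counts: for a
   scalar leaf v.x with n_uses == 3 and n_copies == 1, where the parent
   V has n_uses == 2 and n_copies == 1, we compare 3 + 1 + 1 = 5 against
   parent_uses == 2; since 5 > 2, v.x gets a replacement variable.  Had
   V been used as a whole six times, 5 > 6 would fail and v.x would be
   left in place.  */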

/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
        count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}

/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static struct sra_elt *
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
        instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
  return sub;
}

/* Obtain the canonical type for field F of ELEMENT.  */

static tree
canon_type_for_field (tree f, tree element)
{
  tree field_type = TREE_TYPE (f);

  /* canonicalize_component_ref() unwidens some bit-field types (not
     marked as DECL_BIT_FIELD in C++), so we must do the same, lest we
     introduce type mismatches.  */
  if (INTEGRAL_TYPE_P (field_type)
      && DECL_MODE (f) != TYPE_MODE (field_type))
    field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
                                                   field_type,
                                                   element,
                                                   f, NULL_TREE),
                                           NULL_TREE));

  return field_type;
}

/* Look for adjacent fields of ELT starting at F that we'd like to
   scalarize as a single variable.  Return the last field of the
   group.  */

static tree
try_instantiate_multiple_fields (struct sra_elt *elt, tree f)
{
  int count;
  unsigned HOST_WIDE_INT align, bit, size, alchk;
  enum machine_mode mode;
  tree first = f, prev;
  tree type, var;
  struct sra_elt *block;

  if (!is_sra_scalar_type (TREE_TYPE (f))
      || !host_integerp (DECL_FIELD_OFFSET (f), 1)
      || !host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
      || !host_integerp (DECL_SIZE (f), 1)
      || lookup_element (elt, f, NULL, NO_INSERT))
    return f;

  block = elt;

  /* For complex and array objects, there are going to be integer
     literals as child elements.  In this case, we can't just take the
     alignment and mode of the decl, so we instead rely on the element
     type.

     ??? We could try to infer additional alignment from the full
     object declaration and the location of the sub-elements we're
     accessing.  */
  for (count = 0; !DECL_P (block->element); count++)
    block = block->parent;

  align = DECL_ALIGN (block->element);
  alchk = GET_MODE_BITSIZE (DECL_MODE (block->element));

  if (count)
    {
      type = TREE_TYPE (block->element);
      while (count--)
        type = TREE_TYPE (type);

      align = TYPE_ALIGN (type);
      alchk = GET_MODE_BITSIZE (TYPE_MODE (type));
    }

  if (align < alchk)
    align = alchk;

  /* Coalescing wider fields is probably pointless and
     inefficient.  */
  if (align > BITS_PER_WORD)
    align = BITS_PER_WORD;

  bit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
    + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
  size = tree_low_cst (DECL_SIZE (f), 1);

  alchk = align - 1;
  alchk = ~alchk;

  if ((bit & alchk) != ((bit + size - 1) & alchk))
    return f;

  /* Find adjacent fields in the same alignment word.  */

  for (prev = f, f = TREE_CHAIN (f);
       f && TREE_CODE (f) == FIELD_DECL
       && is_sra_scalar_type (TREE_TYPE (f))
       && host_integerp (DECL_FIELD_OFFSET (f), 1)
       && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
       && host_integerp (DECL_SIZE (f), 1)
       && !lookup_element (elt, f, NULL, NO_INSERT);
       prev = f, f = TREE_CHAIN (f))
    {
      unsigned HOST_WIDE_INT nbit, nsize;

      nbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
        + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
      nsize = tree_low_cst (DECL_SIZE (f), 1);

      if (bit + size == nbit)
        {
          if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
            {
              /* If we're at an alignment boundary, don't bother
                 growing alignment such that we can include this next
                 field.  */
              if ((nbit & alchk)
                  || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
                break;

              align = GET_MODE_BITSIZE (DECL_MODE (f));
              alchk = align - 1;
              alchk = ~alchk;

              if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
                break;
            }
          size += nsize;
        }
      else if (nbit + nsize == bit)
        {
          if ((nbit & alchk) != ((bit + size - 1) & alchk))
            {
              if ((bit & alchk)
                  || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
                break;

              align = GET_MODE_BITSIZE (DECL_MODE (f));
              alchk = align - 1;
              alchk = ~alchk;

              if ((nbit & alchk) != ((bit + size - 1) & alchk))
                break;
            }
          bit = nbit;
          size += nsize;
        }
      else
        break;
    }

  f = prev;

  if (f == first)
    return f;

  gcc_assert ((bit & alchk) == ((bit + size - 1) & alchk));

  /* Try to widen the bit range so as to cover padding bits as well.  */

  if ((bit & ~alchk) || size != align)
    {
      unsigned HOST_WIDE_INT mbit = bit & alchk;
      unsigned HOST_WIDE_INT msize = align;

      for (f = TYPE_FIELDS (elt->type);
           f; f = TREE_CHAIN (f))
        {
          unsigned HOST_WIDE_INT fbit, fsize;

          /* Skip the fields from first to prev.  */
          if (f == first)
            {
              f = prev;
              continue;
            }

          if (!(TREE_CODE (f) == FIELD_DECL
                && host_integerp (DECL_FIELD_OFFSET (f), 1)
                && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)))
            continue;

          fbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
            + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);

          /* If we're past the selected word, we're fine.  */
          if ((bit & alchk) < (fbit & alchk))
            continue;

          if (host_integerp (DECL_SIZE (f), 1))
            fsize = tree_low_cst (DECL_SIZE (f), 1);
          else
            /* Assume a variable-sized field takes up all space till
               the end of the word.  ??? Endianness issues?  */
            fsize = align - (fbit & alchk);

          if ((fbit & alchk) < (bit & alchk))
            {
              /* A large field might start at a previous word and
                 extend into the selected word.  Exclude those
                 bits.  ??? Endianness issues?  */
              HOST_WIDE_INT diff = fbit + fsize - mbit;

              if (diff <= 0)
                continue;

              mbit += diff;
              msize -= diff;
            }
          else
            {
              /* Non-overlapping, great.  */
              if (fbit + fsize <= mbit
                  || mbit + msize <= fbit)
                continue;

              if (fbit <= mbit)
                {
                  unsigned HOST_WIDE_INT diff = fbit + fsize - mbit;
                  mbit += diff;
                  msize -= diff;
                }
              else if (fbit > mbit)
                msize -= (mbit + msize - fbit);
              else
                gcc_unreachable ();
            }
        }

      bit = mbit;
      size = msize;
    }

  /* Now we know the bit range we're interested in.  Find the smallest
     machine mode we can use to access it.  */

  for (mode = smallest_mode_for_size (size, MODE_INT);
       ;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      gcc_assert (mode != VOIDmode);

      alchk = GET_MODE_PRECISION (mode) - 1;
      alchk = ~alchk;

      if ((bit & alchk) == ((bit + size - 1) & alchk))
        break;
    }

  gcc_assert (~alchk < align);

  /* Create the field group as a single variable.  */

  type = lang_hooks.types.type_for_mode (mode, 1);
  gcc_assert (type);
  var = build3 (BIT_FIELD_REF, type, NULL_TREE,
                bitsize_int (size),
                bitsize_int (bit));
  BIT_FIELD_REF_UNSIGNED (var) = 1;

  block = instantiate_missing_elements_1 (elt, var, type);
  gcc_assert (block && block->is_scalar);

  var = block->replacement;

  if ((bit & ~alchk)
      || (HOST_WIDE_INT)size != tree_low_cst (DECL_SIZE (var), 1))
    {
      block->replacement = build3 (BIT_FIELD_REF,
                                   TREE_TYPE (block->element), var,
                                   bitsize_int (size),
                                   bitsize_int (bit & ~alchk));
      BIT_FIELD_REF_UNSIGNED (block->replacement) = 1;
    }

  block->in_bitfld_block = 2;

  /* Add the member fields to the group, such that they access
     portions of the group variable.  */

  for (f = first; f != TREE_CHAIN (prev); f = TREE_CHAIN (f))
    {
      tree field_type = canon_type_for_field (f, elt->element);
      struct sra_elt *fld = lookup_element (block, f, field_type, INSERT);

      gcc_assert (fld && fld->is_scalar && !fld->replacement);

      fld->replacement = build3 (BIT_FIELD_REF, field_type, var,
                                 DECL_SIZE (f),
                                 bitsize_int
                                 ((TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f))
                                   * BITS_PER_UNIT
                                   + (TREE_INT_CST_LOW
                                      (DECL_FIELD_BIT_OFFSET (f))))
                                  & ~alchk));
      BIT_FIELD_REF_UNSIGNED (fld->replacement) = TYPE_UNSIGNED (field_type);
      fld->in_bitfld_block = 1;
    }

  return prev;
}
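
/* A sketch of the effect of the grouping above, on a hypothetical
   record: given

     struct s { unsigned a : 8; unsigned b : 8; unsigned c : 16; };

   the three adjacent bit-fields fit in one 32-bit alignment word, so on
   a typical 32-bit target they may be grouped into a single word-mode
   variable accessed via BIT_FIELD_REF, with each of A, B and C becoming
   a BIT_FIELD_REF into that variable at its bit offset.  */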

static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
        tree f;
        for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              tree last = try_instantiate_multiple_fields (elt, f);

              if (last != f)
                {
                  f = last;
                  continue;
                }

              instantiate_missing_elements_1 (elt, f,
                                              canon_type_for_field
                                              (f, elt->element));
            }
        break;
      }

    case ARRAY_TYPE:
      {
        tree i, max, subtype;

        i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
        max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
        subtype = TREE_TYPE (type);

        while (1)
          {
            instantiate_missing_elements_1 (elt, i, subtype);
            if (tree_int_cst_equal (i, max))
              break;
            i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
          }

        break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}

/* Return true if there is only one non-aggregate field in the record, TYPE.
   Return false otherwise.  */

static bool
single_scalar_field_in_record_p (tree type)
{
  int num_fields = 0;
  tree field;
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
        num_fields++;

        if (num_fields == 2)
          return false;

        if (AGGREGATE_TYPE_P (TREE_TYPE (field)))
          return false;
      }

  return true;
}

/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* We shouldn't be invoked on groups of sub-elements as they must
     behave like their parent as far as block copy is concerned.  */
  gcc_assert (!elt->is_group);

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
        {
          fputs ("Scalarization disabled for ", dump_file);
          dump_sra_elt_name (dump_file, elt);
          fputc ('\n', dump_file);
        }

      /* Disable scalarization of sub-elements.  */
1840 for (c = elt->children; c; c = c->sibling)
1841 {
1842 c->cannot_scalarize = 1;
1843 decide_block_copy (c);
1844 }
1845
1846 /* Groups behave like their parent. */
1847 for (c = elt->groups; c; c = c->sibling)
1848 {
1849 c->cannot_scalarize = 1;
1850 c->use_block_copy = 1;
1851 }
1852
1853 return false;
1854 }
1855
1856 /* Don't decide if we've no uses and no groups. */
1857 if (elt->n_uses == 0 && elt->n_copies == 0 && elt->groups == NULL)
1858 ;
1859
1860 else if (!elt->is_scalar)
1861 {
1862 tree size_tree = TYPE_SIZE_UNIT (elt->type);
1863 bool use_block_copy = true;
1864
1865 /* Tradeoffs for COMPLEX types pretty much always make it better
1866 to go ahead and split the components. */
1867 if (TREE_CODE (elt->type) == COMPLEX_TYPE)
1868 use_block_copy = false;
1869
1870 /* Don't bother trying to figure out the rest if the structure is
1871 so large we can't do easy arithmetic. This also forces block
1872 copies for variable sized structures. */
1873 else if (host_integerp (size_tree, 1))
1874 {
1875 unsigned HOST_WIDE_INT full_size, inst_size = 0;
1876 unsigned int max_size, max_count, inst_count, full_count;
1877
1878 /* If the sra-max-structure-size parameter is 0, then the
1879 user has not overridden the parameter and we can choose a
1880 sensible default. */
1881 max_size = SRA_MAX_STRUCTURE_SIZE
1882 ? SRA_MAX_STRUCTURE_SIZE
1883 : MOVE_RATIO * UNITS_PER_WORD;
1884 max_count = SRA_MAX_STRUCTURE_COUNT
1885 ? SRA_MAX_STRUCTURE_COUNT
1886 : MOVE_RATIO;
1887
1888 full_size = tree_low_cst (size_tree, 1);
1889 full_count = count_type_elements (elt->type, false);
1890 inst_count = sum_instantiated_sizes (elt, &inst_size);
1891
1892 /* If there is only one scalar field in the record, don't block copy. */
1893 if (single_scalar_field_in_record_p (elt->type))
1894 use_block_copy = false;
1895
1896 /* ??? What to do here. If there are two fields, and we've only
1897 instantiated one, then instantiating the other is clearly a win.
1898 If there are a large number of fields then the size of the copy
1899 is much more of a factor. */
1900
1901 /* If the structure is small, and we've made copies, go ahead
1902 and instantiate, hoping that the copies will go away. */
1903 if (full_size <= max_size
1904 && (full_count - inst_count) <= max_count
1905 && elt->n_copies > elt->n_uses)
1906 use_block_copy = false;
1907 else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
1908 && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
1909 use_block_copy = false;
1910
1911 /* In order to avoid block copy, we have to be able to instantiate
1912 all elements of the type. See if this is possible. */
1913 if (!use_block_copy
1914 && (!can_completely_scalarize_p (elt)
1915 || !type_can_instantiate_all_elements (elt->type)))
1916 use_block_copy = true;
1917 }
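/* A worked instance of the heuristic above (assuming, purely for
   illustration, MOVE_RATIO of 4 on a target with 4-byte words and
   both params left at their 0 defaults): max_size works out to 16
   bytes and max_count to 4 fields, so a four-field 16-byte struct
   that is copied more often than it is used as a whole is split
   into elements, while a larger or variable-sized structure falls
   back to block copy.  */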
1918
1919 elt->use_block_copy = use_block_copy;
1920
1921 /* Groups behave like their parent. */
1922 for (c = elt->groups; c; c = c->sibling)
1923 c->use_block_copy = use_block_copy;
1924
1925 if (dump_file)
1926 {
1927 fprintf (dump_file, "Using %s for ",
1928 use_block_copy ? "block-copy" : "element-copy");
1929 dump_sra_elt_name (dump_file, elt);
1930 fputc ('\n', dump_file);
1931 }
1932
1933 if (!use_block_copy)
1934 {
1935 instantiate_missing_elements (elt);
1936 return true;
1937 }
1938 }
1939
1940 any_inst = elt->replacement != NULL;
1941
1942 for (c = elt->children; c ; c = c->sibling)
1943 any_inst |= decide_block_copy (c);
1944
1945 return any_inst;
1946 }
1947
1948 /* Entry point to phase 3. Instantiate scalar replacement variables. */
1949
1950 static void
1951 decide_instantiations (void)
1952 {
1953 unsigned int i;
1954 bool cleared_any;
1955 bitmap_head done_head;
1956 bitmap_iterator bi;
1957
1958 /* We cannot clear bits from a bitmap we're iterating over,
1959 so save up all the bits to clear until the end. */
1960 bitmap_initialize (&done_head, &bitmap_default_obstack);
1961 cleared_any = false;
1962
1963 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
1964 {
1965 tree var = referenced_var (i);
1966 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1967 if (elt)
1968 {
1969 decide_instantiation_1 (elt, 0, 0);
1970 if (!decide_block_copy (elt))
1971 elt = NULL;
1972 }
1973 if (!elt)
1974 {
1975 bitmap_set_bit (&done_head, i);
1976 cleared_any = true;
1977 }
1978 }
1979
1980 if (cleared_any)
1981 {
1982 bitmap_and_compl_into (sra_candidates, &done_head);
1983 bitmap_and_compl_into (needs_copy_in, &done_head);
1984 }
1985 bitmap_clear (&done_head);
1986
1987 mark_set_for_renaming (sra_candidates);
1988
1989 if (dump_file)
1990 fputc ('\n', dump_file);
1991 }
1992
1993 \f
1994 /* Phase Four: Update the function to match the replacements created. */
1995
1996 /* Mark all the variables in VDEF/VUSE operators for STMT for
1997 renaming. This becomes necessary when we modify all of a
1998 non-scalar. */
1999
2000 static void
2001 mark_all_v_defs_1 (tree stmt)
2002 {
2003 tree sym;
2004 ssa_op_iter iter;
2005
2006 update_stmt_if_modified (stmt);
2007
2008 FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
2009 {
2010 if (TREE_CODE (sym) == SSA_NAME)
2011 sym = SSA_NAME_VAR (sym);
2012 mark_sym_for_renaming (sym);
2013 }
2014 }
2015
2016
2017 /* Mark all the variables in virtual operands in all the statements in
2018 LIST for renaming. */
2019
2020 static void
2021 mark_all_v_defs (tree list)
2022 {
2023 if (TREE_CODE (list) != STATEMENT_LIST)
2024 mark_all_v_defs_1 (list);
2025 else
2026 {
2027 tree_stmt_iterator i;
2028 for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
2029 mark_all_v_defs_1 (tsi_stmt (i));
2030 }
2031 }
2032
2033
2034 /* Mark every replacement under ELT with TREE_NO_WARNING. */
2035
2036 static void
2037 mark_no_warning (struct sra_elt *elt)
2038 {
2039 if (!elt->all_no_warning)
2040 {
2041 if (elt->replacement)
2042 TREE_NO_WARNING (elt->replacement) = 1;
2043 else
2044 {
2045 struct sra_elt *c;
2046 FOR_EACH_ACTUAL_CHILD (c, elt)
2047 mark_no_warning (c);
2048 }
2049 elt->all_no_warning = true;
2050 }
2051 }
2052
2053 /* Build a single level component reference to ELT rooted at BASE. */
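/* For a RECORD_TYPE base this yields BASE.FIELD (or a BIT_FIELD_REF
   for a bit-field block element), for an ARRAY_TYPE an ARRAY_REF or
   ARRAY_RANGE_REF, and for a COMPLEX_TYPE __real__ BASE or
   __imag__ BASE.  */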
2054
2055 static tree
2056 generate_one_element_ref (struct sra_elt *elt, tree base)
2057 {
2058 switch (TREE_CODE (TREE_TYPE (base)))
2059 {
2060 case RECORD_TYPE:
2061 {
2062 tree field = elt->element;
2063
2064 /* We can't test elt->in_bitfld_block here because, when this is
2065 called from instantiate_element, we haven't set this field
2066 yet. */
2067 if (TREE_CODE (field) == BIT_FIELD_REF)
2068 {
2069 tree ret = unshare_expr (field);
2070 TREE_OPERAND (ret, 0) = base;
2071 return ret;
2072 }
2073
2074 /* Watch out for compatible records with differing field lists. */
2075 if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
2076 field = find_compatible_field (TREE_TYPE (base), field);
2077
2078 return build3 (COMPONENT_REF, elt->type, base, field, NULL);
2079 }
2080
2081 case ARRAY_TYPE:
2082 if (TREE_CODE (elt->element) == RANGE_EXPR)
2083 return build4 (ARRAY_RANGE_REF, elt->type, base,
2084 TREE_OPERAND (elt->element, 0), NULL, NULL);
2085 else
2086 return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
2087
2088 case COMPLEX_TYPE:
2089 if (elt->element == integer_zero_node)
2090 return build1 (REALPART_EXPR, elt->type, base);
2091 else
2092 return build1 (IMAGPART_EXPR, elt->type, base);
2093
2094 default:
2095 gcc_unreachable ();
2096 }
2097 }
2098
2099 /* Build a full component reference to ELT rooted at its native variable. */
2100
2101 static tree
2102 generate_element_ref (struct sra_elt *elt)
2103 {
2104 if (elt->parent)
2105 return generate_one_element_ref (elt, generate_element_ref (elt->parent));
2106 else
2107 return elt->element;
2108 }
2109
2110 /* Return true if BF is a bit-field that we can handle like a scalar. */
2111
2112 static bool
2113 scalar_bitfield_p (tree bf)
2114 {
2115 return (TREE_CODE (bf) == BIT_FIELD_REF
2116 && (is_gimple_reg (TREE_OPERAND (bf, 0))
2117 || (TYPE_MODE (TREE_TYPE (TREE_OPERAND (bf, 0))) != BLKmode
2118 && (!TREE_SIDE_EFFECTS (TREE_OPERAND (bf, 0))
2119 || (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE
2120 (TREE_OPERAND (bf, 0))))
2121 <= BITS_PER_WORD)))));
2122 }
2123
2124 /* Create an assignment statement from SRC to DST. */
2125
2126 static tree
2127 sra_build_assignment (tree dst, tree src)
2128 {
2129 /* Turning BIT_FIELD_REFs into bit operations enables other passes
2130 to do a much better job at optimizing the code.
2131 From dst = BIT_FIELD_REF <var, sz, off> we produce
2132
2133 SR.1 = (scalar type) var;
2134 SR.2 = SR.1 >> off;
2135 SR.3 = SR.2 & ((1 << sz) - 1);
2136 ... possible sign extension of SR.3 ...
2137 dst = (destination type) SR.3;
2138 */
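/* For instance, on a little-endian target, dst = BIT_FIELD_REF
   <var, 8, 16> with a 32-bit VAR would come out roughly as

   SR.1 = (unsigned int) var;
   SR.2 = SR.1 >> 16;
   SR.3 = SR.2 & 255;
   dst = (destination type) SR.3;

   (the exact temporaries and conversions depend on the types
   involved).  */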
2139 if (scalar_bitfield_p (src))
2140 {
2141 tree var, shift, width;
2142 tree utype, stype, stmp, utmp, dtmp;
2143 tree list, stmt;
2144 bool unsignedp = BIT_FIELD_REF_UNSIGNED (src);
2145
2146 var = TREE_OPERAND (src, 0);
2147 width = TREE_OPERAND (src, 1);
2148 /* The offset needs to be adjusted to a right shift quantity
2149 depending on the endianness.  */
2150 if (BYTES_BIG_ENDIAN)
2151 {
2152 tree tmp = size_binop (PLUS_EXPR, width, TREE_OPERAND (src, 2));
2153 shift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), tmp);
2154 }
2155 else
2156 shift = TREE_OPERAND (src, 2);
2157
2158 /* In weird cases we have non-integral types for the source or
2159 destination object.
2160 ??? For unknown reasons we also want an unsigned scalar type. */
2161 stype = TREE_TYPE (var);
2162 if (!INTEGRAL_TYPE_P (stype))
2163 stype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
2164 (TYPE_SIZE (stype)), 1);
2165 else if (!TYPE_UNSIGNED (stype))
2166 stype = unsigned_type_for (stype);
2167
2168 utype = TREE_TYPE (dst);
2169 if (!INTEGRAL_TYPE_P (utype))
2170 utype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
2171 (TYPE_SIZE (utype)), 1);
2172 else if (!TYPE_UNSIGNED (utype))
2173 utype = unsigned_type_for (utype);
2174
2175 list = NULL;
2176 stmp = make_rename_temp (stype, "SR");
2177
2178 /* Convert the base var of the BIT_FIELD_REF to the scalar type
2179 we use for computation if we cannot use it directly. */
2180 if (!useless_type_conversion_p (stype, TREE_TYPE (var)))
2181 {
2182 if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
2183 stmt = build_gimple_modify_stmt (stmp,
2184 fold_convert (stype, var));
2185 else
2186 stmt = build_gimple_modify_stmt (stmp,
2187 fold_build1 (VIEW_CONVERT_EXPR,
2188 stype, var));
2189 append_to_statement_list (stmt, &list);
2190 var = stmp;
2191 }
2192
2193 if (!integer_zerop (shift))
2194 {
2195 stmt = build_gimple_modify_stmt (stmp,
2196 fold_build2 (RSHIFT_EXPR, stype,
2197 var, shift));
2198 append_to_statement_list (stmt, &list);
2199 var = stmp;
2200 }
2201
2202 /* If we need a masking operation, produce one. */
2203 if (TREE_INT_CST_LOW (width) == TYPE_PRECISION (stype))
2204 unsignedp = true;
2205 else
2206 {
2207 tree one = build_int_cst_wide (stype, 1, 0);
2208 tree mask = int_const_binop (LSHIFT_EXPR, one, width, 0);
2209 mask = int_const_binop (MINUS_EXPR, mask, one, 0);
2210
2211 stmt = build_gimple_modify_stmt (stmp,
2212 fold_build2 (BIT_AND_EXPR, stype,
2213 var, mask));
2214 append_to_statement_list (stmt, &list);
2215 var = stmp;
2216 }
2217
2218 /* After shifting and masking, convert to the target type. */
2219 utmp = stmp;
2220 if (!useless_type_conversion_p (utype, stype))
2221 {
2222 utmp = make_rename_temp (utype, "SR");
2223
2224 stmt = build_gimple_modify_stmt (utmp, fold_convert (utype, var));
2225 append_to_statement_list (stmt, &list);
2226
2227 var = utmp;
2228 }
2229
2230 /* Perform sign extension, if required.
2231 ??? This should never be necessary. */
2232 if (!unsignedp)
2233 {
2234 tree signbit = int_const_binop (LSHIFT_EXPR,
2235 build_int_cst_wide (utype, 1, 0),
2236 size_binop (MINUS_EXPR, width,
2237 bitsize_int (1)), 0);
2238
2239 stmt = build_gimple_modify_stmt (utmp,
2240 fold_build2 (BIT_XOR_EXPR, utype,
2241 var, signbit));
2242 append_to_statement_list (stmt, &list);
2243
2244 stmt = build_gimple_modify_stmt (utmp,
2245 fold_build2 (MINUS_EXPR, utype,
2246 utmp, signbit));
2247 append_to_statement_list (stmt, &list);
2248
2249 var = utmp;
2250 }
2251
2252 /* Finally, move and convert to the destination. */
2253 if (!useless_type_conversion_p (TREE_TYPE (dst), TREE_TYPE (var)))
2254 {
2255 if (INTEGRAL_TYPE_P (TREE_TYPE (dst)))
2256 var = fold_convert (TREE_TYPE (dst), var);
2257 else
2258 var = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (dst), var);
2259
2260 /* If the destination is not a register the conversion needs
2261 to be a separate statement. */
2262 if (!is_gimple_reg (dst))
2263 {
2264 dtmp = make_rename_temp (TREE_TYPE (dst), "SR");
2265 stmt = build_gimple_modify_stmt (dtmp, var);
2266 append_to_statement_list (stmt, &list);
2267 var = dtmp;
2268 }
2269 }
2270 stmt = build_gimple_modify_stmt (dst, var);
2271 append_to_statement_list (stmt, &list);
2272
2273 return list;
2274 }
2275
2276 /* It was hoped that we could perform some type sanity checking
2277 here, but since front-ends can emit accesses of fields in types
2278 different from their nominal types and copy structures containing
2279 them as a whole, we'd have to handle such differences here.
2280 Since such accesses under different types require compatibility
2281 anyway, there's little point in making tests and/or adding
2282 conversions to ensure the types of src and dst are the same.
2283 So we just assume type differences at this point are ok.
2284 The only exception we make here is pointer types, which can differ
2285 in e.g. structurally equal, but non-identical RECORD_TYPEs.  */
2286 if (POINTER_TYPE_P (TREE_TYPE (dst))
2287 && !useless_type_conversion_p (TREE_TYPE (dst), TREE_TYPE (src)))
2288 src = fold_convert (TREE_TYPE (dst), src);
2289
2290 return build_gimple_modify_stmt (dst, src);
2291 }
2292
2293 /* BIT_FIELD_REFs must not be shared. sra_build_elt_assignment()
2294 takes care of assignments, but we must create copies for uses. */
2295 #define REPLDUP(t) (TREE_CODE (t) != BIT_FIELD_REF ? (t) : unshare_expr (t))
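/* For example, generate_copy_inout below wraps ELT->replacement in
   REPLDUP before reusing it as a right-hand side, so that a
   BIT_FIELD_REF replacement is never shared between statements.  */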
2296
2297 /* Emit an assignment from SRC to DST, but if DST is a scalarizable
2298 BIT_FIELD_REF, turn it into bit operations. */
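/* Sketch of the transformation, for a little-endian target and
   ignoring the type conversions involved: from BIT_FIELD_REF
   <var, sz, off> = src we produce the read-modify-write sequence

   SR.1 = var & ~(((1 << sz) - 1) << off);
   SR.2 = (unsigned type) src << off;
   var = SR.1 | SR.2;

   with SRC masked first if it may carry bits above SZ.  */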
2299
2300 static tree
2301 sra_build_bf_assignment (tree dst, tree src)
2302 {
2303 tree var, type, utype, tmp, tmp2, tmp3;
2304 tree list, stmt;
2305 tree cst, cst2, mask;
2306 tree minshift, maxshift;
2307
2308 if (TREE_CODE (dst) != BIT_FIELD_REF)
2309 return sra_build_assignment (dst, src);
2310
2311 var = TREE_OPERAND (dst, 0);
2312
2313 if (!scalar_bitfield_p (dst))
2314 return sra_build_assignment (REPLDUP (dst), src);
2315
2316 list = NULL;
2317
2318 cst = fold_convert (bitsizetype, TREE_OPERAND (dst, 2));
2319 cst2 = size_binop (PLUS_EXPR,
2320 fold_convert (bitsizetype, TREE_OPERAND (dst, 1)),
2321 cst);
2322
2323 if (BYTES_BIG_ENDIAN)
2324 {
2325 maxshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst);
2326 minshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst2);
2327 }
2328 else
2329 {
2330 maxshift = cst2;
2331 minshift = cst;
2332 }
2333
2334 type = TREE_TYPE (var);
2335 if (!INTEGRAL_TYPE_P (type))
2336 type = lang_hooks.types.type_for_size
2337 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (var))), 1);
2338 if (TYPE_UNSIGNED (type))
2339 utype = type;
2340 else
2341 utype = unsigned_type_for (type);
2342
2343 mask = build_int_cst_wide (utype, 1, 0);
2344 if (TREE_INT_CST_LOW (maxshift) == TYPE_PRECISION (utype))
2345 cst = build_int_cst_wide (utype, 0, 0);
2346 else
2347 cst = int_const_binop (LSHIFT_EXPR, mask, maxshift, true);
2348 if (integer_zerop (minshift))
2349 cst2 = mask;
2350 else
2351 cst2 = int_const_binop (LSHIFT_EXPR, mask, minshift, true);
2352 mask = int_const_binop (MINUS_EXPR, cst, cst2, true);
2353 mask = fold_build1 (BIT_NOT_EXPR, utype, mask);
2354
2355 if (TYPE_MAIN_VARIANT (utype) != TYPE_MAIN_VARIANT (TREE_TYPE (var))
2356 && !integer_zerop (mask))
2357 {
2358 tmp = var;
2359 if (!is_gimple_variable (tmp))
2360 tmp = unshare_expr (var);
2361
2362 tmp2 = make_rename_temp (utype, "SR");
2363
2364 if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
2365 stmt = build_gimple_modify_stmt (tmp2, fold_convert (utype, tmp));
2366 else
2367 stmt = build_gimple_modify_stmt (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
2368 utype, tmp));
2369 append_to_statement_list (stmt, &list);
2370 }
2371 else
2372 tmp2 = var;
2373
2374 if (!integer_zerop (mask))
2375 {
2376 tmp = make_rename_temp (utype, "SR");
2377 stmt = build_gimple_modify_stmt (tmp,
2378 fold_build2 (BIT_AND_EXPR, utype,
2379 tmp2, mask));
2380 append_to_statement_list (stmt, &list);
2381 }
2382 else
2383 tmp = mask;
2384
2385 if (is_gimple_reg (src) && INTEGRAL_TYPE_P (TREE_TYPE (src)))
2386 tmp2 = src;
2387 else if (INTEGRAL_TYPE_P (TREE_TYPE (src)))
2388 {
2389 tmp2 = make_rename_temp (TREE_TYPE (src), "SR");
2390 stmt = sra_build_assignment (tmp2, src);
2391 append_to_statement_list (stmt, &list);
2392 }
2393 else
2394 {
2395 tmp2 = make_rename_temp
2396 (lang_hooks.types.type_for_size
2397 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (src))),
2398 1), "SR");
2399 stmt = sra_build_assignment (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
2400 TREE_TYPE (tmp2), src));
2401 append_to_statement_list (stmt, &list);
2402 }
2403
2404 if (!TYPE_UNSIGNED (TREE_TYPE (tmp2)))
2405 {
2406 tree ut = unsigned_type_for (TREE_TYPE (tmp2));
2407 tmp3 = make_rename_temp (ut, "SR");
2408 tmp2 = fold_convert (ut, tmp2);
2409 stmt = sra_build_assignment (tmp3, tmp2);
2410 append_to_statement_list (stmt, &list);
2411
2412 tmp2 = fold_build1 (BIT_NOT_EXPR, utype, mask);
2413 tmp2 = int_const_binop (RSHIFT_EXPR, tmp2, minshift, true);
2414 tmp2 = fold_convert (ut, tmp2);
2415 tmp2 = fold_build2 (BIT_AND_EXPR, ut, tmp3, tmp2);
2416
2417 if (tmp3 != tmp2)
2418 {
2419 tmp3 = make_rename_temp (ut, "SR");
2420 stmt = sra_build_assignment (tmp3, tmp2);
2421 append_to_statement_list (stmt, &list);
2422 }
2423
2424 tmp2 = tmp3;
2425 }
2426
2427 if (TYPE_MAIN_VARIANT (TREE_TYPE (tmp2)) != TYPE_MAIN_VARIANT (utype))
2428 {
2429 tmp3 = make_rename_temp (utype, "SR");
2430 tmp2 = fold_convert (utype, tmp2);
2431 stmt = sra_build_assignment (tmp3, tmp2);
2432 append_to_statement_list (stmt, &list);
2433 tmp2 = tmp3;
2434 }
2435
2436 if (!integer_zerop (minshift))
2437 {
2438 tmp3 = make_rename_temp (utype, "SR");
2439 stmt = build_gimple_modify_stmt (tmp3,
2440 fold_build2 (LSHIFT_EXPR, utype,
2441 tmp2, minshift));
2442 append_to_statement_list (stmt, &list);
2443 tmp2 = tmp3;
2444 }
2445
2446 if (utype != TREE_TYPE (var))
2447 tmp3 = make_rename_temp (utype, "SR");
2448 else
2449 tmp3 = var;
2450 stmt = build_gimple_modify_stmt (tmp3,
2451 fold_build2 (BIT_IOR_EXPR, utype,
2452 tmp, tmp2));
2453 append_to_statement_list (stmt, &list);
2454
2455 if (tmp3 != var)
2456 {
2457 if (TREE_TYPE (var) == type)
2458 stmt = build_gimple_modify_stmt (var,
2459 fold_convert (type, tmp3));
2460 else
2461 stmt = build_gimple_modify_stmt (var,
2462 fold_build1 (VIEW_CONVERT_EXPR,
2463 TREE_TYPE (var), tmp3));
2464 append_to_statement_list (stmt, &list);
2465 }
2466
2467 return list;
2468 }
2469
2470 /* Expand an assignment of SRC to the scalarized representation of
2471 ELT. If it is a field group, try to widen the assignment to cover
2472 the full variable. */
2473
2474 static tree
2475 sra_build_elt_assignment (struct sra_elt *elt, tree src)
2476 {
2477 tree dst = elt->replacement;
2478 tree var, tmp, cst, cst2, list, stmt;
2479
2480 if (TREE_CODE (dst) != BIT_FIELD_REF
2481 || !elt->in_bitfld_block)
2482 return sra_build_assignment (REPLDUP (dst), src);
2483
2484 var = TREE_OPERAND (dst, 0);
2485
2486 /* Try to widen the assignment to the entire variable.
2487 We need the source to be a BIT_FIELD_REF as well: for
2488 BIT_FIELD_REF<d,sz,dp> = BIT_FIELD_REF<s,sz,sp>, the conditions
2489 guaranteed by construction allow us to turn it into
2490 d = BIT_FIELD_REF<s,dw,sp-dp>. */
2491 if (elt->in_bitfld_block == 2
2492 && TREE_CODE (src) == BIT_FIELD_REF)
2493 {
2494 cst = TYPE_SIZE (TREE_TYPE (var));
2495 cst2 = size_binop (MINUS_EXPR, TREE_OPERAND (src, 2),
2496 TREE_OPERAND (dst, 2));
2497
2498 src = TREE_OPERAND (src, 0);
2499
2500 /* If the access covers all of SRC, a conversion suffices; avoid a full-width BIT_FIELD_REF. */
2501 if (integer_zerop (cst2)
2502 && tree_int_cst_equal (cst, TYPE_SIZE (TREE_TYPE (src))))
2503 {
2504 if (INTEGRAL_TYPE_P (TREE_TYPE (src))
2505 && !TYPE_UNSIGNED (TREE_TYPE (src)))
2506 src = fold_convert (unsigned_type_for (TREE_TYPE (src)), src);
2507
2508 /* If a single conversion won't do, we'll need a statement
2509 list. */
2510 if (TYPE_MAIN_VARIANT (TREE_TYPE (var))
2511 != TYPE_MAIN_VARIANT (TREE_TYPE (src)))
2512 {
2513 list = NULL;
2514
2515 if (!INTEGRAL_TYPE_P (TREE_TYPE (src)))
2516 src = fold_build1 (VIEW_CONVERT_EXPR,
2517 lang_hooks.types.type_for_size
2518 (TREE_INT_CST_LOW
2519 (TYPE_SIZE (TREE_TYPE (src))),
2520 1), src);
2521 gcc_assert (TYPE_UNSIGNED (TREE_TYPE (src)));
2522
2523 tmp = make_rename_temp (TREE_TYPE (src), "SR");
2524 stmt = build_gimple_modify_stmt (tmp, src);
2525 append_to_statement_list (stmt, &list);
2526
2527 stmt = sra_build_assignment (var,
2528 fold_convert (TREE_TYPE (var),
2529 tmp));
2530 append_to_statement_list (stmt, &list);
2531
2532 return list;
2533 }
2534
2535 src = fold_convert (TREE_TYPE (var), src);
2536 }
2537 else
2538 {
2539 src = fold_build3 (BIT_FIELD_REF, TREE_TYPE (var), src, cst, cst2);
2540 BIT_FIELD_REF_UNSIGNED (src) = 1;
2541 }
2542
2543 return sra_build_assignment (var, src);
2544 }
2545
2546 return sra_build_bf_assignment (dst, src);
2547 }
2548
2549 /* Generate a set of assignment statements in *LIST_P to copy all
2550 instantiated elements under ELT to or from the equivalent structure
2551 rooted at EXPR. COPY_OUT controls the direction of the copy, with
2552 true meaning to copy out of EXPR into ELT. */
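/* For instance, for a fully instantiated struct { int a; float b; } X,
   copying out of X emits SR.a = X.a and SR.b = X.b, while copying back
   in emits X.a = SR.a and X.b = SR.b (each via the bit-field-aware
   assignment builders above).  */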
2553
2554 static void
2555 generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
2556 tree *list_p)
2557 {
2558 struct sra_elt *c;
2559 tree t;
2560
2561 if (!copy_out && TREE_CODE (expr) == SSA_NAME
2562 && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
2563 {
2564 tree r, i;
2565
2566 c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
2567 r = c->replacement;
2568 c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
2569 i = c->replacement;
2570
2571 t = build2 (COMPLEX_EXPR, elt->type, r, i);
2572 t = sra_build_bf_assignment (expr, t);
2573 SSA_NAME_DEF_STMT (expr) = t;
2574 append_to_statement_list (t, list_p);
2575 }
2576 else if (elt->replacement)
2577 {
2578 if (copy_out)
2579 t = sra_build_elt_assignment (elt, expr);
2580 else
2581 t = sra_build_bf_assignment (expr, REPLDUP (elt->replacement));
2582 append_to_statement_list (t, list_p);
2583 }
2584 else
2585 {
2586 FOR_EACH_ACTUAL_CHILD (c, elt)
2587 {
2588 t = generate_one_element_ref (c, unshare_expr (expr));
2589 generate_copy_inout (c, copy_out, t, list_p);
2590 }
2591 }
2592 }
2593
2594 /* Generate a set of assignment statements in *LIST_P to copy all instantiated
2595 elements under SRC to their counterparts under DST. There must be a 1-1
2596 correspondence of instantiated elements. */
2597
2598 static void
2599 generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
2600 {
2601 struct sra_elt *dc, *sc;
2602
2603 FOR_EACH_ACTUAL_CHILD (dc, dst)
2604 {
2605 sc = lookup_element (src, dc->element, NULL, NO_INSERT);
2606 if (!sc && dc->in_bitfld_block == 2)
2607 {
2608 struct sra_elt *dcs;
2609
2610 FOR_EACH_ACTUAL_CHILD (dcs, dc)
2611 {
2612 sc = lookup_element (src, dcs->element, NULL, NO_INSERT);
2613 gcc_assert (sc);
2614 generate_element_copy (dcs, sc, list_p);
2615 }
2616
2617 continue;
2618 }
2619
2620 /* If DST and SRC are structs with the same elements, but do not have
2621 the same TYPE_MAIN_VARIANT, then lookup of DST FIELD_DECL in SRC
2622 will fail. Try harder by finding the corresponding FIELD_DECL
2623 in SRC. */
2624 if (!sc)
2625 {
2626 tree f;
2627
2628 gcc_assert (useless_type_conversion_p (dst->type, src->type));
2629 gcc_assert (TREE_CODE (dc->element) == FIELD_DECL);
2630 for (f = TYPE_FIELDS (src->type); f ; f = TREE_CHAIN (f))
2631 if (simple_cst_equal (DECL_FIELD_OFFSET (f),
2632 DECL_FIELD_OFFSET (dc->element)) > 0
2633 && simple_cst_equal (DECL_FIELD_BIT_OFFSET (f),
2634 DECL_FIELD_BIT_OFFSET (dc->element)) > 0
2635 && simple_cst_equal (DECL_SIZE (f),
2636 DECL_SIZE (dc->element)) > 0
2637 && (useless_type_conversion_p (TREE_TYPE (dc->element),
2638 TREE_TYPE (f))
2639 || (POINTER_TYPE_P (TREE_TYPE (dc->element))
2640 && POINTER_TYPE_P (TREE_TYPE (f)))))
2641 break;
2642 gcc_assert (f != NULL_TREE);
2643 sc = lookup_element (src, f, NULL, NO_INSERT);
2644 }
2645
2646 generate_element_copy (dc, sc, list_p);
2647 }
2648
2649 if (dst->replacement)
2650 {
2651 tree t;
2652
2653 gcc_assert (src->replacement);
2654
2655 t = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
2656 append_to_statement_list (t, list_p);
2657 }
2658 }
2659
2660 /* Generate a set of assignment statements in *LIST_P to zero all instantiated
2661 elements under ELT. In addition, do not assign to elements that have been
2662 marked VISITED, but do reset the visited flag; this allows easy coordination
2663 with generate_element_init. */
2664
2665 static void
2666 generate_element_zero (struct sra_elt *elt, tree *list_p)
2667 {
2668 struct sra_elt *c;
2669
2670 if (elt->visited)
2671 {
2672 elt->visited = false;
2673 return;
2674 }
2675
2676 if (!elt->in_bitfld_block)
2677 FOR_EACH_ACTUAL_CHILD (c, elt)
2678 generate_element_zero (c, list_p);
2679
2680 if (elt->replacement)
2681 {
2682 tree t;
2683
2684 gcc_assert (elt->is_scalar);
2685 t = fold_convert (elt->type, integer_zero_node);
2686
2687 t = sra_build_elt_assignment (elt, t);
2688 append_to_statement_list (t, list_p);
2689 }
2690 }
2691
2692 /* Generate an assignment VAR = INIT, where INIT may need gimplification.
2693 Add the result to *LIST_P. */
2694
2695 static void
2696 generate_one_element_init (struct sra_elt *elt, tree init, tree *list_p)
2697 {
2698 /* The replacement can be almost arbitrarily complex. Gimplify. */
2699 tree stmt = sra_build_elt_assignment (elt, init);
2700 gimplify_and_add (stmt, list_p);
2701 }
2702
2703 /* Generate a set of assignment statements in *LIST_P to set all instantiated
2704 elements under ELT with the contents of the initializer INIT. In addition,
2705 mark all assigned elements VISITED; this allows easy coordination with
2706 generate_element_zero. Return false if we found a case we couldn't
2707 handle. */
2708
2709 static bool
2710 generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
2711 {
2712 bool result = true;
2713 enum tree_code init_code;
2714 struct sra_elt *sub;
2715 tree t;
2716 unsigned HOST_WIDE_INT idx;
2717 tree value, purpose;
2718
2719 /* We can be passed DECL_INITIAL of a static variable. It might have a
2720 conversion, which we strip off here. */
2721 STRIP_USELESS_TYPE_CONVERSION (init);
2722 init_code = TREE_CODE (init);
2723
2724 if (elt->is_scalar)
2725 {
2726 if (elt->replacement)
2727 {
2728 generate_one_element_init (elt, init, list_p);
2729 elt->visited = true;
2730 }
2731 return result;
2732 }
2733
2734 switch (init_code)
2735 {
2736 case COMPLEX_CST:
2737 case COMPLEX_EXPR:
2738 FOR_EACH_ACTUAL_CHILD (sub, elt)
2739 {
2740 if (sub->element == integer_zero_node)
2741 t = (init_code == COMPLEX_EXPR
2742 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
2743 else
2744 t = (init_code == COMPLEX_EXPR
2745 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
2746 result &= generate_element_init_1 (sub, t, list_p);
2747 }
2748 break;
2749
2750 case CONSTRUCTOR:
2751 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
2752 {
2753 if (TREE_CODE (purpose) == RANGE_EXPR)
2754 {
2755 tree lower = TREE_OPERAND (purpose, 0);
2756 tree upper = TREE_OPERAND (purpose, 1);
2757
2758 while (1)
2759 {
2760 sub = lookup_element (elt, lower, NULL, NO_INSERT);
2761 if (sub != NULL)
2762 result &= generate_element_init_1 (sub, value, list_p);
2763 if (tree_int_cst_equal (lower, upper))
2764 break;
2765 lower = int_const_binop (PLUS_EXPR, lower,
2766 integer_one_node, true);
2767 }
2768 }
2769 else
2770 {
2771 sub = lookup_element (elt, purpose, NULL, NO_INSERT);
2772 if (sub != NULL)
2773 result &= generate_element_init_1 (sub, value, list_p);
2774 }
2775 }
2776 break;
2777
2778 default:
2779 elt->visited = true;
2780 result = false;
2781 }
2782
2783 return result;
2784 }
2785
2786 /* A wrapper function for generate_element_init_1 that handles cleanup after
2787 gimplification. */
2788
2789 static bool
2790 generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
2791 {
2792 bool ret;
2793
2794 push_gimplify_context ();
2795 ret = generate_element_init_1 (elt, init, list_p);
2796 pop_gimplify_context (NULL);
2797
2798 /* The replacement can expose previously unreferenced variables. */
2799 if (ret && *list_p)
2800 {
2801 tree_stmt_iterator i;
2802
2803 for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
2804 find_new_referenced_vars (tsi_stmt_ptr (i));
2805 }
2806
2807 return ret;
2808 }
2809
2810 /* Insert STMT on all the outgoing edges of BB. Note that if BB
2811 has more than one outgoing edge, STMT will be replicated for each. Also,
2812 abnormal edges will be ignored. */
2813
2814 void
2815 insert_edge_copies (tree stmt, basic_block bb)
2816 {
2817 edge e;
2818 edge_iterator ei;
2819 bool first_copy;
2820
2821 first_copy = true;
2822 FOR_EACH_EDGE (e, ei, bb->succs)
2823 {
2824 /* We don't need to insert copies on abnormal edges. The
2825 value of the scalar replacement is not guaranteed to
2826 be valid through an abnormal edge. */
2827 if (!(e->flags & EDGE_ABNORMAL))
2828 {
2829 if (first_copy)
2830 {
2831 bsi_insert_on_edge (e, stmt);
2832 first_copy = false;
2833 }
2834 else
2835 bsi_insert_on_edge (e, unsave_expr_now (stmt));
2836 }
2837 }
2838 }
2839
2840 /* Helper function to insert LIST before BSI, and set up line number info. */
2841
2842 void
2843 sra_insert_before (block_stmt_iterator *bsi, tree list)
2844 {
2845 tree stmt = bsi_stmt (*bsi);
2846
2847 if (EXPR_HAS_LOCATION (stmt))
2848 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
2849 bsi_insert_before (bsi, list, BSI_SAME_STMT);
2850 }
2851
2852 /* Similarly, but insert after BSI. Handles insertion onto edges as well. */
2853
2854 void
2855 sra_insert_after (block_stmt_iterator *bsi, tree list)
2856 {
2857 tree stmt = bsi_stmt (*bsi);
2858
2859 if (EXPR_HAS_LOCATION (stmt))
2860 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
2861
2862 if (stmt_ends_bb_p (stmt))
2863 insert_edge_copies (list, bsi->bb);
2864 else
2865 bsi_insert_after (bsi, list, BSI_SAME_STMT);
2866 }
2867
2868 /* Similarly, but replace the statement at BSI. */
2869
2870 static void
2871 sra_replace (block_stmt_iterator *bsi, tree list)
2872 {
2873 sra_insert_before (bsi, list);
2874 bsi_remove (bsi, false);
2875 if (bsi_end_p (*bsi))
2876 *bsi = bsi_last (bsi->bb);
2877 else
2878 bsi_prev (bsi);
2879 }
2880
2881 /* Data structure that bitfield_overlaps_p fills in with information
2882 about the element passed in and how much of it overlaps with the
2883 bit-range passed to it.  */
2884
2885 struct bitfield_overlap_info
2886 {
2887 /* The bit-length of an element. */
2888 tree field_len;
2889
2890 /* The bit-position of the element in its parent. */
2891 tree field_pos;
2892
2893 /* The number of bits of the element that overlap with the incoming
2894 bit range. */
2895 tree overlap_len;
2896
2897 /* The first bit of the element that overlaps with the incoming bit
2898 range. */
2899 tree overlap_pos;
2900 };
2901
2902 /* Return true if a BIT_FIELD_REF<(FLD->parent), BLEN, BPOS>
2903 expression (referenced as BF below) accesses any of the bits in FLD,
2904 false if it doesn't. If DATA is non-null, its field_len and
2905 field_pos are filled in such that BIT_FIELD_REF<(FLD->parent),
2906 field_len, field_pos> (referenced as BFLD below) represents the
2907 entire field FLD->element, and BIT_FIELD_REF<BFLD, overlap_len,
2908 overlap_pos> represents the portion of the entire field that
2909 overlaps with BF. */
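/* A worked example: for a 32-bit field at bit position 0 and an
   incoming range with BLEN 16 and BPOS 24, the two overlap in bits
   24..31 of the parent, so we return true with field_len 32,
   field_pos 0, overlap_len 8 and overlap_pos 24.  */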
2910
2911 static bool
2912 bitfield_overlaps_p (tree blen, tree bpos, struct sra_elt *fld,
2913 struct bitfield_overlap_info *data)
2914 {
2915 tree flen, fpos;
2916 bool ret;
2917
2918 if (TREE_CODE (fld->element) == FIELD_DECL)
2919 {
2920 flen = fold_convert (bitsizetype, DECL_SIZE (fld->element));
2921 fpos = fold_convert (bitsizetype, DECL_FIELD_OFFSET (fld->element));
2922 fpos = size_binop (MULT_EXPR, fpos, bitsize_int (BITS_PER_UNIT));
2923 fpos = size_binop (PLUS_EXPR, fpos, DECL_FIELD_BIT_OFFSET (fld->element));
2924 }
2925 else if (TREE_CODE (fld->element) == BIT_FIELD_REF)
2926 {
2927 flen = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 1));
2928 fpos = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 2));
2929 }
2930 else if (TREE_CODE (fld->element) == INTEGER_CST)
2931 {
2932 flen = fold_convert (bitsizetype, TYPE_SIZE (fld->type));
2933 fpos = fold_convert (bitsizetype, fld->element);
2934 fpos = size_binop (MULT_EXPR, flen, fpos);
2935 }
2936 else
2937 gcc_unreachable ();
2938
2939 gcc_assert (host_integerp (blen, 1)
2940 && host_integerp (bpos, 1)
2941 && host_integerp (flen, 1)
2942 && host_integerp (fpos, 1));
2943
2944 ret = ((!tree_int_cst_lt (fpos, bpos)
2945 && tree_int_cst_lt (size_binop (MINUS_EXPR, fpos, bpos),
2946 blen))
2947 || (!tree_int_cst_lt (bpos, fpos)
2948 && tree_int_cst_lt (size_binop (MINUS_EXPR, bpos, fpos),
2949 flen)));
2950
2951 if (!ret)
2952 return ret;
2953
2954 if (data)
2955 {
2956 tree bend, fend;
2957
2958 data->field_len = flen;
2959 data->field_pos = fpos;
2960
2961 fend = size_binop (PLUS_EXPR, fpos, flen);
2962 bend = size_binop (PLUS_EXPR, bpos, blen);
2963
2964 if (tree_int_cst_lt (bend, fend))
2965 data->overlap_len = size_binop (MINUS_EXPR, bend, fpos);
2966 else
2967 data->overlap_len = NULL;
2968
2969 if (tree_int_cst_lt (fpos, bpos))
2970 {
2971 data->overlap_pos = size_binop (MINUS_EXPR, bpos, fpos);
2972 data->overlap_len = size_binop (MINUS_EXPR,
2973 data->overlap_len
2974 ? data->overlap_len
2975 : data->field_len,
2976 data->overlap_pos);
2977 }
2978 else
2979 data->overlap_pos = NULL;
2980 }
2981
2982 return ret;
2983 }
2984
2985 /* Add to LISTP a sequence of statements that copies BLEN bits between
2986 VAR and the scalarized elements of ELT, starting at bit VPOS of VAR
2987 and at bit BPOS of ELT. The direction of the copy is given by
2988 TO_VAR. */
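/* For example, if the copied range spans two instantiated 8-bit
   fields, two BIT_FIELD_REF assignments are emitted, one per field,
   each restricted to the overlapping bits; children without
   replacements are handled by recursing with the clipped range.  */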
2989
2990 static void
2991 sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
2992 tree *listp, tree blen, tree bpos,
2993 struct sra_elt *elt)
2994 {
2995 struct sra_elt *fld;
2996 struct bitfield_overlap_info flp;
2997
2998 FOR_EACH_ACTUAL_CHILD (fld, elt)
2999 {
3000 tree flen, fpos;
3001
3002 if (!bitfield_overlaps_p (blen, bpos, fld, &flp))
3003 continue;
3004
3005 flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
3006 fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);
3007
3008 if (fld->replacement)
3009 {
3010 tree infld, invar, st, type;
3011
3012 infld = fld->replacement;
3013
3014 type = TREE_TYPE (infld);
3015 if (TYPE_PRECISION (type) != TREE_INT_CST_LOW (flen))
3016 type = lang_hooks.types.type_for_size (TREE_INT_CST_LOW (flen), 1);
3017
3018 if (TREE_CODE (infld) == BIT_FIELD_REF)
3019 {
3020 fpos = size_binop (PLUS_EXPR, fpos, TREE_OPERAND (infld, 2));
3021 infld = TREE_OPERAND (infld, 0);
3022 }
3023 else if (BYTES_BIG_ENDIAN && DECL_P (fld->element)
3024 && !tree_int_cst_equal (TYPE_SIZE (TREE_TYPE (infld)),
3025 DECL_SIZE (fld->element)))
3026 {
3027 fpos = size_binop (PLUS_EXPR, fpos,
3028 TYPE_SIZE (TREE_TYPE (infld)));
3029 fpos = size_binop (MINUS_EXPR, fpos,
3030 DECL_SIZE (fld->element));
3031 }
3032
3033 infld = fold_build3 (BIT_FIELD_REF, type, infld, flen, fpos);
3034 BIT_FIELD_REF_UNSIGNED (infld) = 1;
3035
3036 invar = size_binop (MINUS_EXPR, flp.field_pos, bpos);
3037 if (flp.overlap_pos)
3038 invar = size_binop (PLUS_EXPR, invar, flp.overlap_pos);
3039 invar = size_binop (PLUS_EXPR, invar, vpos);
3040
3041 invar = fold_build3 (BIT_FIELD_REF, type, var, flen, invar);
3042 BIT_FIELD_REF_UNSIGNED (invar) = 1;
3043
3044 if (to_var)
3045 st = sra_build_bf_assignment (invar, infld);
3046 else
3047 st = sra_build_bf_assignment (infld, invar);
3048
3049 append_to_statement_list (st, listp);
3050 }
3051 else
3052 {
3053 tree sub = size_binop (MINUS_EXPR, flp.field_pos, bpos);
3054 sub = size_binop (PLUS_EXPR, vpos, sub);
3055 if (flp.overlap_pos)
3056 sub = size_binop (PLUS_EXPR, sub, flp.overlap_pos);
3057
3058 sra_explode_bitfield_assignment (var, sub, to_var, listp,
3059 flen, fpos, fld);
3060 }
3061 }
3062 }
3063
3064 /* Add to LISTBEFOREP statements that copy scalarized members of ELT
3065 that overlap with BIT_FIELD_REF<(ELT->element), BLEN, BPOS> back
3066 into the full variable, and to LISTAFTERP, if non-NULL, statements
3067 that copy the (presumably modified) overlapping portions of the
3068 full variable back to the scalarized variables. */
3069
3070 static void
3071 sra_sync_for_bitfield_assignment (tree *listbeforep, tree *listafterp,
3072 tree blen, tree bpos,
3073 struct sra_elt *elt)
3074 {
3075 struct sra_elt *fld;
3076 struct bitfield_overlap_info flp;
3077
3078 FOR_EACH_ACTUAL_CHILD (fld, elt)
3079 if (bitfield_overlaps_p (blen, bpos, fld, &flp))
3080 {
3081 if (fld->replacement || (!flp.overlap_len && !flp.overlap_pos))
3082 {
3083 generate_copy_inout (fld, false, generate_element_ref (fld),
3084 listbeforep);
3085 mark_no_warning (fld);
3086 if (listafterp)
3087 generate_copy_inout (fld, true, generate_element_ref (fld),
3088 listafterp);
3089 }
3090 else
3091 {
3092 tree flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
3093 tree fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);
3094
3095 sra_sync_for_bitfield_assignment (listbeforep, listafterp,
3096 flen, fpos, fld);
3097 }
3098 }
3099 }
3100
3101 /* Scalarize a USE. To recap, this is either a simple reference to ELT,
3102 if ELT is scalar, or some occurrence of ELT that requires a complete
3103 aggregate. IS_OUTPUT is true if ELT is being modified. */
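/* For a fully replaced scalar this amounts to rewriting *EXPR_P in
   place to the replacement variable; a whole-aggregate use of a
   partially scalarized ELT is instead bracketed with the copy-in or
   copy-out statements generated below.  */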
3104
3105 static void
3106 scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
3107 bool is_output, bool use_all)
3108 {
3109 tree stmt = bsi_stmt (*bsi);
3110 tree bfexpr;
3111
3112 if (elt->replacement)
3113 {
3114 tree replacement = elt->replacement;
3115
3116 /* If we have a replacement, then updating the reference is as
3117 simple as modifying the existing statement in place. */
3118 if (is_output
3119 && TREE_CODE (elt->replacement) == BIT_FIELD_REF
3120 && is_gimple_reg (TREE_OPERAND (elt->replacement, 0))
3121 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3122 && &GIMPLE_STMT_OPERAND (stmt, 0) == expr_p)
3123 {
3124 tree newstmt = sra_build_elt_assignment
3125 (elt, GIMPLE_STMT_OPERAND (stmt, 1));
3126 if (TREE_CODE (newstmt) != STATEMENT_LIST)
3127 {
3128 tree list = NULL;
3129 append_to_statement_list (newstmt, &list);
3130 newstmt = list;
3131 }
3132 sra_replace (bsi, newstmt);
3133 return;
3134 }
3135 else if (!is_output
3136 && TREE_CODE (elt->replacement) == BIT_FIELD_REF
3137 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3138 && &GIMPLE_STMT_OPERAND (stmt, 1) == expr_p)
3139 {
3140 tree tmp = make_rename_temp
3141 (TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 0)), "SR");
3142 tree newstmt = sra_build_assignment (tmp, REPLDUP (elt->replacement));
3143
3144 if (TREE_CODE (newstmt) != STATEMENT_LIST)
3145 {
3146 tree list = NULL;
3147 append_to_statement_list (newstmt, &list);
3148 newstmt = list;
3149 }
3150 sra_insert_before (bsi, newstmt);
3151 replacement = tmp;
3152 }
3153 if (is_output)
3154 mark_all_v_defs (stmt);
3155 *expr_p = REPLDUP (replacement);
3156 update_stmt (stmt);
3157 }
3158 else if (use_all && is_output
3159 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3160 && TREE_CODE (bfexpr
3161 = GIMPLE_STMT_OPERAND (stmt, 0)) == BIT_FIELD_REF
3162 && &TREE_OPERAND (bfexpr, 0) == expr_p
3163 && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
3164 && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
3165 {
3166 tree listbefore = NULL, listafter = NULL;
3167 tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
3168 tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
3169 bool update = false;
3170
3171 if (!elt->use_block_copy)
3172 {
3173 tree type = TREE_TYPE (bfexpr);
3174 tree var = make_rename_temp (type, "SR"), tmp, st, vpos;
3175
3176 GIMPLE_STMT_OPERAND (stmt, 0) = var;
3177 update = true;
3178
3179 if (!TYPE_UNSIGNED (type))
3180 {
3181 type = unsigned_type_for (type);
3182 tmp = make_rename_temp (type, "SR");
3183 st = build_gimple_modify_stmt (tmp,
3184 fold_convert (type, var));
3185 append_to_statement_list (st, &listafter);
3186 var = tmp;
3187 }
3188
3189 /* If VAR is wider than BLEN bits, it is padded at the
3190 most-significant end. We want to set VPOS such that
3191 <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
3192 least-significant BLEN bits of VAR. */
3193 if (BYTES_BIG_ENDIAN)
3194 vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
3195 else
3196 vpos = bitsize_int (0);
3197 sra_explode_bitfield_assignment
3198 (var, vpos, false, &listafter, blen, bpos, elt);
3199 }
3200 else
3201 sra_sync_for_bitfield_assignment
3202 (&listbefore, &listafter, blen, bpos, elt);
3203
3204 if (listbefore)
3205 {
3206 mark_all_v_defs (listbefore);
3207 sra_insert_before (bsi, listbefore);
3208 }
3209 if (listafter)
3210 {
3211 mark_all_v_defs (listafter);
3212 sra_insert_after (bsi, listafter);
3213 }
3214
3215 if (update)
3216 update_stmt (stmt);
3217 }
3218 else if (use_all && !is_output
3219 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3220 && TREE_CODE (bfexpr
3221 = GIMPLE_STMT_OPERAND (stmt, 1)) == BIT_FIELD_REF
3222 && &TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0) == expr_p
3223 && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
3224 && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
3225 {
3226 tree list = NULL;
3227 tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
3228 tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
3229 bool update = false;
3230
3231 if (!elt->use_block_copy)
3232 {
3233 tree type = TREE_TYPE (bfexpr);
3234 tree var, vpos;
3235
3236 if (!TYPE_UNSIGNED (type))
3237 type = unsigned_type_for (type);
3238
3239 var = make_rename_temp (type, "SR");
3240
3241 append_to_statement_list (build_gimple_modify_stmt
3242 (var, build_int_cst_wide (type, 0, 0)),
3243 &list);
3244
3245 /* If VAR is wider than BLEN bits, it is padded at the
3246 most-significant end. We want to set VPOS such that
3247 <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
3248 least-significant BLEN bits of VAR. */
3249 if (BYTES_BIG_ENDIAN)
3250 vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
3251 else
3252 vpos = bitsize_int (0);
3253 sra_explode_bitfield_assignment
3254 (var, vpos, true, &list, blen, bpos, elt);
3255
3256 GIMPLE_STMT_OPERAND (stmt, 1) = var;
3257 update = true;
3258 }
3259 else
3260 sra_sync_for_bitfield_assignment
3261 (&list, NULL, blen, bpos, elt);
3262
3263 if (list)
3264 {
3265 mark_all_v_defs (list);
3266 sra_insert_before (bsi, list);
3267 }
3268
3269 if (update)
3270 update_stmt (stmt);
3271 }
3272 else
3273 {
3274 tree list = NULL;
3275
3276 /* Otherwise we need some copies. If ELT is being read, then we
3277 want to store all (modified) sub-elements back into the
3278 structure before the reference takes place. If ELT is being
3279 written, then we want to load the changed values back into
3280 our shadow variables. */
3281 /* ??? We don't check modified for reads, we just always write all of
3282 the values. We should be able to record the SSA number of the VOP
3283 for which the values were last read. If that number matches the
3284 SSA number of the VOP in the current statement, then we needn't
3285 emit an assignment. This would also eliminate double writes when
3286 a structure is passed as more than one argument to a function call.
3287 This optimization would be most effective if sra_walk_function
3288 processed the blocks in dominator order. */
3289
3290 generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
3291 if (list == NULL)
3292 return;
3293 mark_all_v_defs (list);
3294 if (is_output)
3295 sra_insert_after (bsi, list);
3296 else
3297 {
3298 sra_insert_before (bsi, list);
3299 if (use_all)
3300 mark_no_warning (elt);
3301 }
3302 }
3303 }
3304
3305 /* Scalarize a COPY. To recap, this is an assignment statement between
3306 two scalarizable references, LHS_ELT and RHS_ELT. */
3307
3308 static void
3309 scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
3310 block_stmt_iterator *bsi)
3311 {
3312 tree list, stmt;
3313
3314 if (lhs_elt->replacement && rhs_elt->replacement)
3315 {
3316 /* If we have two scalar operands, modify the existing statement. */
3317 stmt = bsi_stmt (*bsi);
3318
3319 /* See the commentary in sra_walk_function concerning
3320 RETURN_EXPR, and why we should never see one here. */
3321 gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
3322
3323 GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
3324 GIMPLE_STMT_OPERAND (stmt, 1) = REPLDUP (rhs_elt->replacement);
3325 update_stmt (stmt);
3326 }
3327 else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
3328 {
3329 /* If either side requires a block copy, then sync the RHS back
3330 to the original structure, leave the original assignment
3331 statement (which will perform the block copy), then load the
3332 LHS values out of its now-updated original structure. */
3333 /* ??? Could perform a modified pair-wise element copy. That
3334 would at least allow those elements that are instantiated in
3335 both structures to be optimized well. */
3336
3337 list = NULL;
3338 generate_copy_inout (rhs_elt, false,
3339 generate_element_ref (rhs_elt), &list);
3340 if (list)
3341 {
3342 mark_all_v_defs (list);
3343 sra_insert_before (bsi, list);
3344 }
3345
3346 list = NULL;
3347 generate_copy_inout (lhs_elt, true,
3348 generate_element_ref (lhs_elt), &list);
3349 if (list)
3350 {
3351 mark_all_v_defs (list);
3352 sra_insert_after (bsi, list);
3353 }
3354 }
3355 else
3356 {
3357 /* Otherwise both sides must be fully instantiated. In which
3358 case perform pair-wise element assignments and replace the
3359 original block copy statement. */
3360
3361 stmt = bsi_stmt (*bsi);
3362 mark_all_v_defs (stmt);
3363
3364 list = NULL;
3365 generate_element_copy (lhs_elt, rhs_elt, &list);
3366 gcc_assert (list);
3367 mark_all_v_defs (list);
3368 sra_replace (bsi, list);
3369 }
3370 }
3371
3372 /* Scalarize an INIT. To recap, this is an assignment to a scalarizable
3373 reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
3374 COMPLEX_EXPR. If RHS is NULL, it should be treated as an empty
3375 CONSTRUCTOR. */
3376
3377 static void
3378 scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
3379 {
3380 bool result = true;
3381 tree list = NULL, init_list = NULL;
3382
3383 /* Generate initialization statements for all members extant in the RHS. */
3384 if (rhs)
3385 {
3386 /* Unshare the expression just in case this is from a decl's initial. */
3387 rhs = unshare_expr (rhs);
3388 result = generate_element_init (lhs_elt, rhs, &init_list);
3389 }
3390
3391 /* CONSTRUCTOR is defined such that any member not mentioned is assigned
3392 a zero value. Initialize the rest of the instantiated elements. */
3393 generate_element_zero (lhs_elt, &list);
3394 append_to_statement_list (init_list, &list);
3395
3396 if (!result)
3397 {
3398 /* If we failed to convert the entire initializer, then we must
3399 leave the structure assignment in place and must load values
3400 from the structure into the slots for which we did not find
3401 constants. The easiest way to do this is to generate a complete
3402 copy-out, and then follow that with the constant assignments
3403 that we were able to build. DCE will clean things up. */
3404 tree list0 = NULL;
3405 generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
3406 &list0);
3407 append_to_statement_list (list, &list0);
3408 list = list0;
3409 }
3410
3411 if (lhs_elt->use_block_copy || !result)
3412 {
3413 /* Since LHS is not fully instantiated, we must leave the structure
3414 assignment in place. Treating this case differently from a USE
3415 exposes constants to later optimizations. */
3416 if (list)
3417 {
3418 mark_all_v_defs (list);
3419 sra_insert_after (bsi, list);
3420 }
3421 }
3422 else
3423 {
3424 /* The LHS is fully instantiated. The list of initializations
3425 replaces the original structure assignment. */
3426 gcc_assert (list);
3427 mark_all_v_defs (bsi_stmt (*bsi));
3428 mark_all_v_defs (list);
3429 sra_replace (bsi, list);
3430 }
3431 }
3432
3433 /* A subroutine of scalarize_ldst called via walk_tree. Set TREE_THIS_NOTRAP
3434 on all INDIRECT_REFs. */
3435
3436 static tree
3437 mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3438 {
3439 tree t = *tp;
3440
3441 if (TREE_CODE (t) == INDIRECT_REF)
3442 {
3443 TREE_THIS_NOTRAP (t) = 1;
3444 *walk_subtrees = 0;
3445 }
3446 else if (IS_TYPE_OR_DECL_P (t))
3447 *walk_subtrees = 0;
3448
3449 return NULL;
3450 }
3451
3452 /* Scalarize a LDST. To recap, this is an assignment between one scalarizable
3453 reference ELT and one non-scalarizable reference OTHER. IS_OUTPUT is true
3454 if ELT is on the left-hand side. */
3455
3456 static void
3457 scalarize_ldst (struct sra_elt *elt, tree other,
3458 block_stmt_iterator *bsi, bool is_output)
3459 {
3460 /* Shouldn't have gotten called for a scalar. */
3461 gcc_assert (!elt->replacement);
3462
3463 if (elt->use_block_copy)
3464 {
3465 /* Since ELT is not fully instantiated, we have to leave the
3466 block copy in place. Treat this as a USE. */
3467 scalarize_use (elt, NULL, bsi, is_output, false);
3468 }
3469 else
3470 {
3471 /* The interesting case is when ELT is fully instantiated. In this
3472 case we can have each element stored/loaded directly to/from the
3473 corresponding slot in OTHER. This avoids a block copy. */
3474
3475 tree list = NULL, stmt = bsi_stmt (*bsi);
3476
3477 mark_all_v_defs (stmt);
3478 generate_copy_inout (elt, is_output, other, &list);
3479 gcc_assert (list);
3480 mark_all_v_defs (list);
3481
3482 /* Preserve EH semantics. */
3483 if (stmt_ends_bb_p (stmt))
3484 {
3485 tree_stmt_iterator tsi;
3486 tree first, blist = NULL;
3487 bool thr = tree_could_throw_p (stmt);
3488
3489 /* If the last statement of this BB created an EH edge
3490 before scalarization, we have to locate the first
3491 statement that can throw in the new statement list and
3492 use that as the last statement of this BB, such that EH
3493 semantics is preserved. All statements up to this one
3494 are added to the same BB. All other statements in the
3495 list will be added to normal outgoing edges of the same
3496 BB. If they access any memory, it's the same memory, so
3497 we can assume they won't throw. */
3498 tsi = tsi_start (list);
3499 for (first = tsi_stmt (tsi);
3500 thr && !tsi_end_p (tsi) && !tree_could_throw_p (first);
3501 first = tsi_stmt (tsi))
3502 {
3503 tsi_delink (&tsi);
3504 append_to_statement_list (first, &blist);
3505 }
3506
3507 /* Extract the first remaining statement from LIST, this is
3508 the EH statement if there is one. */
3509 tsi_delink (&tsi);
3510
3511 if (blist)
3512 sra_insert_before (bsi, blist);
3513
3514 /* Replace the old statement with this new representative. */
3515 bsi_replace (bsi, first, true);
3516
3517 if (!tsi_end_p (tsi))
3518 {
3519 /* If any reference would trap, then they all would. And more
3520 to the point, the first would. Therefore none of the rest
3521 will trap since the first didn't. Indicate this by
3522 iterating over the remaining statements and setting
3523 TREE_THIS_NOTRAP in all INDIRECT_REFs. */
3524 do
3525 {
3526 walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
3527 tsi_next (&tsi);
3528 }
3529 while (!tsi_end_p (tsi));
3530
3531 insert_edge_copies (list, bsi->bb);
3532 }
3533 }
3534 else
3535 sra_replace (bsi, list);
3536 }
3537 }
3538
3539 /* Generate initializations for all scalarizable parameters. */
3540
3541 static void
3542 scalarize_parms (void)
3543 {
3544 tree list = NULL;
3545 unsigned i;
3546 bitmap_iterator bi;
3547
3548 EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
3549 {
3550 tree var = referenced_var (i);
3551 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
3552 generate_copy_inout (elt, true, var, &list);
3553 }
3554
3555 if (list)
3556 {
3557 insert_edge_copies (list, ENTRY_BLOCK_PTR);
3558 mark_all_v_defs (list);
3559 }
3560 }
3561
3562 /* Entry point to phase 4. Update the function to match replacements. */
3563
3564 static void
3565 scalarize_function (void)
3566 {
3567 static const struct sra_walk_fns fns = {
3568 scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
3569 };
3570
3571 sra_walk_function (&fns);
3572 scalarize_parms ();
3573 bsi_commit_edge_inserts ();
3574 }
3575
3576 \f
3577 /* Debug helper function. Print ELT in a nice human-readable format. */
3578
3579 static void
3580 dump_sra_elt_name (FILE *f, struct sra_elt *elt)
3581 {
3582 if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
3583 {
3584 fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
3585 dump_sra_elt_name (f, elt->parent);
3586 }
3587 else
3588 {
3589 if (elt->parent)
3590 dump_sra_elt_name (f, elt->parent);
3591 if (DECL_P (elt->element))
3592 {
3593 if (TREE_CODE (elt->element) == FIELD_DECL)
3594 fputc ('.', f);
3595 print_generic_expr (f, elt->element, dump_flags);
3596 }
3597 else if (TREE_CODE (elt->element) == BIT_FIELD_REF)
3598 fprintf (f, "$B" HOST_WIDE_INT_PRINT_DEC "F" HOST_WIDE_INT_PRINT_DEC,
3599 tree_low_cst (TREE_OPERAND (elt->element, 2), 1),
3600 tree_low_cst (TREE_OPERAND (elt->element, 1), 1));
3601 else if (TREE_CODE (elt->element) == RANGE_EXPR)
3602 fprintf (f, "["HOST_WIDE_INT_PRINT_DEC".."HOST_WIDE_INT_PRINT_DEC"]",
3603 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
3604 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
3605 else
3606 fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
3607 TREE_INT_CST_LOW (elt->element));
3608 }
3609 }
3610
3611 /* Likewise, but callable from the debugger. */
3612
3613 void
3614 debug_sra_elt_name (struct sra_elt *elt)
3615 {
3616 dump_sra_elt_name (stderr, elt);
3617 fputc ('\n', stderr);
3618 }
3619
3620 void
3621 sra_init_cache (void)
3622 {
3623 if (sra_type_decomp_cache)
3624 return;
3625
3626 sra_type_decomp_cache = BITMAP_ALLOC (NULL);
3627 sra_type_inst_cache = BITMAP_ALLOC (NULL);
3628 }
3629
3630 /* Main entry point. */
3631
3632 static unsigned int
3633 tree_sra (void)
3634 {
3635 /* Initialize local variables. */
3636 todoflags = 0;
3637 gcc_obstack_init (&sra_obstack);
3638 sra_candidates = BITMAP_ALLOC (NULL);
3639 needs_copy_in = BITMAP_ALLOC (NULL);
3640 sra_init_cache ();
3641 sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);
3642
3643 /* Scan. If we find anything, instantiate and scalarize. */
3644 if (find_candidates_for_sra ())
3645 {
3646 scan_function ();
3647 decide_instantiations ();
3648 scalarize_function ();
3649 if (!bitmap_empty_p (sra_candidates))
3650 todoflags |= TODO_rebuild_alias;
3651 }
3652
3653 /* Free allocated memory. */
3654 htab_delete (sra_map);
3655 sra_map = NULL;
3656 BITMAP_FREE (sra_candidates);
3657 BITMAP_FREE (needs_copy_in);
3658 BITMAP_FREE (sra_type_decomp_cache);
3659 BITMAP_FREE (sra_type_inst_cache);
3660 obstack_free (&sra_obstack, NULL);
3661 return todoflags;
3662 }
3663
3664 static unsigned int
3665 tree_sra_early (void)
3666 {
3667 unsigned int ret;
3668
3669 early_sra = true;
3670 ret = tree_sra ();
3671 early_sra = false;
3672
3673 return ret & ~TODO_rebuild_alias;
3674 }
3675
3676 static bool
3677 gate_sra (void)
3678 {
3679 return flag_tree_sra != 0;
3680 }
3681
3682 struct tree_opt_pass pass_sra_early =
3683 {
3684 "esra", /* name */
3685 gate_sra, /* gate */
3686 tree_sra_early, /* execute */
3687 NULL, /* sub */
3688 NULL, /* next */
3689 0, /* static_pass_number */
3690 TV_TREE_SRA, /* tv_id */
3691 PROP_cfg | PROP_ssa, /* properties_required */
3692 0, /* properties_provided */
3693 0, /* properties_destroyed */
3694 0, /* todo_flags_start */
3695 TODO_dump_func
3696 | TODO_update_ssa
3697 | TODO_ggc_collect
3698 | TODO_verify_ssa, /* todo_flags_finish */
3699 0 /* letter */
3700 };
3701
3702 struct tree_opt_pass pass_sra =
3703 {
3704 "sra", /* name */
3705 gate_sra, /* gate */
3706 tree_sra, /* execute */
3707 NULL, /* sub */
3708 NULL, /* next */
3709 0, /* static_pass_number */
3710 TV_TREE_SRA, /* tv_id */
3711 PROP_cfg | PROP_ssa, /* properties_required */
3712 0, /* properties_provided */
3713 0, /* properties_destroyed */
3714 0, /* todo_flags_start */
3715 TODO_dump_func
3716 | TODO_update_ssa
3717 | TODO_ggc_collect
3718 | TODO_verify_ssa, /* todo_flags_finish */
3719 0 /* letter */
3720 };