* tree-sra.c (bitfield_overlaps_p): Fix oversight.
/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"

/* The objective of this pass is to replace a non-addressable aggregate with
   a set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

   (1) Identify variables that have types that are candidates for
       decomposition.

   (2) Scan the function looking for the ways these variables are used.
       In particular we're interested in the number of times a variable
       (or member) is needed as a complete unit, and the number of times
       a variable (or member) is copied.

   (3) Based on the usage profile, instantiate substitution variables.

   (4) Scan the function making replacements.
*/
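
/* A sketch of the transformation this pass performs (illustrative only;
   the code below is hypothetical, not from this file):

     struct pair { int x; int y; } p;
     p.x = a;
     p.y = b;
     c = p.x + p.y;

   becomes, once P's members are replaced by independent scalars,

     int p$x, p$y;
     p$x = a;
     p$y = b;
     c = p$x + p$y;

   after which the aggregate P itself may be deleted as unreferenced.  */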


/* True if this is the "early" pass, before inlining.  */
static bool early_sra;

/* The set of todo flags to return from tree_sra.  */
static unsigned int todoflags;

/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;

/* One of these structures is created for each candidate aggregate and
   each (accessed) member or group of members of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *groups;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of an
     ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR.  In the case
     of a complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if this element is a group of members of its parent.  */
  bool is_group;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* True if everything under this element has been marked TREE_NO_WARNING.  */
  bool all_no_warning;

  /* A flag for use with/after random access traversals.  */
  bool visited;
  /* True if a BIT_FIELD_REF with a vector operand appears on the LHS.  */
  bool is_vector_lhs;

  /* 1 if the element is a field that is part of a block, 2 if the field
     is the block itself, 0 if it's neither.  */
  char in_bitfld_block;
};

#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)

#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT)                       \
  for ((CHILD) = (ELT)->is_group                                \
                 ? next_child_for_group (NULL, (ELT))           \
                 : (ELT)->children;                             \
       (CHILD);                                                 \
       (CHILD) = (ELT)->is_group                                \
                 ? next_child_for_group ((CHILD), (ELT))        \
                 : (CHILD)->sibling)

/* Helper function for the above macro.  Return the next child of GROUP.  */
static struct sra_elt *
next_child_for_group (struct sra_elt *child, struct sra_elt *group)
{
  gcc_assert (group->is_group);

  /* Find the next child in the parent.  */
  if (child)
    child = child->sibling;
  else
    child = group->parent->children;

  /* Skip siblings that do not belong to the group.  */
  while (child)
    {
      tree g_elt = group->element;
      if (TREE_CODE (g_elt) == RANGE_EXPR)
        {
          if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
              && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
            break;
        }
      else
        gcc_unreachable ();

      child = child->sibling;
    }

  return child;
}
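
/* Illustrative use of the iterator above (a sketch; PROCESS stands for
   a hypothetical per-child action):

     struct sra_elt *c;
     FOR_EACH_ACTUAL_CHILD (c, elt)
       process (c);

   For a group element, this visits only those siblings in the group's
   parent whose (constant) indices fall within the group's RANGE_EXPR.  */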

/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);
static gimple_seq sra_build_assignment (tree dst, tree src);
static void mark_all_v_defs_seq (gimple_seq);
static void mark_all_v_defs_stmt (gimple);

\f
/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
          || code == FIXED_POINT_TYPE
          || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
          || code == POINTER_TYPE || code == OFFSET_TYPE
          || code == REFERENCE_TYPE);
}

/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces, it can be done.  */

bool
sra_type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
        bool saw_one_field = false;

        for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
          if (TREE_CODE (t) == FIELD_DECL)
            {
              /* Reject incorrectly represented bit fields.  */
              if (DECL_BIT_FIELD (t)
                  && INTEGRAL_TYPE_P (TREE_TYPE (t))
                  && (tree_low_cst (DECL_SIZE (t), 1)
                      != TYPE_PRECISION (TREE_TYPE (t))))
                goto fail;

              saw_one_field = true;
            }

        /* Record types must have at least one field.  */
        if (!saw_one_field)
          goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
        goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
        goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
        goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}
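
/* Worked examples for the rules above (illustrative only):

     struct s { int i; float f; };    decomposable record
     int a[4];                        array with fixed bounds: decomposable
     union u { int i; float f; };     rejected by the default case
     struct e { };                    rejected: zero size / no fields

   A variable-sized type is rejected up front because its TYPE_SIZE is
   not an INTEGER_CST.  */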

/* Return true if TYPE is one of the available va_list types.
   Otherwise return false.
   Note that for multiple calling conventions there can be more
   than just one va_list type present.  */

static bool
is_va_list_type (tree type)
{
  tree h;

  if (type == NULL_TREE)
    return false;
  h = targetm.canonical_va_list_type (type);
  if (h == NULL_TREE)
    return false;
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (h))
    return true;
  return false;
}

/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Cannot scalarize variable ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, " because it must live in memory\n");
        }
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Cannot scalarize variable ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, " because it is declared volatile\n");
        }
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Cannot scalarize variable ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, " because its type cannot be decomposed\n");
        }
      return false;
    }

  /* HACK: if we decompose a va_list_type_node before inlining, then we'll
     confuse tree-stdarg.c, and we won't be able to figure out which and
     how many arguments are accessed.  This really should be improved in
     tree-stdarg.c, as the decomposition is truly a win.  This could also
     be fixed if the stdarg pass ran early, but this can't be done until
     we have aliasing information early too.  See PR 30791.  */
  if (early_sra && is_va_list_type (TREE_TYPE (var)))
    return false;

  return true;
}

/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!sra_type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
        unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
        tree f;

        if (bitmap_bit_p (sra_type_inst_cache, cache+0))
          return true;
        if (bitmap_bit_p (sra_type_inst_cache, cache+1))
          return false;

        for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
                {
                  bitmap_set_bit (sra_type_inst_cache, cache+1);
                  return false;
                }
            }

        bitmap_set_bit (sra_type_inst_cache, cache+0);
        return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Return true if ELT and all of its sub-elements can be scalarized,
   i.e. neither ELT nor anything below it is marked cannot_scalarize.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  for (c = elt->groups; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}

\f
/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case RANGE_EXPR:
      h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
         lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    case BIT_FIELD_REF:
      /* Don't take operand 0 into account, that's our parent.  */
      h = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 2), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}

/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *const e = (const struct sra_elt *) x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything except bitfield blocks back up the
     chain.  Given that chain lengths are rarely very long, this
     should be acceptable.  If we truly identify this as a performance
     problem, it should work to hash the pointer value
     "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    if (!p->in_bitfld_block)
      h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}

/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *const a = (const struct sra_elt *) x;
  const struct sra_elt *const b = (const struct sra_elt *) y;
  tree ae, be;
  const struct sra_elt *ap = a->parent;
  const struct sra_elt *bp = b->parent;

  if (ap)
    while (ap->in_bitfld_block)
      ap = ap->parent;
  if (bp)
    while (bp->in_bitfld_block)
      bp = bp->parent;

  if (ap != bp)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case RANGE_EXPR:
      return
        tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
        && tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
         compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
        return false;
      return fields_compatible_p (ae, be);

    case BIT_FIELD_REF:
      return
        tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1))
        && tree_int_cst_equal (TREE_OPERAND (ae, 2), TREE_OPERAND (be, 2));

    default:
      gcc_unreachable ();
    }
}

/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
                enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  if (parent)
    dummy.parent = parent->is_group ? parent->parent : parent;
  else
    dummy.parent = NULL;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = XOBNEW (&sra_obstack, struct sra_elt);
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
        {
          if (IS_ELEMENT_FOR_GROUP (elt->element))
            {
              elt->is_group = true;
              elt->sibling = parent->groups;
              parent->groups = elt;
            }
          else
            {
              elt->sibling = parent->children;
              parent->children = elt;
            }
        }

      /* If this is a parameter, then if we want to scalarize, we have
         one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
        {
          elt->n_copies = 1;
          bitmap_set_bit (needs_copy_in, DECL_UID (child));
        }
    }

  return elt;
}

/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
        return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (in_array_bounds_p (expr))
        child = TREE_OPERAND (expr, 1);
      else
        return NULL;
      break;

    case ARRAY_RANGE_REF:
      /* We can't scalarize variable array indices.  */
      if (range_in_array_bounds_p (expr))
        {
          tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
          child = build2 (RANGE_EXPR, integer_type_node,
                          TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
        }
      else
        return NULL;
      break;

    case COMPONENT_REF:
      {
        tree type = TREE_TYPE (TREE_OPERAND (expr, 0));
        /* Don't look through unions.  */
        if (TREE_CODE (type) != RECORD_TYPE)
          return NULL;
        /* Neither through variable-sized records.  */
        if (TYPE_SIZE (type) == NULL_TREE
            || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
          return NULL;
        child = TREE_OPERAND (expr, 1);
      }
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
  return NULL;
}
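
/* An illustrative picture of the element tree built here (a sketch):
   for a candidate

     struct { _Complex float c; int a[2]; } s;

   the reference "s.a[1]" yields the chain

     element == VAR_DECL "s"              (root)
       element == FIELD_DECL "a"          (from the COMPONENT_REF)
         element == INTEGER_CST 1         (from the ARRAY_REF)

   while "__imag s.c" would end in integer_one_node below the
   FIELD_DECL for "c".  */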

\f
/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *GSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  USE_ALL is true if we saw something we
     couldn't quite identify and had to force the use of the entire object.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
               gimple_stmt_iterator *gsi, bool is_output, bool use_all);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
                gimple_stmt_iterator *gsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, gimple_stmt_iterator *gsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER without side-effects.
     IS_OUTPUT is true if ELT is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
                gimple_stmt_iterator *gsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};

#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
                         void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
        return t;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL;
}
#endif

/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, gimple_stmt_iterator *gsi, bool is_output,
               const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;
  bool use_all_p = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
        /* If there is a scalarizable decl at the bottom, then process it.  */
        if (is_sra_candidate_decl (inner))
          {
            struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
            if (disable_scalarization)
              elt->cannot_scalarize = true;
            else
              fns->use (elt, expr_p, gsi, is_output, use_all_p);
          }
        return;

      case ARRAY_REF:
        /* Non-constant index means any member may be accessed.  Prevent the
           expression from being scalarized.  If we were to treat this as a
           reference to the whole array, we can wind up with a single dynamic
           index reference inside a loop being overridden by several constant
           index references during loop setup.  It's possible that this could
           be avoided by using dynamic usage counts based on BB trip counts
           (based on loop analysis or profiling), but that hardly seems worth
           the effort.  */
        /* ??? Hack.  Figure out how to push this into the scan routines
           without duplicating too much code.  */
        if (!in_array_bounds_p (inner))
          {
            disable_scalarization = true;
            goto use_all;
          }
        /* ??? Are we assured that non-constant bounds and stride will have
           the same value everywhere?  I don't think Fortran will...  */
        if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
          goto use_all;
        inner = TREE_OPERAND (inner, 0);
        break;

      case ARRAY_RANGE_REF:
        if (!range_in_array_bounds_p (inner))
          {
            disable_scalarization = true;
            goto use_all;
          }
        /* ??? See the comment above about non-constant bounds and stride.  */
        if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
          goto use_all;
        inner = TREE_OPERAND (inner, 0);
        break;

      case COMPONENT_REF:
        {
          tree type = TREE_TYPE (TREE_OPERAND (inner, 0));
          /* Don't look through unions.  */
          if (TREE_CODE (type) != RECORD_TYPE)
            goto use_all;
          /* Neither through variable-sized records.  */
          if (TYPE_SIZE (type) == NULL_TREE
              || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
            goto use_all;
          inner = TREE_OPERAND (inner, 0);
        }
        break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
        inner = TREE_OPERAND (inner, 0);
        break;

      case BIT_FIELD_REF:
        /* A bit-field reference to a specific vector is scalarized, but one
           used as an output needs to be noted here, so that when we
           scalarize the vector we can mark its replacement variable as
           non-renamable.  */
        if (is_output
            && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
          {
            struct sra_elt *elt
              = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
            if (elt)
              elt->is_vector_lhs = true;
          }

        /* A bit field reference (access to *multiple* fields simultaneously)
           is not currently scalarized.  Consider this an access to the full
           outer element, to which walk_tree will bring us next.  */
        goto use_all;

      CASE_CONVERT:
        /* Similarly, a nop conversion explicitly wants to look at an object
           in a type other than the one we've scalarized.  */
        goto use_all;

      case VIEW_CONVERT_EXPR:
        /* Likewise for a view conversion, but with an additional twist:
           it can be on the LHS and, in this case, an access to the full
           outer element would mean a killing def.  So we need to punt
           if we haven't already a full access to the current element,
           because we cannot pretend to have a killing def if we only
           have a partial access at some level.  */
        if (is_output && !use_all_p && inner != expr)
          disable_scalarization = true;
        goto use_all;

      case WITH_SIZE_EXPR:
        /* This is a transparent wrapper.  The entire inner expression really
           is being used.  */
        goto use_all;

      use_all:
        expr_p = &TREE_OPERAND (inner, 0);
        inner = expr = *expr_p;
        use_all_p = true;
        break;

      default:
#ifdef ENABLE_CHECKING
        /* Validate that we're not missing any references.  */
        gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
        return;
      }
}
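
/* How the walk above degrades to "use the whole object" (a sketch),
   for a candidate S with array member A:

     s.f            -> fns->use on the element for s.f
     s.a[i]         -> variable index: scalarization of S is disabled
     VIEW_CONVERT_EXPR <T> (s)
                    -> fns->use on all of S, with USE_ALL set

   Resetting EXPR at each use_all step makes the reference look like a
   use of the containing object as a whole.  */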

/* Walk the arguments of a GIMPLE_CALL looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_gimple_call (gimple stmt, gimple_stmt_iterator *gsi,
                      const struct sra_walk_fns *fns)
{
  int i;
  int nargs = gimple_call_num_args (stmt);

  for (i = 0; i < nargs; i++)
    sra_walk_expr (gimple_call_arg_ptr (stmt, i), gsi, false, fns);

  if (gimple_call_lhs (stmt))
    sra_walk_expr (gimple_call_lhs_ptr (stmt), gsi, true, fns);
}

/* Walk the inputs and outputs of a GIMPLE_ASM looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_gimple_asm (gimple stmt, gimple_stmt_iterator *gsi,
                     const struct sra_walk_fns *fns)
{
  size_t i;
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    sra_walk_expr (&TREE_VALUE (gimple_asm_input_op (stmt, i)), gsi, false, fns);
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    sra_walk_expr (&TREE_VALUE (gimple_asm_output_op (stmt, i)), gsi, true, fns);
}

/* Walk a GIMPLE_ASSIGN and categorize the assignment appropriately.  */

static void
sra_walk_gimple_assign (gimple stmt, gimple_stmt_iterator *gsi,
                        const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt = NULL, *rhs_elt = NULL;
  tree lhs, rhs;

  /* If there is more than one operand on the RHS, only walk the LHS.  */
  if (!gimple_assign_single_p (stmt))
    {
      sra_walk_expr (gimple_assign_lhs_ptr (stmt), gsi, true, fns);
      return;
    }

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, gsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
        fns->ldst (rhs_elt, lhs, gsi, false);
      else
        fns->use (rhs_elt, gimple_assign_rhs1_ptr (stmt), gsi, false, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    sra_walk_expr (gimple_assign_rhs1_ptr (stmt), gsi, false, fns);

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle the RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
         we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
          || TREE_CODE (rhs) == COMPLEX_CST
          || TREE_CODE (rhs) == CONSTRUCTOR)
        fns->init (lhs_elt, rhs, gsi);

      /* If this is an assignment from read-only memory, treat this as if
         we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
               && TREE_STATIC (rhs)
               && TREE_READONLY (rhs)
               && targetm.binds_local_p (rhs))
        fns->init (lhs_elt, DECL_INITIAL (rhs), gsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
         The lvalue requirement prevents us from trying to directly scalarize
         the result of a function call, which would result in calling the
         function multiple times, among other evil things.  */
      else if (!lhs_elt->is_scalar
               && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
        fns->ldst (lhs_elt, rhs, gsi, true);

      /* Otherwise we're being used in some context that requires the
         aggregate to be seen as a whole.  Invoke USE.  */
      else
        fns->use (lhs_elt, gimple_assign_lhs_ptr (stmt), gsi, true, false);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (gimple_assign_lhs_ptr (stmt), gsi, true, fns);
}
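
/* How single assignments are categorized (a sketch), for candidate
   aggregates A and B and a non-scalarizable reference *P:

     a = b;      both sides scalarizable          -> fns->copy
     a = {};     CONSTRUCTOR on the RHS           -> fns->init
     a = *p;     load into scalarizable LHS       -> fns->ldst, IS_OUTPUT true
     *p = a;     store from scalarizable RHS      -> fns->ldst, IS_OUTPUT false
     a.x = 5;    scalar element needed as a unit  -> fns->use  */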

/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  gimple_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit from walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = gsi_start_bb (bb); !gsi_end_p (si); si = ni)
      {
        gimple stmt;

        stmt = gsi_stmt (si);

        ni = si;
        gsi_next (&ni);

        /* If the statement has no virtual operands, then it doesn't
           make any structure references that we care about.  */
        if (gimple_aliases_computed_p (cfun)
            && ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
          continue;

        switch (gimple_code (stmt))
          {
          case GIMPLE_RETURN:
            /* If we have "return <retval>" then the return value is
               already exposed for our pleasure.  Walk it as a USE to
               force all the components back in place for the return.  */
            if (gimple_return_retval (stmt) == NULL_TREE)
              ;
            else
              sra_walk_expr (gimple_return_retval_ptr (stmt), &si, false,
                             fns);
            break;

          case GIMPLE_ASSIGN:
            sra_walk_gimple_assign (stmt, &si, fns);
            break;
          case GIMPLE_CALL:
            sra_walk_gimple_call (stmt, &si, fns);
            break;
          case GIMPLE_ASM:
            sra_walk_gimple_asm (stmt, &si, fns);
            break;

          default:
            break;
          }
      }
}
\f
/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  bool any_set = false;
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (decl_can_be_decomposed_p (var))
        {
          bitmap_set_bit (sra_candidates, DECL_UID (var));
          any_set = true;
        }
    }

  return any_set;
}

\f
/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
          gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
          bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
           gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
           gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
           gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
           bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}

/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);

  for (c = elt->groups; c ; c = c->sibling)
    scan_dump (c);
}

/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };
  bitmap_iterator bi;

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
        {
          tree var = referenced_var (i);
          struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
          if (elt)
            scan_dump (elt);
        }
      fputc ('\n', dump_file);
    }
}
\f
/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
        {
          if (elt->element == integer_zero_node)
            obstack_grow (&sra_obstack, "real", 4);
          else
            obstack_grow (&sra_obstack, "imag", 4);
          return;
        }
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else if (TREE_CODE (t) == BIT_FIELD_REF)
    {
      sprintf (buffer, "B" HOST_WIDE_INT_PRINT_DEC,
               tree_low_cst (TREE_OPERAND (t, 2), 1));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
      sprintf (buffer, "F" HOST_WIDE_INT_PRINT_DEC,
               tree_low_cst (TREE_OPERAND (t, 1), 1));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
        obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
                      IDENTIFIER_LENGTH (name));
      else
        {
          sprintf (buffer, "D%u", DECL_UID (t));
          obstack_grow (&sra_obstack, buffer, strlen (buffer));
        }
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return XOBFINISH (&sra_obstack, char *);
}
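
/* A sketch of the names this produces (illustrative): for a variable
   "s" whose complex field "c" is replaced, the real part is named
   "s$c$real"; array element q[3] yields "q$3"; and a bit-field block
   of width 16 at bit 32 contributes "B32F16".  The names exist only
   for dumps and debug information.  */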

/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;
  bool nowarn = TREE_NO_WARNING (elt->element);

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    if (!nowarn)
      nowarn = TREE_NO_WARNING (base_elt->parent->element);
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");

  if (DECL_P (elt->element)
      && !tree_int_cst_equal (DECL_SIZE (var), DECL_SIZE (elt->element)))
    {
      DECL_SIZE (var) = DECL_SIZE (elt->element);
      DECL_SIZE_UNIT (var) = DECL_SIZE_UNIT (elt->element);

      elt->in_bitfld_block = 1;
      elt->replacement = fold_build3 (BIT_FIELD_REF, elt->type, var,
                                      DECL_SIZE (var),
                                      BYTES_BIG_ENDIAN
                                      ? size_binop (MINUS_EXPR,
                                                    TYPE_SIZE (elt->type),
                                                    DECL_SIZE (var))
                                      : bitsize_int (0));
    }

  /* For vectors, if used on the left hand side with BIT_FIELD_REF,
     they are not a gimple register.  */
  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
    DECL_GIMPLE_REG_P (var) = 0;

  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  DECL_ARTIFICIAL (var) = 1;

  if (TREE_THIS_VOLATILE (elt->type))
    {
      TREE_THIS_VOLATILE (var) = 1;
      TREE_SIDE_EFFECTS (var) = 1;
    }

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
      DECL_DEBUG_EXPR_IS_FROM (var) = 1;

      DECL_IGNORED_P (var) = 0;
      TREE_NO_WARNING (var) = nowarn;
    }
  else
    {
      DECL_IGNORED_P (var) = 1;
      /* ??? We can't generate any warning that would be meaningful.  */
      TREE_NO_WARNING (var) = 1;
    }

  /* Zero-initialize bit-field scalarization variables, to avoid
     triggering undefined behavior.  */
  if (TREE_CODE (elt->element) == BIT_FIELD_REF
      || (var != elt->replacement
          && TREE_CODE (elt->replacement) == BIT_FIELD_REF))
    {
      gimple_seq init = sra_build_assignment (var,
                                              fold_convert (TREE_TYPE (var),
                                                            integer_zero_node));
      insert_edge_copies_seq (init, ENTRY_BLOCK_PTR);
      mark_all_v_defs_seq (init);
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}

/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
                        unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
         than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
        instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c, *group;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      /* Consider groups of sub-elements as weighing in favour of
         instantiation whatever their size.  */
      for (group = elt->groups; group ; group = group->sibling)
        FOR_EACH_ACTUAL_CHILD (c, group)
          {
            c->n_uses += group->n_uses;
            c->n_copies += group->n_copies;
          }

      for (c = elt->children; c ; c = c->sibling)
        decide_instantiation_1 (c, this_uses, this_copies);
    }
}
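
/* A worked instance of the leaf heuristic above (illustrative):
   suppose "a.b" is used as a unit 3 times (so PARENT_USES is 3 at the
   leaves) and the scalar leaf "a.b.c" has n_uses == 2 and
   n_copies == 2 with PARENT_COPIES == 0.  Then 2 + 2 + 0 > 3 and the
   leaf gets a replacement; with n_uses == 1 and n_copies == 0 it
   would not, since 1 + 0 + 0 <= 3.  */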

/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
        count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}

/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static struct sra_elt *
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
        instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
  return sub;
}

/* Obtain the canonical type for field F of ELEMENT.  */

static tree
canon_type_for_field (tree f, tree element)
{
  tree field_type = TREE_TYPE (f);

  /* canonicalize_component_ref() unwidens some bit-field types (not
     marked as DECL_BIT_FIELD in C++), so we must do the same, lest we
     introduce type mismatches.  */
  if (INTEGRAL_TYPE_P (field_type)
      && DECL_MODE (f) != TYPE_MODE (field_type))
    field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
                                                   field_type,
                                                   element,
                                                   f, NULL_TREE),
                                           NULL_TREE));

  return field_type;
}

/* Look for adjacent fields of ELT starting at F that we'd like to
   scalarize as a single variable.  Return the last field of the
   group.  */

static tree
try_instantiate_multiple_fields (struct sra_elt *elt, tree f)
{
  int count;
  unsigned HOST_WIDE_INT align, bit, size, alchk;
  enum machine_mode mode;
  tree first = f, prev;
  tree type, var;
  struct sra_elt *block;

  /* Pointer fields are typically best handled as standalone entities.  */
  if (POINTER_TYPE_P (TREE_TYPE (f)))
    return f;

  if (!is_sra_scalar_type (TREE_TYPE (f))
      || !host_integerp (DECL_FIELD_OFFSET (f), 1)
      || !host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
      || !host_integerp (DECL_SIZE (f), 1)
      || lookup_element (elt, f, NULL, NO_INSERT))
    return f;

  block = elt;

  /* For complex and array objects, there are going to be integer
     literals as child elements.  In this case, we can't just take the
     alignment and mode of the decl, so we instead rely on the element
     type.

     ??? We could try to infer additional alignment from the full
     object declaration and the location of the sub-elements we're
     accessing.  */
  for (count = 0; !DECL_P (block->element); count++)
    block = block->parent;

  align = DECL_ALIGN (block->element);
  alchk = GET_MODE_BITSIZE (DECL_MODE (block->element));

  if (count)
    {
      type = TREE_TYPE (block->element);
      while (count--)
        type = TREE_TYPE (type);

      align = TYPE_ALIGN (type);
      alchk = GET_MODE_BITSIZE (TYPE_MODE (type));
    }

  if (align < alchk)
    align = alchk;

  /* Coalescing wider fields is probably pointless and
     inefficient.  */
  if (align > BITS_PER_WORD)
    align = BITS_PER_WORD;

  bit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
        + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
  size = tree_low_cst (DECL_SIZE (f), 1);

  alchk = align - 1;
  alchk = ~alchk;

  if ((bit & alchk) != ((bit + size - 1) & alchk))
    return f;

  /* Find adjacent fields in the same alignment word.  */

  for (prev = f, f = TREE_CHAIN (f);
       f && TREE_CODE (f) == FIELD_DECL
         && is_sra_scalar_type (TREE_TYPE (f))
         && host_integerp (DECL_FIELD_OFFSET (f), 1)
         && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
         && host_integerp (DECL_SIZE (f), 1)
         && !lookup_element (elt, f, NULL, NO_INSERT);
       prev = f, f = TREE_CHAIN (f))
    {
      unsigned HOST_WIDE_INT nbit, nsize;

      nbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
             + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
      nsize = tree_low_cst (DECL_SIZE (f), 1);

      if (bit + size == nbit)
        {
          if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
            {
              /* If we're at an alignment boundary, don't bother
                 growing alignment such that we can include this next
                 field.  */
              if ((nbit & alchk)
                  || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
                break;

              align = GET_MODE_BITSIZE (DECL_MODE (f));
              alchk = align - 1;
              alchk = ~alchk;

              if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
                break;
            }
          size += nsize;
        }
      else if (nbit + nsize == bit)
        {
          if ((nbit & alchk) != ((bit + size - 1) & alchk))
            {
              if ((bit & alchk)
                  || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
                break;

              align = GET_MODE_BITSIZE (DECL_MODE (f));
              alchk = align - 1;
              alchk = ~alchk;

              if ((nbit & alchk) != ((bit + size - 1) & alchk))
                break;
            }
          bit = nbit;
          size += nsize;
        }
      else
        break;
    }

  f = prev;

  if (f == first)
    return f;

  gcc_assert ((bit & alchk) == ((bit + size - 1) & alchk));

  /* Try to widen the bit range so as to cover padding bits as well.  */

  if ((bit & ~alchk) || size != align)
    {
      unsigned HOST_WIDE_INT mbit = bit & alchk;
      unsigned HOST_WIDE_INT msize = align;

      for (f = TYPE_FIELDS (elt->type);
           f; f = TREE_CHAIN (f))
        {
          unsigned HOST_WIDE_INT fbit, fsize;

          /* Skip the fields from first to prev.  */
          if (f == first)
            {
              f = prev;
              continue;
            }

          if (!(TREE_CODE (f) == FIELD_DECL
                && host_integerp (DECL_FIELD_OFFSET (f), 1)
                && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)))
            continue;

          fbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
                 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);

          /* If we're past the selected word, we're fine.  */
          if ((bit & alchk) < (fbit & alchk))
            continue;

          if (host_integerp (DECL_SIZE (f), 1))
            fsize = tree_low_cst (DECL_SIZE (f), 1);
          else
            /* Assume a variable-sized field takes up all space till
               the end of the word.  ??? Endianness issues?  */
            fsize = align - (fbit & alchk);

          if ((fbit & alchk) < (bit & alchk))
            {
              /* A large field might start at a previous word and
                 extend into the selected word.  Exclude those
                 bits.  ??? Endianness issues?  */
              HOST_WIDE_INT diff = fbit + fsize - mbit;

              if (diff <= 0)
                continue;

              mbit += diff;
              msize -= diff;
            }
          else
            {
              /* Non-overlapping, great.  */
              if (fbit + fsize <= mbit
                  || mbit + msize <= fbit)
                continue;

              if (fbit <= mbit)
                {
                  unsigned HOST_WIDE_INT diff = fbit + fsize - mbit;
                  mbit += diff;
                  msize -= diff;
                }
              else if (fbit > mbit)
                msize -= (mbit + msize - fbit);
              else
                gcc_unreachable ();
            }
        }

      bit = mbit;
      size = msize;
    }

  /* Now we know the bit range we're interested in.  Find the smallest
     machine mode we can use to access it.  */

  for (mode = smallest_mode_for_size (size, MODE_INT);
       ;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      gcc_assert (mode != VOIDmode);

      alchk = GET_MODE_PRECISION (mode) - 1;
      alchk = ~alchk;

      if ((bit & alchk) == ((bit + size - 1) & alchk))
        break;
    }

  gcc_assert (~alchk < align);

  /* Create the field group as a single variable.  */

  /* We used to create a type for the mode above, but SIZE turns out
     not to be the mode's size.  As we need a matching type to build
     a BIT_FIELD_REF, use a nonstandard integer type as fallback.  */
  type = lang_hooks.types.type_for_size (size, 1);
  if (!type || TYPE_PRECISION (type) != size)
    type = build_nonstandard_integer_type (size, 1);
  gcc_assert (type);
  var = build3 (BIT_FIELD_REF, type, NULL_TREE,
                bitsize_int (size), bitsize_int (bit));

  block = instantiate_missing_elements_1 (elt, var, type);
  gcc_assert (block && block->is_scalar);

  var = block->replacement;

  if ((bit & ~alchk)
      || (HOST_WIDE_INT)size != tree_low_cst (DECL_SIZE (var), 1))
    {
      block->replacement = fold_build3 (BIT_FIELD_REF,
                                        TREE_TYPE (block->element), var,
                                        bitsize_int (size),
                                        bitsize_int (bit & ~alchk));
    }

  block->in_bitfld_block = 2;

  /* Add the member fields to the group, such that they access
     portions of the group variable.  */

  for (f = first; f != TREE_CHAIN (prev); f = TREE_CHAIN (f))
    {
      tree field_type = canon_type_for_field (f, elt->element);
      struct sra_elt *fld = lookup_element (block, f, field_type, INSERT);

      gcc_assert (fld && fld->is_scalar && !fld->replacement);

      fld->replacement = fold_build3 (BIT_FIELD_REF, field_type, var,
                                      DECL_SIZE (f),
                                      bitsize_int
                                      ((TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f))
                                        * BITS_PER_UNIT
                                        + (TREE_INT_CST_LOW
                                           (DECL_FIELD_BIT_OFFSET (f))))
                                       & ~alchk));
      fld->in_bitfld_block = 1;
    }

  return prev;
}
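
/* What the coalescing above buys us (a sketch): given

     struct s { unsigned a : 4; unsigned b : 4; unsigned c : 8; };

   the three adjacent bit-fields fit in one aligned 16-bit range, so a
   single block variable with a 16-bit integer type is created
   (in_bitfld_block == 2) and each field becomes a BIT_FIELD_REF into
   it (in_bitfld_block == 1), rather than three separately maintained
   replacements.  */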

static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
        tree f;
        for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              tree last = try_instantiate_multiple_fields (elt, f);

              if (last != f)
                {
                  f = last;
                  continue;
                }

              instantiate_missing_elements_1 (elt, f,
                                              canon_type_for_field
                                              (f, elt->element));
            }
        break;
      }

    case ARRAY_TYPE:
      {
        tree i, max, subtype;

        i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
        max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
        subtype = TREE_TYPE (type);

        while (1)
          {
            instantiate_missing_elements_1 (elt, i, subtype);
            if (tree_int_cst_equal (i, max))
              break;
            i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
          }

        break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}

/* Return true if there is precisely one non-aggregate field in the
   record TYPE.  Return false otherwise.  */

static bool
single_scalar_field_in_record_p (tree type)
{
  int num_fields = 0;
  tree field;
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
        num_fields++;

        if (num_fields == 2)
          return false;

        if (AGGREGATE_TYPE_P (TREE_TYPE (field)))
          return false;
      }

  return true;
}
1834
1835 /* Make one pass across an element tree deciding whether to perform block
1836 or element copies. If we decide on element copies, instantiate all
1837 elements. Return true if there are any instantiated sub-elements. */
1838
1839 static bool
1840 decide_block_copy (struct sra_elt *elt)
1841 {
1842 struct sra_elt *c;
1843 bool any_inst;
1844
1845 /* We shouldn't be invoked on groups of sub-elements as they must
1846 behave like their parent as far as block copy is concerned. */
1847 gcc_assert (!elt->is_group);
1848
1849 /* If scalarization is disabled, respect it. */
1850 if (elt->cannot_scalarize)
1851 {
1852 elt->use_block_copy = 1;
1853
1854 if (dump_file)
1855 {
1856 fputs ("Scalarization disabled for ", dump_file);
1857 dump_sra_elt_name (dump_file, elt);
1858 fputc ('\n', dump_file);
1859 }
1860
1861 /* Disable scalarization of sub-elements */
1862 for (c = elt->children; c; c = c->sibling)
1863 {
1864 c->cannot_scalarize = 1;
1865 decide_block_copy (c);
1866 }
1867
1868 /* Groups behave like their parent. */
1869 for (c = elt->groups; c; c = c->sibling)
1870 {
1871 c->cannot_scalarize = 1;
1872 c->use_block_copy = 1;
1873 }
1874
1875 return false;
1876 }
1877
1878 /* Don't decide if we've no uses and no groups. */
1879 if (elt->n_uses == 0 && elt->n_copies == 0 && elt->groups == NULL)
1880 ;
1881
1882 else if (!elt->is_scalar)
1883 {
1884 tree size_tree = TYPE_SIZE_UNIT (elt->type);
1885 bool use_block_copy = true;
1886
1887 /* Tradeoffs for COMPLEX types pretty much always make it better
1888 to go ahead and split the components. */
1889 if (TREE_CODE (elt->type) == COMPLEX_TYPE)
1890 use_block_copy = false;
1891
1892 /* Don't bother trying to figure out the rest if the structure is
1893 so large we can't do easy arithmetic. This also forces block
1894 copies for variable sized structures. */
1895 else if (host_integerp (size_tree, 1))
1896 {
1897 unsigned HOST_WIDE_INT full_size, inst_size = 0;
1898 unsigned int max_size, max_count, inst_count, full_count;
1899
1900 /* If the sra-max-structure-size parameter is 0, then the
1901 user has not overridden the parameter and we can choose a
1902 sensible default. */
1903 max_size = SRA_MAX_STRUCTURE_SIZE
1904 ? SRA_MAX_STRUCTURE_SIZE
1905 : MOVE_RATIO (optimize_function_for_speed_p (cfun)) * UNITS_PER_WORD;
1906 max_count = SRA_MAX_STRUCTURE_COUNT
1907 ? SRA_MAX_STRUCTURE_COUNT
1908 : MOVE_RATIO (optimize_function_for_speed_p (cfun));
1909
1910 full_size = tree_low_cst (size_tree, 1);
1911 full_count = count_type_elements (elt->type, false);
1912 inst_count = sum_instantiated_sizes (elt, &inst_size);
1913
1914 /* If there is only one scalar field in the record, don't block copy. */
1915 if (single_scalar_field_in_record_p (elt->type))
1916 use_block_copy = false;
1917
1918 /* ??? What to do here. If there are two fields, and we've only
1919 instantiated one, then instantiating the other is clearly a win.
1920 If there are a large number of fields then the size of the copy
1921 is much more of a factor. */
1922
1923 /* If the structure is small, and we've made copies, go ahead
1924 and instantiate, hoping that the copies will go away. */
1925 if (full_size <= max_size
1926 && (full_count - inst_count) <= max_count
1927 && elt->n_copies > elt->n_uses)
1928 use_block_copy = false;
1929 else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
1930 && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
1931 use_block_copy = false;
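
      /* Illustrative numbers, assuming the usual default (75) for the
	 sra-field-structure-ratio parameter: with full_count == 4 and
	 inst_count == 3, inst_count * 100 == 300 >= full_count * 75
	 == 300, so element copies win provided the corresponding size
	 test also passes.  */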
1932
1933 /* In order to avoid block copy, we have to be able to instantiate
1934 all elements of the type. See if this is possible. */
1935 if (!use_block_copy
1936 && (!can_completely_scalarize_p (elt)
1937 || !type_can_instantiate_all_elements (elt->type)))
1938 use_block_copy = true;
1939 }
1940
1941 elt->use_block_copy = use_block_copy;
1942
1943 /* Groups behave like their parent. */
1944 for (c = elt->groups; c; c = c->sibling)
1945 c->use_block_copy = use_block_copy;
1946
1947 if (dump_file)
1948 {
1949 fprintf (dump_file, "Using %s for ",
1950 use_block_copy ? "block-copy" : "element-copy");
1951 dump_sra_elt_name (dump_file, elt);
1952 fputc ('\n', dump_file);
1953 }
1954
1955 if (!use_block_copy)
1956 {
1957 instantiate_missing_elements (elt);
1958 return true;
1959 }
1960 }
1961
1962 any_inst = elt->replacement != NULL;
1963
1964 for (c = elt->children; c ; c = c->sibling)
1965 any_inst |= decide_block_copy (c);
1966
1967 return any_inst;
1968 }
1969
1970 /* Entry point to phase 3. Instantiate scalar replacement variables. */
1971
1972 static void
1973 decide_instantiations (void)
1974 {
1975 unsigned int i;
1976 bool cleared_any;
1977 bitmap_head done_head;
1978 bitmap_iterator bi;
1979
1980 /* We cannot clear bits from a bitmap we're iterating over,
1981 so save up all the bits to clear until the end. */
1982 bitmap_initialize (&done_head, &bitmap_default_obstack);
1983 cleared_any = false;
1984
1985 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
1986 {
1987 tree var = referenced_var (i);
1988 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1989 if (elt)
1990 {
1991 decide_instantiation_1 (elt, 0, 0);
1992 if (!decide_block_copy (elt))
1993 elt = NULL;
1994 }
1995 if (!elt)
1996 {
1997 bitmap_set_bit (&done_head, i);
1998 cleared_any = true;
1999 }
2000 }
2001
2002 if (cleared_any)
2003 {
2004 bitmap_and_compl_into (sra_candidates, &done_head);
2005 bitmap_and_compl_into (needs_copy_in, &done_head);
2006 }
2007 bitmap_clear (&done_head);
2008
2009 mark_set_for_renaming (sra_candidates);
2010
2011 if (dump_file)
2012 fputc ('\n', dump_file);
2013 }
2014
2015 \f
2016 /* Phase Four: Update the function to match the replacements created. */
2017
2018 /* Mark all the variables in VDEF/VUSE operands of STMT for
2019    renaming.  This becomes necessary when we modify all of a
2020    non-scalar.  */
2021
2022 static void
2023 mark_all_v_defs_stmt (gimple stmt)
2024 {
2025 tree sym;
2026 ssa_op_iter iter;
2027
2028 update_stmt_if_modified (stmt);
2029
2030 FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
2031 {
2032 if (TREE_CODE (sym) == SSA_NAME)
2033 sym = SSA_NAME_VAR (sym);
2034 mark_sym_for_renaming (sym);
2035 }
2036 }
2037
2038
2039 /* Mark all the variables in virtual operands in all the statements in
2040    SEQ for renaming.  */
2041
2042 static void
2043 mark_all_v_defs_seq (gimple_seq seq)
2044 {
2045 gimple_stmt_iterator gsi;
2046
2047 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
2048 mark_all_v_defs_stmt (gsi_stmt (gsi));
2049 }
2050
2051 /* Mark every replacement under ELT with TREE_NO_WARNING. */
2052
2053 static void
2054 mark_no_warning (struct sra_elt *elt)
2055 {
2056 if (!elt->all_no_warning)
2057 {
2058 if (elt->replacement)
2059 TREE_NO_WARNING (elt->replacement) = 1;
2060 else
2061 {
2062 struct sra_elt *c;
2063 FOR_EACH_ACTUAL_CHILD (c, elt)
2064 mark_no_warning (c);
2065 }
2066 elt->all_no_warning = true;
2067 }
2068 }
2069
2070 /* Build a single level component reference to ELT rooted at BASE. */
2071
2072 static tree
2073 generate_one_element_ref (struct sra_elt *elt, tree base)
2074 {
2075 switch (TREE_CODE (TREE_TYPE (base)))
2076 {
2077 case RECORD_TYPE:
2078 {
2079 tree field = elt->element;
2080
2081 /* We can't test elt->in_bitfld_block here because, when this is
2082 called from instantiate_element, we haven't set this field
2083 yet. */
2084 if (TREE_CODE (field) == BIT_FIELD_REF)
2085 {
2086 tree ret = unshare_expr (field);
2087 TREE_OPERAND (ret, 0) = base;
2088 return ret;
2089 }
2090
2091 /* Watch out for compatible records with differing field lists. */
2092 if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
2093 field = find_compatible_field (TREE_TYPE (base), field);
2094
2095 return build3 (COMPONENT_REF, elt->type, base, field, NULL);
2096 }
2097
2098 case ARRAY_TYPE:
2099 if (TREE_CODE (elt->element) == RANGE_EXPR)
2100 return build4 (ARRAY_RANGE_REF, elt->type, base,
2101 TREE_OPERAND (elt->element, 0), NULL, NULL);
2102 else
2103 return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
2104
2105 case COMPLEX_TYPE:
2106 if (elt->element == integer_zero_node)
2107 return build1 (REALPART_EXPR, elt->type, base);
2108 else
2109 return build1 (IMAGPART_EXPR, elt->type, base);
2110
2111 default:
2112 gcc_unreachable ();
2113 }
2114 }
2115
2116 /* Build a full component reference to ELT rooted at its native variable. */
2117
2118 static tree
2119 generate_element_ref (struct sra_elt *elt)
2120 {
2121 if (elt->parent)
2122 return generate_one_element_ref (elt, generate_element_ref (elt->parent));
2123 else
2124 return elt->element;
2125 }
2126
2127 /* Return true if BF is a bit-field that we can handle like a scalar. */
2128
2129 static bool
2130 scalar_bitfield_p (tree bf)
2131 {
2132 return (TREE_CODE (bf) == BIT_FIELD_REF
2133 && (is_gimple_reg (TREE_OPERAND (bf, 0))
2134 || (TYPE_MODE (TREE_TYPE (TREE_OPERAND (bf, 0))) != BLKmode
2135 && (!TREE_SIDE_EFFECTS (TREE_OPERAND (bf, 0))
2136 || (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE
2137 (TREE_OPERAND (bf, 0))))
2138 <= BITS_PER_WORD)))));
2139 }
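
/* Put differently (a paraphrase of the predicate above, not new
   semantics): a BIT_FIELD_REF qualifies when its base object is a
   GIMPLE register, or when the base has a non-BLKmode type and either
   has no side effects or is at most one word wide.  */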
2140
2141 /* Create a statement sequence that assigns SRC to DST.  */
2142
2143 static gimple_seq
2144 sra_build_assignment (tree dst, tree src)
2145 {
2146 gimple stmt;
2147 gimple_seq seq = NULL, seq2 = NULL;
2148 /* Turning BIT_FIELD_REFs into bit operations enables other passes
2149 to do a much better job at optimizing the code.
2150 From dst = BIT_FIELD_REF <var, sz, off> we produce
2151
2152 SR.1 = (scalar type) var;
2153 SR.2 = SR.1 >> off;
2154 SR.3 = SR.2 & ((1 << sz) - 1);
2155 ... possible sign extension of SR.3 ...
2156 dst = (destination type) SR.3;
2157 */
2158 if (scalar_bitfield_p (src))
2159 {
2160 tree var, shift, width;
2161 tree utype, stype, stmp, utmp, dtmp;
2162 bool unsignedp = (INTEGRAL_TYPE_P (TREE_TYPE (src))
2163 ? TYPE_UNSIGNED (TREE_TYPE (src)) : true);
2164
2165 var = TREE_OPERAND (src, 0);
2166 width = TREE_OPERAND (src, 1);
2167 /* The offset needs to be adjusted to a right shift quantity
2168 depending on the endianness. */
2169 if (BYTES_BIG_ENDIAN)
2170 {
2171 tree tmp = size_binop (PLUS_EXPR, width, TREE_OPERAND (src, 2));
2172 shift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), tmp);
2173 }
2174 else
2175 shift = TREE_OPERAND (src, 2);
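
      /* Example (illustrative): extracting a 4-bit field at bit offset 8
	 from a 32-bit VAR uses shift == 8 on a little-endian target, but
	 shift == 32 - (4 + 8) == 20 on a big-endian one.  */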
2176
2177 /* In weird cases we have non-integral types for the source or
2178 destination object.
2179 ??? For unknown reasons we also want an unsigned scalar type. */
2180 stype = TREE_TYPE (var);
2181 if (!INTEGRAL_TYPE_P (stype))
2182 stype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
2183 (TYPE_SIZE (stype)), 1);
2184 else if (!TYPE_UNSIGNED (stype))
2185 stype = unsigned_type_for (stype);
2186
2187 utype = TREE_TYPE (dst);
2188 if (!INTEGRAL_TYPE_P (utype))
2189 utype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
2190 (TYPE_SIZE (utype)), 1);
2191 else if (!TYPE_UNSIGNED (utype))
2192 utype = unsigned_type_for (utype);
2193
2194 stmp = make_rename_temp (stype, "SR");
2195
2196 /* Convert the base var of the BIT_FIELD_REF to the scalar type
2197 we use for computation if we cannot use it directly. */
2198 if (!useless_type_conversion_p (stype, TREE_TYPE (var)))
2199 {
2200 if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
2201 stmt = gimple_build_assign (stmp, fold_convert (stype, var));
2202 else
2203 stmt = gimple_build_assign (stmp, fold_build1 (VIEW_CONVERT_EXPR,
2204 stype, var));
2205 gimple_seq_add_stmt (&seq, stmt);
2206 var = stmp;
2207 }
2208
2209 if (!integer_zerop (shift))
2210 {
2211 stmt = gimple_build_assign (stmp, fold_build2 (RSHIFT_EXPR, stype,
2212 var, shift));
2213 gimple_seq_add_stmt (&seq, stmt);
2214 var = stmp;
2215 }
2216
2217 /* If we need a masking operation, produce one. */
2218 if (TREE_INT_CST_LOW (width) == TYPE_PRECISION (stype))
2219 unsignedp = true;
2220 else
2221 {
2222 tree one = build_int_cst_wide (stype, 1, 0);
2223 tree mask = int_const_binop (LSHIFT_EXPR, one, width, 0);
2224 mask = int_const_binop (MINUS_EXPR, mask, one, 0);
2225
2226 stmt = gimple_build_assign (stmp, fold_build2 (BIT_AND_EXPR, stype,
2227 var, mask));
2228 gimple_seq_add_stmt (&seq, stmt);
2229 var = stmp;
2230 }
2231
2232 /* After shifting and masking, convert to the target type. */
2233 utmp = stmp;
2234 if (!useless_type_conversion_p (utype, stype))
2235 {
2236 utmp = make_rename_temp (utype, "SR");
2237
2238 stmt = gimple_build_assign (utmp, fold_convert (utype, var));
2239 gimple_seq_add_stmt (&seq, stmt);
2240
2241 var = utmp;
2242 }
2243
2244 /* Perform sign extension, if required.
2245 ??? This should never be necessary. */
2246 if (!unsignedp)
2247 {
2248 tree signbit = int_const_binop (LSHIFT_EXPR,
2249 build_int_cst_wide (utype, 1, 0),
2250 size_binop (MINUS_EXPR, width,
2251 bitsize_int (1)), 0);
2252
2253 stmt = gimple_build_assign (utmp, fold_build2 (BIT_XOR_EXPR, utype,
2254 var, signbit));
2255 gimple_seq_add_stmt (&seq, stmt);
2256
2257 stmt = gimple_build_assign (utmp, fold_build2 (MINUS_EXPR, utype,
2258 utmp, signbit));
2259 gimple_seq_add_stmt (&seq, stmt);
2260
2261 var = utmp;
2262 }
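
      /* Worked example of the idiom above (illustrative): for a 3-bit
	 signed field holding the bits 0b101, signbit == 0b100, and
	 (0b101 ^ 0b100) - 0b100 == 1 - 4 == -3, the correct signed
	 value.  */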
2263
2264 /* fold_build3 (BIT_FIELD_REF, ...) sometimes returns a cast. */
2265 STRIP_NOPS (dst);
2266
2267 /* Finally, move and convert to the destination. */
2268 if (!useless_type_conversion_p (TREE_TYPE (dst), TREE_TYPE (var)))
2269 {
2270 if (INTEGRAL_TYPE_P (TREE_TYPE (dst)))
2271 var = fold_convert (TREE_TYPE (dst), var);
2272 else
2273 var = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (dst), var);
2274
2275 /* If the destination is not a register the conversion needs
2276 to be a separate statement. */
2277 if (!is_gimple_reg (dst))
2278 {
2279 dtmp = make_rename_temp (TREE_TYPE (dst), "SR");
2280 stmt = gimple_build_assign (dtmp, var);
2281 gimple_seq_add_stmt (&seq, stmt);
2282 var = dtmp;
2283 }
2284 }
2285 stmt = gimple_build_assign (dst, var);
2286 gimple_seq_add_stmt (&seq, stmt);
2287
2288 return seq;
2289 }
2290
2291 /* fold_build3 (BIT_FIELD_REF, ...) sometimes returns a cast. */
2292 if (CONVERT_EXPR_P (dst))
2293 {
2294 STRIP_NOPS (dst);
2295 src = fold_convert (TREE_TYPE (dst), src);
2296 }
2297 /* It was hoped that we could perform some type sanity checking
2298 here, but since front-ends can emit accesses of fields in types
2299 different from their nominal types and copy structures containing
2300 them as a whole, we'd have to handle such differences here.
2301 Since such accesses under different types require compatibility
2302 anyway, there's little point in making tests and/or adding
2303 conversions to ensure the types of src and dst are the same.
2304 So we just assume type differences at this point are ok.
2305    The only exception we make here is for pointer types, which can
2306    differ in e.g. structurally equal, but non-identical RECORD_TYPEs.  */
2307 else if (POINTER_TYPE_P (TREE_TYPE (dst))
2308 && !useless_type_conversion_p (TREE_TYPE (dst), TREE_TYPE (src)))
2309 src = fold_convert (TREE_TYPE (dst), src);
2310
2311 /* ??? Only call the gimplifier if we need to. Otherwise we may
2312 end up substituting with DECL_VALUE_EXPR - see PR37380. */
2313 if (!handled_component_p (src)
2314 && !SSA_VAR_P (src))
2315 {
2316 src = force_gimple_operand (src, &seq2, false, NULL_TREE);
2317 gimple_seq_add_seq (&seq, seq2);
2318 }
2319 stmt = gimple_build_assign (dst, src);
2320 gimple_seq_add_stmt (&seq, stmt);
2321 return seq;
2322 }
2323
2324 /* BIT_FIELD_REFs must not be shared. sra_build_elt_assignment()
2325 takes care of assignments, but we must create copies for uses. */
2326 #define REPLDUP(t) (TREE_CODE (t) != BIT_FIELD_REF ? (t) : unshare_expr (t))
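
/* E.g. (illustrative): REPLDUP of a VAR_DECL is the decl itself, while
   REPLDUP of a BIT_FIELD_REF yields an unshared copy, since the same
   BIT_FIELD_REF node must not appear in two places.  */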
2327
2328 /* Emit an assignment from SRC to DST, but if DST is a scalarizable
2329 BIT_FIELD_REF, turn it into bit operations. */
2330
2331 static gimple_seq
2332 sra_build_bf_assignment (tree dst, tree src)
2333 {
2334 tree var, type, utype, tmp, tmp2, tmp3;
2335 gimple_seq seq;
2336 gimple stmt;
2337 tree cst, cst2, mask;
2338 tree minshift, maxshift;
2339
2340 if (TREE_CODE (dst) != BIT_FIELD_REF)
2341 return sra_build_assignment (dst, src);
2342
2343 var = TREE_OPERAND (dst, 0);
2344
2345 if (!scalar_bitfield_p (dst))
2346 return sra_build_assignment (REPLDUP (dst), src);
2347
2348 seq = NULL;
2349
2350 cst = fold_convert (bitsizetype, TREE_OPERAND (dst, 2));
2351 cst2 = size_binop (PLUS_EXPR,
2352 fold_convert (bitsizetype, TREE_OPERAND (dst, 1)),
2353 cst);
2354
2355 if (BYTES_BIG_ENDIAN)
2356 {
2357 maxshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst);
2358 minshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst2);
2359 }
2360 else
2361 {
2362 maxshift = cst2;
2363 minshift = cst;
2364 }
2365
2366 type = TREE_TYPE (var);
2367 if (!INTEGRAL_TYPE_P (type))
2368 type = lang_hooks.types.type_for_size
2369 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (var))), 1);
2370 if (TYPE_UNSIGNED (type))
2371 utype = type;
2372 else
2373 utype = unsigned_type_for (type);
2374
2375 mask = build_int_cst_wide (utype, 1, 0);
2376 if (TREE_INT_CST_LOW (maxshift) == TYPE_PRECISION (utype))
2377 cst = build_int_cst_wide (utype, 0, 0);
2378 else
2379 cst = int_const_binop (LSHIFT_EXPR, mask, maxshift, true);
2380 if (integer_zerop (minshift))
2381 cst2 = mask;
2382 else
2383 cst2 = int_const_binop (LSHIFT_EXPR, mask, minshift, true);
2384 mask = int_const_binop (MINUS_EXPR, cst, cst2, true);
2385 mask = fold_build1 (BIT_NOT_EXPR, utype, mask);
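
  /* Illustrative mask construction (little-endian case): storing a
     3-bit field at bit 2 of an 8-bit VAR gives minshift == 2 and
     maxshift == 5, hence cst == 1 << 5 == 32, cst2 == 1 << 2 == 4, and
     mask == ~(32 - 4) == 0xe3, which preserves every bit of VAR
     outside the field.  */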
2386
2387 if (TYPE_MAIN_VARIANT (utype) != TYPE_MAIN_VARIANT (TREE_TYPE (var))
2388 && !integer_zerop (mask))
2389 {
2390 tmp = var;
2391 if (!is_gimple_variable (tmp))
2392 tmp = unshare_expr (var);
2393
2394 tmp2 = make_rename_temp (utype, "SR");
2395
2396 if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
2397 stmt = gimple_build_assign (tmp2, fold_convert (utype, tmp));
2398 else
2399 stmt = gimple_build_assign (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
2400 utype, tmp));
2401 gimple_seq_add_stmt (&seq, stmt);
2402 }
2403 else
2404 tmp2 = var;
2405
2406 if (!integer_zerop (mask))
2407 {
2408 tmp = make_rename_temp (utype, "SR");
2409 stmt = gimple_build_assign (tmp, fold_build2 (BIT_AND_EXPR, utype,
2410 tmp2, mask));
2411 gimple_seq_add_stmt (&seq, stmt);
2412 }
2413 else
2414 tmp = mask;
2415
2416 if (is_gimple_reg (src) && INTEGRAL_TYPE_P (TREE_TYPE (src)))
2417 tmp2 = src;
2418 else if (INTEGRAL_TYPE_P (TREE_TYPE (src)))
2419 {
2420 gimple_seq tmp_seq;
2421 tmp2 = make_rename_temp (TREE_TYPE (src), "SR");
2422 tmp_seq = sra_build_assignment (tmp2, src);
2423 gimple_seq_add_seq (&seq, tmp_seq);
2424 }
2425 else
2426 {
2427 gimple_seq tmp_seq;
2428 tmp2 = make_rename_temp
2429 (lang_hooks.types.type_for_size
2430 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (src))),
2431 1), "SR");
2432 tmp_seq = sra_build_assignment (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
2433 TREE_TYPE (tmp2), src));
2434 gimple_seq_add_seq (&seq, tmp_seq);
2435 }
2436
2437 if (!TYPE_UNSIGNED (TREE_TYPE (tmp2)))
2438 {
2439 gimple_seq tmp_seq;
2440 tree ut = unsigned_type_for (TREE_TYPE (tmp2));
2441 tmp3 = make_rename_temp (ut, "SR");
2442 tmp2 = fold_convert (ut, tmp2);
2443 tmp_seq = sra_build_assignment (tmp3, tmp2);
2444 gimple_seq_add_seq (&seq, tmp_seq);
2445
2446 tmp2 = fold_build1 (BIT_NOT_EXPR, utype, mask);
2447 tmp2 = int_const_binop (RSHIFT_EXPR, tmp2, minshift, true);
2448 tmp2 = fold_convert (ut, tmp2);
2449 tmp2 = fold_build2 (BIT_AND_EXPR, ut, tmp3, tmp2);
2450
2451 if (tmp3 != tmp2)
2452 {
2453 tmp3 = make_rename_temp (ut, "SR");
2454 tmp_seq = sra_build_assignment (tmp3, tmp2);
2455 gimple_seq_add_seq (&seq, tmp_seq);
2456 }
2457
2458 tmp2 = tmp3;
2459 }
2460
2461 if (TYPE_MAIN_VARIANT (TREE_TYPE (tmp2)) != TYPE_MAIN_VARIANT (utype))
2462 {
2463 gimple_seq tmp_seq;
2464 tmp3 = make_rename_temp (utype, "SR");
2465 tmp2 = fold_convert (utype, tmp2);
2466 tmp_seq = sra_build_assignment (tmp3, tmp2);
2467 gimple_seq_add_seq (&seq, tmp_seq);
2468 tmp2 = tmp3;
2469 }
2470
2471 if (!integer_zerop (minshift))
2472 {
2473 tmp3 = make_rename_temp (utype, "SR");
2474 stmt = gimple_build_assign (tmp3, fold_build2 (LSHIFT_EXPR, utype,
2475 tmp2, minshift));
2476 gimple_seq_add_stmt (&seq, stmt);
2477 tmp2 = tmp3;
2478 }
2479
2480 if (utype != TREE_TYPE (var))
2481 tmp3 = make_rename_temp (utype, "SR");
2482 else
2483 tmp3 = var;
2484 stmt = gimple_build_assign (tmp3, fold_build2 (BIT_IOR_EXPR, utype,
2485 tmp, tmp2));
2486 gimple_seq_add_stmt (&seq, stmt);
2487
2488 if (tmp3 != var)
2489 {
2490 if (TREE_TYPE (var) == type)
2491 stmt = gimple_build_assign (var, fold_convert (type, tmp3));
2492 else
2493 stmt = gimple_build_assign (var, fold_build1 (VIEW_CONVERT_EXPR,
2494 TREE_TYPE (var), tmp3));
2495 gimple_seq_add_stmt (&seq, stmt);
2496 }
2497
2498 return seq;
2499 }
2500
2501 /* Expand an assignment of SRC to the scalarized representation of
2502 ELT. If it is a field group, try to widen the assignment to cover
2503 the full variable. */
2504
2505 static gimple_seq
2506 sra_build_elt_assignment (struct sra_elt *elt, tree src)
2507 {
2508 tree dst = elt->replacement;
2509 tree var, tmp, cst, cst2;
2510 gimple stmt;
2511 gimple_seq seq;
2512
2513 if (TREE_CODE (dst) != BIT_FIELD_REF
2514 || !elt->in_bitfld_block)
2515 return sra_build_assignment (REPLDUP (dst), src);
2516
2517 var = TREE_OPERAND (dst, 0);
2518
2519 /* Try to widen the assignment to the entire variable.
2520 We need the source to be a BIT_FIELD_REF as well, such that, for
2521 BIT_FIELD_REF<d,sz,dp> = BIT_FIELD_REF<s,sz,sp>,
2522 by design, conditions are met such that we can turn it into
2523 d = BIT_FIELD_REF<s,dw,sp-dp>. */
2524 if (elt->in_bitfld_block == 2
2525 && TREE_CODE (src) == BIT_FIELD_REF)
2526 {
2527 tmp = src;
2528 cst = TYPE_SIZE (TREE_TYPE (var));
2529 cst2 = size_binop (MINUS_EXPR, TREE_OPERAND (src, 2),
2530 TREE_OPERAND (dst, 2));
2531
2532 src = TREE_OPERAND (src, 0);
2533
2534 /* Avoid a full-width BIT_FIELD_REF; a plain conversion suffices.  */
2535 if (integer_zerop (cst2)
2536 && tree_int_cst_equal (cst, TYPE_SIZE (TREE_TYPE (src))))
2537 {
2538 if (INTEGRAL_TYPE_P (TREE_TYPE (src))
2539 && !TYPE_UNSIGNED (TREE_TYPE (src)))
2540 src = fold_convert (unsigned_type_for (TREE_TYPE (src)), src);
2541
2542 /* If a single conversion won't do, we'll need a statement
2543 list. */
2544 if (TYPE_MAIN_VARIANT (TREE_TYPE (var))
2545 != TYPE_MAIN_VARIANT (TREE_TYPE (src)))
2546 {
2547 gimple_seq tmp_seq;
2548 seq = NULL;
2549
2550 if (!INTEGRAL_TYPE_P (TREE_TYPE (src)))
2551 src = fold_build1 (VIEW_CONVERT_EXPR,
2552 lang_hooks.types.type_for_size
2553 (TREE_INT_CST_LOW
2554 (TYPE_SIZE (TREE_TYPE (src))),
2555 1), src);
2556 gcc_assert (TYPE_UNSIGNED (TREE_TYPE (src)));
2557
2558 tmp = make_rename_temp (TREE_TYPE (src), "SR");
2559 stmt = gimple_build_assign (tmp, src);
2560 gimple_seq_add_stmt (&seq, stmt);
2561
2562 tmp_seq = sra_build_assignment (var,
2563 fold_convert (TREE_TYPE (var),
2564 tmp));
2565 gimple_seq_add_seq (&seq, tmp_seq);
2566
2567 return seq;
2568 }
2569
2570 src = fold_convert (TREE_TYPE (var), src);
2571 }
2572 else
2573 {
2574 src = fold_convert (TREE_TYPE (var), tmp);
2575 }
2576
2577 return sra_build_assignment (var, src);
2578 }
2579
2580 return sra_build_bf_assignment (dst, src);
2581 }
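
/* For instance (illustrative): with a 32-bit block D at bit 0 of its
   variable and a source BIT_FIELD_REF <S, 32, 0> where S is itself 32
   bits wide, the widening above degenerates into the plain conversion
   D = (type of D) S.  */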
2582
2583 /* Generate a set of assignment statements in *SEQ_P to copy all
2584 instantiated elements under ELT to or from the equivalent structure
2585 rooted at EXPR. COPY_OUT controls the direction of the copy, with
2586 true meaning to copy out of EXPR into ELT. */
2587
2588 static void
2589 generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
2590 gimple_seq *seq_p)
2591 {
2592 struct sra_elt *c;
2593 gimple_seq tmp_seq;
2594 tree t;
2595
2596 if (!copy_out && TREE_CODE (expr) == SSA_NAME
2597 && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
2598 {
2599 tree r, i;
2600
2601 c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
2602 r = c->replacement;
2603 c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
2604 i = c->replacement;
2605
2606 t = build2 (COMPLEX_EXPR, elt->type, r, i);
2607 tmp_seq = sra_build_bf_assignment (expr, t);
2608 SSA_NAME_DEF_STMT (expr) = gimple_seq_last_stmt (tmp_seq);
2609 gimple_seq_add_seq (seq_p, tmp_seq);
2610 }
2611 else if (elt->replacement)
2612 {
2613 if (copy_out)
2614 tmp_seq = sra_build_elt_assignment (elt, expr);
2615 else
2616 tmp_seq = sra_build_bf_assignment (expr, REPLDUP (elt->replacement));
2617 gimple_seq_add_seq (seq_p, tmp_seq);
2618 }
2619 else
2620 {
2621 FOR_EACH_ACTUAL_CHILD (c, elt)
2622 {
2623 t = generate_one_element_ref (c, unshare_expr (expr));
2624 generate_copy_inout (c, copy_out, t, seq_p);
2625 }
2626 }
2627 }
2628
2629 /* Generate a set of assignment statements in *SEQ_P to copy all instantiated
2630 elements under SRC to their counterparts under DST. There must be a 1-1
2631 correspondence of instantiated elements. */
2632
2633 static void
2634 generate_element_copy (struct sra_elt *dst, struct sra_elt *src, gimple_seq *seq_p)
2635 {
2636 struct sra_elt *dc, *sc;
2637
2638 FOR_EACH_ACTUAL_CHILD (dc, dst)
2639 {
2640 sc = lookup_element (src, dc->element, NULL, NO_INSERT);
2641 if (!sc && dc->in_bitfld_block == 2)
2642 {
2643 struct sra_elt *dcs;
2644
2645 FOR_EACH_ACTUAL_CHILD (dcs, dc)
2646 {
2647 sc = lookup_element (src, dcs->element, NULL, NO_INSERT);
2648 gcc_assert (sc);
2649 generate_element_copy (dcs, sc, seq_p);
2650 }
2651
2652 continue;
2653 }
2654
2655 /* If DST and SRC are structs with the same elements, but do not have
2656 the same TYPE_MAIN_VARIANT, then lookup of DST FIELD_DECL in SRC
2657 will fail. Try harder by finding the corresponding FIELD_DECL
2658 in SRC. */
2659 if (!sc)
2660 {
2661 tree f;
2662
2663 gcc_assert (useless_type_conversion_p (dst->type, src->type));
2664 gcc_assert (TREE_CODE (dc->element) == FIELD_DECL);
2665 for (f = TYPE_FIELDS (src->type); f ; f = TREE_CHAIN (f))
2666 if (simple_cst_equal (DECL_FIELD_OFFSET (f),
2667 DECL_FIELD_OFFSET (dc->element)) > 0
2668 && simple_cst_equal (DECL_FIELD_BIT_OFFSET (f),
2669 DECL_FIELD_BIT_OFFSET (dc->element)) > 0
2670 && simple_cst_equal (DECL_SIZE (f),
2671 DECL_SIZE (dc->element)) > 0
2672 && (useless_type_conversion_p (TREE_TYPE (dc->element),
2673 TREE_TYPE (f))
2674 || (POINTER_TYPE_P (TREE_TYPE (dc->element))
2675 && POINTER_TYPE_P (TREE_TYPE (f)))))
2676 break;
2677 gcc_assert (f != NULL_TREE);
2678 sc = lookup_element (src, f, NULL, NO_INSERT);
2679 }
2680
2681 generate_element_copy (dc, sc, seq_p);
2682 }
2683
2684 if (dst->replacement)
2685 {
2686 gimple_seq tmp_seq;
2687
2688 gcc_assert (src->replacement);
2689
2690 tmp_seq = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
2691 gimple_seq_add_seq (seq_p, tmp_seq);
2692 }
2693 }
2694
2695 /* Generate a set of assignment statements in *SEQ_P to zero all instantiated
2696 elements under ELT. In addition, do not assign to elements that have been
2697 marked VISITED but do reset the visited flag; this allows easy coordination
2698 with generate_element_init. */
2699
2700 static void
2701 generate_element_zero (struct sra_elt *elt, gimple_seq *seq_p)
2702 {
2703 struct sra_elt *c;
2704
2705 if (elt->visited)
2706 {
2707 elt->visited = false;
2708 return;
2709 }
2710
2711 if (!elt->in_bitfld_block)
2712 FOR_EACH_ACTUAL_CHILD (c, elt)
2713 generate_element_zero (c, seq_p);
2714
2715 if (elt->replacement)
2716 {
2717 tree t;
2718 gimple_seq tmp_seq;
2719
2720 gcc_assert (elt->is_scalar);
2721 t = fold_convert (elt->type, integer_zero_node);
2722
2723 tmp_seq = sra_build_elt_assignment (elt, t);
2724 gimple_seq_add_seq (seq_p, tmp_seq);
2725 }
2726 }
2727
2728 /* Generate an assignment ELT = INIT, where INIT may need gimplification.
2729    Add the result to *SEQ_P.  */
2730
2731 static void
2732 generate_one_element_init (struct sra_elt *elt, tree init, gimple_seq *seq_p)
2733 {
2734 gimple_seq tmp_seq = sra_build_elt_assignment (elt, init);
2735 gimple_seq_add_seq (seq_p, tmp_seq);
2736 }
2737
2738 /* Generate a set of assignment statements in *SEQ_P to set all instantiated
2739 elements under ELT with the contents of the initializer INIT. In addition,
2740 mark all assigned elements VISITED; this allows easy coordination with
2741 generate_element_zero. Return false if we found a case we couldn't
2742 handle. */
2743
2744 static bool
2745 generate_element_init_1 (struct sra_elt *elt, tree init, gimple_seq *seq_p)
2746 {
2747 bool result = true;
2748 enum tree_code init_code;
2749 struct sra_elt *sub;
2750 tree t;
2751 unsigned HOST_WIDE_INT idx;
2752 tree value, purpose;
2753
2754 /* We can be passed DECL_INITIAL of a static variable. It might have a
2755 conversion, which we strip off here. */
2756 STRIP_USELESS_TYPE_CONVERSION (init);
2757 init_code = TREE_CODE (init);
2758
2759 if (elt->is_scalar)
2760 {
2761 if (elt->replacement)
2762 {
2763 generate_one_element_init (elt, init, seq_p);
2764 elt->visited = true;
2765 }
2766 return result;
2767 }
2768
2769 switch (init_code)
2770 {
2771 case COMPLEX_CST:
2772 case COMPLEX_EXPR:
2773 FOR_EACH_ACTUAL_CHILD (sub, elt)
2774 {
2775 if (sub->element == integer_zero_node)
2776 t = (init_code == COMPLEX_EXPR
2777 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
2778 else
2779 t = (init_code == COMPLEX_EXPR
2780 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
2781 result &= generate_element_init_1 (sub, t, seq_p);
2782 }
2783 break;
2784
2785 case CONSTRUCTOR:
2786 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
2787 {
2788 /* Array constructors are routinely created with NULL indices. */
2789 if (purpose == NULL_TREE)
2790 {
2791 result = false;
2792 break;
2793 }
2794 if (TREE_CODE (purpose) == RANGE_EXPR)
2795 {
2796 tree lower = TREE_OPERAND (purpose, 0);
2797 tree upper = TREE_OPERAND (purpose, 1);
2798
2799 while (1)
2800 {
2801 sub = lookup_element (elt, lower, NULL, NO_INSERT);
2802 if (sub != NULL)
2803 result &= generate_element_init_1 (sub, value, seq_p);
2804 if (tree_int_cst_equal (lower, upper))
2805 break;
2806 lower = int_const_binop (PLUS_EXPR, lower,
2807 integer_one_node, true);
2808 }
2809 }
2810 else
2811 {
2812 sub = lookup_element (elt, purpose, NULL, NO_INSERT);
2813 if (sub != NULL)
2814 result &= generate_element_init_1 (sub, value, seq_p);
2815 }
2816 }
2817 break;
2818
2819 default:
2820 elt->visited = true;
2821 result = false;
2822 }
2823
2824 return result;
2825 }
2826
2827 /* A wrapper function for generate_element_init_1 that handles cleanup after
2828 gimplification. */
2829
2830 static bool
2831 generate_element_init (struct sra_elt *elt, tree init, gimple_seq *seq_p)
2832 {
2833 bool ret;
2834 struct gimplify_ctx gctx;
2835
2836 push_gimplify_context (&gctx);
2837 ret = generate_element_init_1 (elt, init, seq_p);
2838 pop_gimplify_context (NULL);
2839
2840 /* The replacement can expose previously unreferenced variables. */
2841 if (ret && *seq_p)
2842 {
2843 gimple_stmt_iterator i;
2844
2845 for (i = gsi_start (*seq_p); !gsi_end_p (i); gsi_next (&i))
2846 find_new_referenced_vars (gsi_stmt (i));
2847 }
2848
2849 return ret;
2850 }
2851
2852 /* Insert a gimple_seq SEQ on all the outgoing edges out of BB. Note that
2853    if BB has more than one edge, SEQ will be replicated for each edge.
2854 Also, abnormal edges will be ignored. */
2855
2856 void
2857 insert_edge_copies_seq (gimple_seq seq, basic_block bb)
2858 {
2859 edge e;
2860 edge_iterator ei;
2861 unsigned n_copies = -1;
2862
2863 FOR_EACH_EDGE (e, ei, bb->succs)
2864 if (!(e->flags & EDGE_ABNORMAL))
2865 n_copies++;
2866
2867 FOR_EACH_EDGE (e, ei, bb->succs)
2868 if (!(e->flags & EDGE_ABNORMAL))
2869 gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
2870 }
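
/* Note on the counter above: N_COPIES starts at -1 so that with N
   suitable edges it ends the first loop at N - 1; the first N - 1
   insertions then use a fresh copy of SEQ and the last one reuses SEQ
   itself, saving one copy.  E.g. with three non-abnormal successor
   edges, edges one and two receive copies and edge three receives the
   original sequence.  */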
2871
2872 /* Helper function to insert SEQ before GSI, and set up line number info.  */
2873
2874 void
2875 sra_insert_before (gimple_stmt_iterator *gsi, gimple_seq seq)
2876 {
2877 gimple stmt = gsi_stmt (*gsi);
2878
2879 if (gimple_has_location (stmt))
2880 annotate_all_with_location (seq, gimple_location (stmt));
2881 gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
2882 }
2883
2884 /* Similarly, but insert after GSI. Handles insertion onto edges as well. */
2885
2886 void
2887 sra_insert_after (gimple_stmt_iterator *gsi, gimple_seq seq)
2888 {
2889 gimple stmt = gsi_stmt (*gsi);
2890
2891 if (gimple_has_location (stmt))
2892 annotate_all_with_location (seq, gimple_location (stmt));
2893
2894 if (stmt_ends_bb_p (stmt))
2895 insert_edge_copies_seq (seq, gsi_bb (*gsi));
2896 else
2897 gsi_insert_seq_after (gsi, seq, GSI_SAME_STMT);
2898 }
2899
2900 /* Similarly, but replace the statement at GSI. */
2901
2902 static void
2903 sra_replace (gimple_stmt_iterator *gsi, gimple_seq seq)
2904 {
2905 sra_insert_before (gsi, seq);
2906 gsi_remove (gsi, false);
2907 if (gsi_end_p (*gsi))
2908 *gsi = gsi_last (gsi_seq (*gsi));
2909 else
2910 gsi_prev (gsi);
2911 }
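
/* After the gsi_remove above, *GSI points just past the removed
   statement; stepping back (or taking the last statement when at the
   end of the sequence) leaves *GSI on the final statement of the
   inserted replacement.  */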
2912
2913 /* Data structure that bitfield_overlaps_p fills in with information
2914 about the element passed in and how much of it overlaps with the
2915    bit-range passed to it.  */
2916
2917 struct bitfield_overlap_info
2918 {
2919 /* The bit-length of an element. */
2920 tree field_len;
2921
2922 /* The bit-position of the element in its parent. */
2923 tree field_pos;
2924
2925 /* The number of bits of the element that overlap with the incoming
2926 bit range. */
2927 tree overlap_len;
2928
2929 /* The first bit of the element that overlaps with the incoming bit
2930 range. */
2931 tree overlap_pos;
2932 };
2933
2934 /* Return true if a BIT_FIELD_REF<(FLD->parent), BLEN, BPOS>
2935 expression (referenced as BF below) accesses any of the bits in FLD,
2936 false if it doesn't. If DATA is non-null, its field_len and
2937 field_pos are filled in such that BIT_FIELD_REF<(FLD->parent),
2938 field_len, field_pos> (referenced as BFLD below) represents the
2939 entire field FLD->element, and BIT_FIELD_REF<BFLD, overlap_len,
2940 overlap_pos> represents the portion of the entire field that
2941 overlaps with BF. */
2942
2943 static bool
2944 bitfield_overlaps_p (tree blen, tree bpos, struct sra_elt *fld,
2945 struct bitfield_overlap_info *data)
2946 {
2947 tree flen, fpos;
2948 bool ret;
2949
2950 if (TREE_CODE (fld->element) == FIELD_DECL)
2951 {
2952 flen = fold_convert (bitsizetype, DECL_SIZE (fld->element));
2953 fpos = fold_convert (bitsizetype, DECL_FIELD_OFFSET (fld->element));
2954 fpos = size_binop (MULT_EXPR, fpos, bitsize_int (BITS_PER_UNIT));
2955 fpos = size_binop (PLUS_EXPR, fpos, DECL_FIELD_BIT_OFFSET (fld->element));
2956 }
2957 else if (TREE_CODE (fld->element) == BIT_FIELD_REF)
2958 {
2959 flen = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 1));
2960 fpos = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 2));
2961 }
2962 else if (TREE_CODE (fld->element) == INTEGER_CST)
2963 {
2964 tree domain_type = TYPE_DOMAIN (TREE_TYPE (fld->parent->element));
2965 flen = fold_convert (bitsizetype, TYPE_SIZE (fld->type));
2966 fpos = fold_convert (bitsizetype, fld->element);
2967 if (domain_type && TYPE_MIN_VALUE (domain_type))
2968 fpos = size_binop (MINUS_EXPR, fpos,
2969 fold_convert (bitsizetype,
2970 TYPE_MIN_VALUE (domain_type)));
2971 fpos = size_binop (MULT_EXPR, flen, fpos);
2972 }
2973 else
2974 gcc_unreachable ();
2975
2976 gcc_assert (host_integerp (blen, 1)
2977 && host_integerp (bpos, 1)
2978 && host_integerp (flen, 1)
2979 && host_integerp (fpos, 1));
2980
2981 ret = ((!tree_int_cst_lt (fpos, bpos)
2982 && tree_int_cst_lt (size_binop (MINUS_EXPR, fpos, bpos),
2983 blen))
2984 || (!tree_int_cst_lt (bpos, fpos)
2985 && tree_int_cst_lt (size_binop (MINUS_EXPR, bpos, fpos),
2986 flen)));
2987
2988 if (!ret)
2989 return ret;
2990
2991 if (data)
2992 {
2993 tree bend, fend;
2994
2995 data->field_len = flen;
2996 data->field_pos = fpos;
2997
2998 fend = size_binop (PLUS_EXPR, fpos, flen);
2999 bend = size_binop (PLUS_EXPR, bpos, blen);
3000
3001 if (tree_int_cst_lt (bend, fend))
3002 data->overlap_len = size_binop (MINUS_EXPR, bend, fpos);
3003 else
3004 data->overlap_len = NULL;
3005
3006 if (tree_int_cst_lt (fpos, bpos))
3007 {
3008 data->overlap_pos = size_binop (MINUS_EXPR, bpos, fpos);
3009 data->overlap_len = size_binop (MINUS_EXPR,
3010 data->overlap_len
3011 ? data->overlap_len
3012 : data->field_len,
3013 data->overlap_pos);
3014 }
3015 else
3016 data->overlap_pos = NULL;
3017 }
3018
3019 return ret;
3020 }
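
/* Worked example (illustrative): for a field with FPOS == 8 and
   FLEN == 8, and an incoming range with BPOS == 12 and BLEN == 8, the
   ranges overlap.  BEND == 20 is not less than FEND == 16, so
   overlap_len starts out NULL; then FPOS < BPOS gives
   overlap_pos == 4 and overlap_len == field_len - 4 == 4, i.e. bits
   12..15 of the parent are bits 4..7 of the field.  */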
3021
3022 /* Add to *SEQ_P a sequence of statements that copies BLEN bits between
3023    VAR and the scalarized elements of ELT, starting at bit VPOS of VAR
3024 and at bit BPOS of ELT. The direction of the copy is given by
3025 TO_VAR. */
3026
3027 static void
3028 sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
3029 gimple_seq *seq_p, tree blen, tree bpos,
3030 struct sra_elt *elt)
3031 {
3032 struct sra_elt *fld;
3033 struct bitfield_overlap_info flp;
3034
3035 FOR_EACH_ACTUAL_CHILD (fld, elt)
3036 {
3037 tree flen, fpos;
3038
3039 if (!bitfield_overlaps_p (blen, bpos, fld, &flp))
3040 continue;
3041
3042 flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
3043 fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);
3044
3045 if (fld->replacement)
3046 {
3047 tree infld, invar, type;
3048 gimple_seq st;
3049
3050 infld = fld->replacement;
3051
3052 type = TREE_TYPE (infld);
3053 if (TYPE_PRECISION (type) != TREE_INT_CST_LOW (flen))
3054 type = lang_hooks.types.type_for_size (TREE_INT_CST_LOW (flen), 1);
3055 else
3056 type = unsigned_type_for (type);
3057
3058 if (TREE_CODE (infld) == BIT_FIELD_REF)
3059 {
3060 fpos = size_binop (PLUS_EXPR, fpos, TREE_OPERAND (infld, 2));
3061 infld = TREE_OPERAND (infld, 0);
3062 }
3063 else if (BYTES_BIG_ENDIAN && DECL_P (fld->element)
3064 && !tree_int_cst_equal (TYPE_SIZE (TREE_TYPE (infld)),
3065 DECL_SIZE (fld->element)))
3066 {
3067 fpos = size_binop (PLUS_EXPR, fpos,
3068 TYPE_SIZE (TREE_TYPE (infld)));
3069 fpos = size_binop (MINUS_EXPR, fpos,
3070 DECL_SIZE (fld->element));
3071 }
3072
3073 infld = fold_build3 (BIT_FIELD_REF, type, infld, flen, fpos);
3074
3075 invar = size_binop (MINUS_EXPR, flp.field_pos, bpos);
3076 if (flp.overlap_pos)
3077 invar = size_binop (PLUS_EXPR, invar, flp.overlap_pos);
3078 invar = size_binop (PLUS_EXPR, invar, vpos);
3079
3080 invar = fold_build3 (BIT_FIELD_REF, type, var, flen, invar);
3081
3082 if (to_var)
3083 st = sra_build_bf_assignment (invar, infld);
3084 else
3085 st = sra_build_bf_assignment (infld, invar);
3086
3087 gimple_seq_add_seq (seq_p, st);
3088 }
3089 else
3090 {
3091 tree sub = size_binop (MINUS_EXPR, flp.field_pos, bpos);
3092 sub = size_binop (PLUS_EXPR, vpos, sub);
3093 if (flp.overlap_pos)
3094 sub = size_binop (PLUS_EXPR, sub, flp.overlap_pos);
3095
3096 sra_explode_bitfield_assignment (var, sub, to_var, seq_p,
3097 flen, fpos, fld);
3098 }
3099 }
3100 }
3101
3102 /* Add to *SEQ_BEFORE_P statements that copy scalarized members of ELT
3103    that overlap with BIT_FIELD_REF<(ELT->element), BLEN, BPOS> back
3104    into the full variable, and to *SEQ_AFTER_P, if non-NULL, statements
3105 that copy the (presumably modified) overlapping portions of the
3106 full variable back to the scalarized variables. */
3107
3108 static void
3109 sra_sync_for_bitfield_assignment (gimple_seq *seq_before_p,
3110 gimple_seq *seq_after_p,
3111 tree blen, tree bpos,
3112 struct sra_elt *elt)
3113 {
3114 struct sra_elt *fld;
3115 struct bitfield_overlap_info flp;
3116
3117 FOR_EACH_ACTUAL_CHILD (fld, elt)
3118 if (bitfield_overlaps_p (blen, bpos, fld, &flp))
3119 {
3120 if (fld->replacement || (!flp.overlap_len && !flp.overlap_pos))
3121 {
3122 generate_copy_inout (fld, false, generate_element_ref (fld),
3123 seq_before_p);
3124 mark_no_warning (fld);
3125 if (seq_after_p)
3126 generate_copy_inout (fld, true, generate_element_ref (fld),
3127 seq_after_p);
3128 }
3129 else
3130 {
3131 tree flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
3132 tree fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);
3133
3134 sra_sync_for_bitfield_assignment (seq_before_p, seq_after_p,
3135 flen, fpos, fld);
3136 }
3137 }
3138 }
3139
3140 /* Scalarize a USE. To recap, this is either a simple reference to ELT,
3141    if ELT is scalar, or some occurrence of ELT that requires a complete
3142 aggregate. IS_OUTPUT is true if ELT is being modified. */
3143
3144 static void
3145 scalarize_use (struct sra_elt *elt, tree *expr_p, gimple_stmt_iterator *gsi,
3146 bool is_output, bool use_all)
3147 {
3148 gimple stmt = gsi_stmt (*gsi);
3149 tree bfexpr;
3150
3151 if (elt->replacement)
3152 {
3153 tree replacement = elt->replacement;
3154
3155 /* If we have a replacement, then updating the reference is as
3156 simple as modifying the existing statement in place. */
3157 if (is_output
3158 && TREE_CODE (elt->replacement) == BIT_FIELD_REF
3159 && is_gimple_reg (TREE_OPERAND (elt->replacement, 0))
3160 && is_gimple_assign (stmt)
3161 && gimple_assign_lhs_ptr (stmt) == expr_p)
3162 {
3163 gimple_seq newseq;
3164 /* RHS must be a single operand. */
3165 gcc_assert (gimple_assign_single_p (stmt));
3166 newseq = sra_build_elt_assignment (elt, gimple_assign_rhs1 (stmt));
3167 sra_replace (gsi, newseq);
3168 return;
3169 }
3170 else if (!is_output
3171 && TREE_CODE (elt->replacement) == BIT_FIELD_REF
3172 && is_gimple_assign (stmt)
3173 && gimple_assign_rhs1_ptr (stmt) == expr_p)
3174 {
3175 tree tmp = make_rename_temp
3176 (TREE_TYPE (gimple_assign_lhs (stmt)), "SR");
3177 gimple_seq newseq = sra_build_assignment (tmp, REPLDUP (elt->replacement));
3178
3179 sra_insert_before (gsi, newseq);
3180 replacement = tmp;
3181 }
3182 if (is_output)
3183 mark_all_v_defs_stmt (stmt);
3184 *expr_p = REPLDUP (replacement);
3185 update_stmt (stmt);
3186 }
3187 else if (use_all && is_output
3188 && is_gimple_assign (stmt)
3189 && TREE_CODE (bfexpr
3190 = gimple_assign_lhs (stmt)) == BIT_FIELD_REF
3191 && &TREE_OPERAND (bfexpr, 0) == expr_p
3192 && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
3193 && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
3194 {
3195 gimple_seq seq_before = NULL;
3196 gimple_seq seq_after = NULL;
3197 tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
3198 tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
3199 bool update = false;
3200
3201 if (!elt->use_block_copy)
3202 {
3203 tree type = TREE_TYPE (bfexpr);
3204 tree var = make_rename_temp (type, "SR"), tmp, vpos;
3205 gimple st;
3206
3207 gimple_assign_set_lhs (stmt, var);
3208 update = true;
3209
3210 if (!TYPE_UNSIGNED (type))
3211 {
3212 type = unsigned_type_for (type);
3213 tmp = make_rename_temp (type, "SR");
3214 st = gimple_build_assign (tmp, fold_convert (type, var));
3215 gimple_seq_add_stmt (&seq_after, st);
3216 var = tmp;
3217 }
3218
3219 /* If VAR is wider than BLEN bits, it is padded at the
3220 most-significant end. We want to set VPOS such that
3221 <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
3222 least-significant BLEN bits of VAR. */
3223 if (BYTES_BIG_ENDIAN)
3224 vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
3225 else
3226 vpos = bitsize_int (0);
3227 sra_explode_bitfield_assignment
3228 (var, vpos, false, &seq_after, blen, bpos, elt);
3229 }
3230 else
3231 sra_sync_for_bitfield_assignment
3232 (&seq_before, &seq_after, blen, bpos, elt);
3233
3234 if (seq_before)
3235 {
3236 mark_all_v_defs_seq (seq_before);
3237 sra_insert_before (gsi, seq_before);
3238 }
3239 if (seq_after)
3240 {
3241 mark_all_v_defs_seq (seq_after);
3242 sra_insert_after (gsi, seq_after);
3243 }
3244
3245 if (update)
3246 update_stmt (stmt);
3247 }
3248 else if (use_all && !is_output
3249 && is_gimple_assign (stmt)
3250 && TREE_CODE (bfexpr
3251 = gimple_assign_rhs1 (stmt)) == BIT_FIELD_REF
3252 && &TREE_OPERAND (gimple_assign_rhs1 (stmt), 0) == expr_p
3253 && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
3254 && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
3255 {
3256 gimple_seq seq = NULL;
3257 tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
3258 tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
3259 bool update = false;
3260
3261 if (!elt->use_block_copy)
3262 {
3263 tree type = TREE_TYPE (bfexpr);
3264 tree var, vpos;
3265
3266 if (!TYPE_UNSIGNED (type))
3267 type = unsigned_type_for (type);
3268
3269 var = make_rename_temp (type, "SR");
3270
3271 gimple_seq_add_stmt (&seq,
3272 gimple_build_assign
3273 (var, build_int_cst_wide (type, 0, 0)));
3274
3275 /* If VAR is wider than BLEN bits, it is padded at the
3276 most-significant end. We want to set VPOS such that
3277 <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
3278 least-significant BLEN bits of VAR. */
3279 if (BYTES_BIG_ENDIAN)
3280 vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
3281 else
3282 vpos = bitsize_int (0);
3283 sra_explode_bitfield_assignment
3284 (var, vpos, true, &seq, blen, bpos, elt);
3285
3286 gimple_assign_set_rhs1 (stmt, var);
3287 update = true;
3288 }
3289 else
3290 sra_sync_for_bitfield_assignment
3291 (&seq, NULL, blen, bpos, elt);
3292
3293 if (seq)
3294 {
3295 mark_all_v_defs_seq (seq);
3296 sra_insert_before (gsi, seq);
3297 }
3298
3299 if (update)
3300 update_stmt (stmt);
3301 }
3302 else
3303 {
3304 gimple_seq seq = NULL;
3305
3306 /* Otherwise we need some copies. If ELT is being read, then we
3307 want to store all (modified) sub-elements back into the
3308 structure before the reference takes place. If ELT is being
3309 written, then we want to load the changed values back into
3310 our shadow variables. */
3311 /* ??? We don't check modified for reads; we just always write all of
3312 the values. We should be able to record the SSA number of the VOP
3313 for which the values were last read. If that number matches the
3314 SSA number of the VOP in the current statement, then we needn't
3315 emit an assignment. This would also eliminate double writes when
3316 a structure is passed as more than one argument to a function call.
3317 This optimization would be most effective if sra_walk_function
3318 processed the blocks in dominator order. */
3319
3320 generate_copy_inout (elt, is_output, generate_element_ref (elt), &seq);
3321 if (seq == NULL)
3322 return;
3323 mark_all_v_defs_seq (seq);
3324 if (is_output)
3325 sra_insert_after (gsi, seq);
3326 else
3327 {
3328 sra_insert_before (gsi, seq);
3329 if (use_all)
3330 mark_no_warning (elt);
3331 }
3332 }
3333 }
3334
3335 /* Scalarize a COPY. To recap, this is an assignment statement between
3336 two scalarizable references, LHS_ELT and RHS_ELT. */
3337
3338 static void
3339 scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
3340 gimple_stmt_iterator *gsi)
3341 {
3342 gimple_seq seq;
3343 gimple stmt;
3344
3345 if (lhs_elt->replacement && rhs_elt->replacement)
3346 {
3347 /* If we have two scalar operands, modify the existing statement. */
3348 stmt = gsi_stmt (*gsi);
3349
3350 /* See the commentary in sra_walk_function concerning
3351 RETURN_EXPR, and why we should never see one here. */
3352 gcc_assert (is_gimple_assign (stmt));
3353 gcc_assert (gimple_assign_copy_p (stmt));
3354
3355
3356 gimple_assign_set_lhs (stmt, lhs_elt->replacement);
3357 gimple_assign_set_rhs1 (stmt, REPLDUP (rhs_elt->replacement));
3358 update_stmt (stmt);
3359 }
3360 else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
3361 {
3362 /* If either side requires a block copy, then sync the RHS back
3363 to the original structure, leave the original assignment
3364 statement (which will perform the block copy), then load the
3365 LHS values out of its now-updated original structure. */
3366 /* ??? Could perform a modified pair-wise element copy. That
3367 would at least allow those elements that are instantiated in
3368 both structures to be optimized well. */
3369
3370 seq = NULL;
3371 generate_copy_inout (rhs_elt, false,
3372 generate_element_ref (rhs_elt), &seq);
3373 if (seq)
3374 {
3375 mark_all_v_defs_seq (seq);
3376 sra_insert_before (gsi, seq);
3377 }
3378
3379 seq = NULL;
3380 generate_copy_inout (lhs_elt, true,
3381 generate_element_ref (lhs_elt), &seq);
3382 if (seq)
3383 {
3384 mark_all_v_defs_seq (seq);
3385 sra_insert_after (gsi, seq);
3386 }
3387 }
3388 else
3389 {
3390 /* Otherwise both sides must be fully instantiated. In which
3391 case perform pair-wise element assignments and replace the
3392 original block copy statement. */
3393
3394 stmt = gsi_stmt (*gsi);
3395 mark_all_v_defs_stmt (stmt);
3396
3397 seq = NULL;
3398 generate_element_copy (lhs_elt, rhs_elt, &seq);
3399 gcc_assert (seq);
3400 mark_all_v_defs_seq (seq);
3401 sra_replace (gsi, seq);
3402 }
3403 }
3404
3405 /* Scalarize an INIT. To recap, this is an assignment to a scalarizable
3406 reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
3407 COMPLEX_EXPR. If RHS is NULL, it should be treated as an empty
3408 CONSTRUCTOR. */
3409
3410 static void
3411 scalarize_init (struct sra_elt *lhs_elt, tree rhs, gimple_stmt_iterator *gsi)
3412 {
3413 bool result = true;
3414 gimple_seq seq = NULL, init_seq = NULL;
3415
3416 /* Generate initialization statements for all members extant in the RHS. */
3417 if (rhs)
3418 {
3419 /* Unshare the expression just in case this is from a decl's initial. */
3420 rhs = unshare_expr (rhs);
3421 result = generate_element_init (lhs_elt, rhs, &init_seq);
3422 }
3423
3424 if (!result)
3425 {
3426 /* If we failed to convert the entire initializer, then we must
3427 leave the structure assignment in place and must load values
3428 from the structure into the slots for which we did not find
3429 constants. The easiest way to do this is to generate a complete
3430 copy-out, and then follow that with the constant assignments
3431 that we were able to build. DCE will clean things up. */
3432 gimple_seq seq0 = NULL;
3433 generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
3434 &seq0);
3435 gimple_seq_add_seq (&seq0, seq);
3436 seq = seq0;
3437 }
3438 else
3439 {
3440 /* CONSTRUCTOR is defined such that any member not mentioned is assigned
3441 a zero value. Initialize the rest of the instantiated elements. */
3442 generate_element_zero (lhs_elt, &seq);
3443 gimple_seq_add_seq (&seq, init_seq);
3444 }
3445
3446 if (lhs_elt->use_block_copy || !result)
3447 {
3448 /* Since LHS is not fully instantiated, we must leave the structure
3449 assignment in place. Treating this case differently from a USE
3450 exposes constants to later optimizations. */
3451 if (seq)
3452 {
3453 mark_all_v_defs_seq (seq);
3454 sra_insert_after (gsi, seq);
3455 }
3456 }
3457 else
3458 {
3459 /* The LHS is fully instantiated. The list of initializations
3460 replaces the original structure assignment. */
3461 gcc_assert (seq);
3462 mark_all_v_defs_stmt (gsi_stmt (*gsi));
3463 mark_all_v_defs_seq (seq);
3464 sra_replace (gsi, seq);
3465 }
3466 }
3467
3468 /* A subroutine of scalarize_ldst called via walk_tree.  Set TREE_THIS_NOTRAP
3469 on all INDIRECT_REFs. */
3470
3471 static tree
3472 mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3473 {
3474 tree t = *tp;
3475
3476 if (TREE_CODE (t) == INDIRECT_REF)
3477 {
3478 TREE_THIS_NOTRAP (t) = 1;
3479 *walk_subtrees = 0;
3480 }
3481 else if (IS_TYPE_OR_DECL_P (t))
3482 *walk_subtrees = 0;
3483
3484 return NULL;
3485 }
3486
3487 /* Scalarize a LDST. To recap, this is an assignment between one scalarizable
3488 reference ELT and one non-scalarizable reference OTHER. IS_OUTPUT is true
3489 if ELT is on the left-hand side. */
3490
3491 static void
3492 scalarize_ldst (struct sra_elt *elt, tree other,
3493 gimple_stmt_iterator *gsi, bool is_output)
3494 {
3495 /* Shouldn't have gotten called for a scalar. */
3496 gcc_assert (!elt->replacement);
3497
3498 if (elt->use_block_copy)
3499 {
3500 /* Since ELT is not fully instantiated, we have to leave the
3501 block copy in place. Treat this as a USE. */
3502 scalarize_use (elt, NULL, gsi, is_output, false);
3503 }
3504 else
3505 {
3506 /* The interesting case is when ELT is fully instantiated. In this
3507 case we can have each element stored/loaded directly to/from the
3508 corresponding slot in OTHER. This avoids a block copy. */
3509
3510 gimple_seq seq = NULL;
3511 gimple stmt = gsi_stmt (*gsi);
3512
3513 mark_all_v_defs_stmt (stmt);
3514 generate_copy_inout (elt, is_output, other, &seq);
3515 gcc_assert (seq);
3516 mark_all_v_defs_seq (seq);
3517
3518 /* Preserve EH semantics. */
3519 if (stmt_ends_bb_p (stmt))
3520 {
3521 gimple_stmt_iterator si;
3522 gimple first;
3523 gimple_seq blist = NULL;
3524 bool thr = stmt_could_throw_p (stmt);
3525
3526 /* If the last statement of this BB created an EH edge
3527 before scalarization, we have to locate the first
3528 statement that can throw in the new statement list and
3529 use that as the last statement of this BB, such that EH
3530 semantics is preserved. All statements up to this one
3531 are added to the same BB. All other statements in the
3532 list will be added to normal outgoing edges of the same
3533 BB. If they access any memory, it's the same memory, so
3534 we can assume they won't throw. */
3535 si = gsi_start (seq);
3536 for (first = gsi_stmt (si);
3537 thr && !gsi_end_p (si) && !stmt_could_throw_p (first);
3538 first = gsi_stmt (si))
3539 {
3540 gsi_remove (&si, false);
3541 gimple_seq_add_stmt (&blist, first);
3542 }
3543
3544 /* Extract the first remaining statement from SEQ; this is
3545    the EH statement if there is one.  */
3546 gsi_remove (&si, false);
3547
3548 if (blist)
3549 sra_insert_before (gsi, blist);
3550
3551 /* Replace the old statement with this new representative. */
3552 gsi_replace (gsi, first, true);
3553
3554 if (!gsi_end_p (si))
3555 {
3556 /* If any reference would trap, then they all would. And more
3557 to the point, the first would. Therefore none of the rest
3558 will trap since the first didn't. Indicate this by
3559    iterating over the remaining statements and setting
3560 TREE_THIS_NOTRAP in all INDIRECT_REFs. */
3561 do
3562 {
3563 walk_gimple_stmt (&si, NULL, mark_notrap, NULL);
3564 gsi_next (&si);
3565 }
3566 while (!gsi_end_p (si));
3567
3568 insert_edge_copies_seq (seq, gsi_bb (*gsi));
3569 }
3570 }
3571 else
3572 sra_replace (gsi, seq);
3573 }
3574 }
3575
3576 /* Generate initializations for all scalarizable parameters. */
3577
3578 static void
3579 scalarize_parms (void)
3580 {
3581 gimple_seq seq = NULL;
3582 unsigned i;
3583 bitmap_iterator bi;
3584
3585 EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
3586 {
3587 tree var = referenced_var (i);
3588 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
3589 generate_copy_inout (elt, true, var, &seq);
3590 }
3591
3592 if (seq)
3593 {
3594 insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
3595 mark_all_v_defs_seq (seq);
3596 }
3597 }
3598
3599 /* Entry point to phase 4. Update the function to match replacements. */
3600
3601 static void
3602 scalarize_function (void)
3603 {
3604 static const struct sra_walk_fns fns = {
3605 scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
3606 };
3607
3608 sra_walk_function (&fns);
3609 scalarize_parms ();
3610 gsi_commit_edge_inserts ();
3611 }
3612
3613 \f
3614 /* Debug helper function. Print ELT in a nice human-readable format. */
3615
3616 static void
3617 dump_sra_elt_name (FILE *f, struct sra_elt *elt)
3618 {
3619 if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
3620 {
3621 fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
3622 dump_sra_elt_name (f, elt->parent);
3623 }
3624 else
3625 {
3626 if (elt->parent)
3627 dump_sra_elt_name (f, elt->parent);
3628 if (DECL_P (elt->element))
3629 {
3630 if (TREE_CODE (elt->element) == FIELD_DECL)
3631 fputc ('.', f);
3632 print_generic_expr (f, elt->element, dump_flags);
3633 }
3634 else if (TREE_CODE (elt->element) == BIT_FIELD_REF)
3635 fprintf (f, "$B" HOST_WIDE_INT_PRINT_DEC "F" HOST_WIDE_INT_PRINT_DEC,
3636 tree_low_cst (TREE_OPERAND (elt->element, 2), 1),
3637 tree_low_cst (TREE_OPERAND (elt->element, 1), 1));
3638 else if (TREE_CODE (elt->element) == RANGE_EXPR)
3639 fprintf (f, "["HOST_WIDE_INT_PRINT_DEC".."HOST_WIDE_INT_PRINT_DEC"]",
3640 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
3641 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
3642 else
3643 fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
3644 TREE_INT_CST_LOW (elt->element));
3645 }
3646 }
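
/* Example output (illustrative): a bit-field block covering bits
   8..23 of its parent is printed as "$B8F16"; a RANGE_EXPR covering
   array elements 0 through 3 is printed as "[0..3]".  */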
3647
3648 /* Likewise, but callable from the debugger. */
3649
3650 void
3651 debug_sra_elt_name (struct sra_elt *elt)
3652 {
3653 dump_sra_elt_name (stderr, elt);
3654 fputc ('\n', stderr);
3655 }
3656
3657 void
3658 sra_init_cache (void)
3659 {
3660 if (sra_type_decomp_cache)
3661 return;
3662
3663 sra_type_decomp_cache = BITMAP_ALLOC (NULL);
3664 sra_type_inst_cache = BITMAP_ALLOC (NULL);
3665 }
3666
3667
3668 /* Main entry point. */
3669
3670 static unsigned int
3671 tree_sra (void)
3672 {
3673 /* Initialize local variables. */
3674 todoflags = 0;
3675 gcc_obstack_init (&sra_obstack);
3676 sra_candidates = BITMAP_ALLOC (NULL);
3677 needs_copy_in = BITMAP_ALLOC (NULL);
3678 sra_init_cache ();
3679 sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);
3680
3681 /* Scan. If we find anything, instantiate and scalarize. */
3682 if (find_candidates_for_sra ())
3683 {
3684 scan_function ();
3685 decide_instantiations ();
3686 scalarize_function ();
3687 if (!bitmap_empty_p (sra_candidates))
3688 todoflags |= TODO_rebuild_alias;
3689 }
3690
3691 /* Free allocated memory. */
3692 htab_delete (sra_map);
3693 sra_map = NULL;
3694 BITMAP_FREE (sra_candidates);
3695 BITMAP_FREE (needs_copy_in);
3696 BITMAP_FREE (sra_type_decomp_cache);
3697 BITMAP_FREE (sra_type_inst_cache);
3698 obstack_free (&sra_obstack, NULL);
3699 return todoflags;
3700 }
3701
3702 static unsigned int
3703 tree_sra_early (void)
3704 {
3705 unsigned int ret;
3706
3707 early_sra = true;
3708 ret = tree_sra ();
3709 early_sra = false;
3710
3711 return ret & ~TODO_rebuild_alias;
3712 }
3713
3714 static bool
3715 gate_sra (void)
3716 {
3717 return flag_tree_sra != 0;
3718 }
3719
3720 struct gimple_opt_pass pass_sra_early =
3721 {
3722 {
3723 GIMPLE_PASS,
3724 "esra", /* name */
3725 gate_sra, /* gate */
3726 tree_sra_early, /* execute */
3727 NULL, /* sub */
3728 NULL, /* next */
3729 0, /* static_pass_number */
3730 TV_TREE_SRA, /* tv_id */
3731 PROP_cfg | PROP_ssa, /* properties_required */
3732 0, /* properties_provided */
3733 0, /* properties_destroyed */
3734 0, /* todo_flags_start */
3735 TODO_dump_func
3736 | TODO_update_ssa
3737 | TODO_ggc_collect
3738 | TODO_verify_ssa /* todo_flags_finish */
3739 }
3740 };
3741
3742 struct gimple_opt_pass pass_sra =
3743 {
3744 {
3745 GIMPLE_PASS,
3746 "sra", /* name */
3747 gate_sra, /* gate */
3748 tree_sra, /* execute */
3749 NULL, /* sub */
3750 NULL, /* next */
3751 0, /* static_pass_number */
3752 TV_TREE_SRA, /* tv_id */
3753 PROP_cfg | PROP_ssa, /* properties_required */
3754 0, /* properties_provided */
3755 0, /* properties_destroyed */
3756 0, /* todo_flags_start */
3757 TODO_dump_func
3758 | TODO_update_ssa
3759 | TODO_ggc_collect
3760 | TODO_verify_ssa /* todo_flags_finish */
3761 }
3762 };