1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "basic-block.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
47 #include "ssaexpand.h"
50 #include "insn-attr.h" /* For INSN_SCHEDULING. */
52 /* This variable holds information helping the rewriting of SSA trees
56 /* This variable holds the currently expanded gimple statement for purposes
57 of comminucating the profile info to the builtin expanders. */
58 gimple currently_expanding_gimple_stmt
;
60 static rtx
expand_debug_expr (tree
);
62 /* Return an expression tree corresponding to the RHS of GIMPLE
66 gimple_assign_rhs_to_tree (gimple stmt
)
69 enum gimple_rhs_class grhs_class
;
71 grhs_class
= get_gimple_rhs_class (gimple_expr_code (stmt
));
73 if (grhs_class
== GIMPLE_TERNARY_RHS
)
74 t
= build3 (gimple_assign_rhs_code (stmt
),
75 TREE_TYPE (gimple_assign_lhs (stmt
)),
76 gimple_assign_rhs1 (stmt
),
77 gimple_assign_rhs2 (stmt
),
78 gimple_assign_rhs3 (stmt
));
79 else if (grhs_class
== GIMPLE_BINARY_RHS
)
80 t
= build2 (gimple_assign_rhs_code (stmt
),
81 TREE_TYPE (gimple_assign_lhs (stmt
)),
82 gimple_assign_rhs1 (stmt
),
83 gimple_assign_rhs2 (stmt
));
84 else if (grhs_class
== GIMPLE_UNARY_RHS
)
85 t
= build1 (gimple_assign_rhs_code (stmt
),
86 TREE_TYPE (gimple_assign_lhs (stmt
)),
87 gimple_assign_rhs1 (stmt
));
88 else if (grhs_class
== GIMPLE_SINGLE_RHS
)
90 t
= gimple_assign_rhs1 (stmt
);
91 /* Avoid modifying this tree in place below. */
92 if ((gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (t
)
93 && gimple_location (stmt
) != EXPR_LOCATION (t
))
94 || (gimple_block (stmt
)
95 && currently_expanding_to_rtl
97 && gimple_block (stmt
) != TREE_BLOCK (t
)))
103 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (t
))
104 SET_EXPR_LOCATION (t
, gimple_location (stmt
));
105 if (gimple_block (stmt
) && currently_expanding_to_rtl
&& EXPR_P (t
))
106 TREE_BLOCK (t
) = gimple_block (stmt
);
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* Map an SSA name to its underlying base variable; any other decl is
   its own base.  */
#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
118 /* Associate declaration T with storage space X. If T is no
119 SSA name this is exactly SET_DECL_RTL, otherwise make the
120 partition of T associated with X. */
122 set_rtl (tree t
, rtx x
)
124 if (TREE_CODE (t
) == SSA_NAME
)
126 SA
.partition_to_pseudo
[var_to_partition (SA
.map
, t
)] = x
;
128 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t
), x
);
129 /* For the benefit of debug information at -O0 (where vartracking
130 doesn't run) record the place also in the base DECL if it's
131 a normal variable (not a parameter). */
132 if (x
&& x
!= pc_rtx
&& TREE_CODE (SSA_NAME_VAR (t
)) == VAR_DECL
)
134 tree var
= SSA_NAME_VAR (t
);
135 /* If we don't yet have something recorded, just record it now. */
136 if (!DECL_RTL_SET_P (var
))
137 SET_DECL_RTL (var
, x
);
138 /* If we have it set already to "multiple places" don't
140 else if (DECL_RTL (var
) == pc_rtx
)
142 /* If we have something recorded and it's not the same place
143 as we want to record now, we have multiple partitions for the
144 same base variable, with different places. We can't just
145 randomly chose one, hence we have to say that we don't know.
146 This only happens with optimization, and there var-tracking
147 will figure out the right thing. */
148 else if (DECL_RTL (var
) != x
)
149 SET_DECL_RTL (var
, pc_rtx
);
156 /* This structure holds data relevant to one variable that will be
157 placed in a stack slot. */
163 /* Initially, the size of the variable. Later, the size of the partition,
164 if this variable becomes it's partition's representative. */
167 /* The *byte* alignment required for this variable. Or as, with the
168 size, the alignment for this partition. */
171 /* The partition representative. */
172 size_t representative
;
174 /* The next stack variable in the partition, or EOC. */
177 /* The numbers of conflicting stack variables. */
181 #define EOC ((size_t)-1)
183 /* We have an array of such objects while deciding allocation. */
184 static struct stack_var
*stack_vars
;
185 static size_t stack_vars_alloc
;
186 static size_t stack_vars_num
;
187 static struct pointer_map_t
*decl_to_stack_part
;
189 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
190 is non-decreasing. */
191 static size_t *stack_vars_sorted
;
193 /* The phase of the stack frame. This is the known misalignment of
194 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
195 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
196 static int frame_phase
;
198 /* Used during expand_used_vars to remember if we saw any decls for
199 which we'd like to enable stack smashing protection. */
200 static bool has_protected_decls
;
202 /* Used during expand_used_vars. Remember if we say a character buffer
203 smaller than our cutoff threshold. Used for -Wstack-protector. */
204 static bool has_short_buffer
;
206 /* Compute the byte alignment to use for DECL. Ignore alignment
207 we can't do with expected alignment of the stack boundary. */
210 align_local_variable (tree decl
)
212 unsigned int align
= LOCAL_DECL_ALIGNMENT (decl
);
213 DECL_ALIGN (decl
) = align
;
214 return align
/ BITS_PER_UNIT
;
217 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
218 Return the frame offset. */
221 alloc_stack_frame_space (HOST_WIDE_INT size
, unsigned HOST_WIDE_INT align
)
223 HOST_WIDE_INT offset
, new_frame_offset
;
225 new_frame_offset
= frame_offset
;
226 if (FRAME_GROWS_DOWNWARD
)
228 new_frame_offset
-= size
+ frame_phase
;
229 new_frame_offset
&= -align
;
230 new_frame_offset
+= frame_phase
;
231 offset
= new_frame_offset
;
235 new_frame_offset
-= frame_phase
;
236 new_frame_offset
+= align
- 1;
237 new_frame_offset
&= -align
;
238 new_frame_offset
+= frame_phase
;
239 offset
= new_frame_offset
;
240 new_frame_offset
+= size
;
242 frame_offset
= new_frame_offset
;
244 if (frame_offset_overflow (frame_offset
, cfun
->decl
))
245 frame_offset
= offset
= 0;
250 /* Accumulate DECL into STACK_VARS. */
253 add_stack_var (tree decl
)
257 if (stack_vars_num
>= stack_vars_alloc
)
259 if (stack_vars_alloc
)
260 stack_vars_alloc
= stack_vars_alloc
* 3 / 2;
262 stack_vars_alloc
= 32;
264 = XRESIZEVEC (struct stack_var
, stack_vars
, stack_vars_alloc
);
266 if (!decl_to_stack_part
)
267 decl_to_stack_part
= pointer_map_create ();
269 v
= &stack_vars
[stack_vars_num
];
270 * (size_t *)pointer_map_insert (decl_to_stack_part
, decl
) = stack_vars_num
;
273 v
->size
= tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl
)), 1);
274 /* Ensure that all variables have size, so that &a != &b for any two
275 variables that are simultaneously live. */
278 v
->alignb
= align_local_variable (SSAVAR (decl
));
279 /* An alignment of zero can mightily confuse us later. */
280 gcc_assert (v
->alignb
!= 0);
282 /* All variables are initially in their own partition. */
283 v
->representative
= stack_vars_num
;
286 /* All variables initially conflict with no other. */
289 /* Ensure that this decl doesn't get put onto the list twice. */
290 set_rtl (decl
, pc_rtx
);
295 /* Make the decls associated with luid's X and Y conflict. */
298 add_stack_var_conflict (size_t x
, size_t y
)
300 struct stack_var
*a
= &stack_vars
[x
];
301 struct stack_var
*b
= &stack_vars
[y
];
303 a
->conflicts
= BITMAP_ALLOC (NULL
);
305 b
->conflicts
= BITMAP_ALLOC (NULL
);
306 bitmap_set_bit (a
->conflicts
, y
);
307 bitmap_set_bit (b
->conflicts
, x
);
310 /* Check whether the decls associated with luid's X and Y conflict. */
313 stack_var_conflict_p (size_t x
, size_t y
)
315 struct stack_var
*a
= &stack_vars
[x
];
316 struct stack_var
*b
= &stack_vars
[y
];
319 /* Partitions containing an SSA name result from gimple registers
320 with things like unsupported modes. They are top-level and
321 hence conflict with everything else. */
322 if (TREE_CODE (a
->decl
) == SSA_NAME
|| TREE_CODE (b
->decl
) == SSA_NAME
)
325 if (!a
->conflicts
|| !b
->conflicts
)
327 return bitmap_bit_p (a
->conflicts
, y
);
330 /* Returns true if TYPE is or contains a union type. */
333 aggregate_contains_union_type (tree type
)
337 if (TREE_CODE (type
) == UNION_TYPE
338 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
340 if (TREE_CODE (type
) == ARRAY_TYPE
)
341 return aggregate_contains_union_type (TREE_TYPE (type
));
342 if (TREE_CODE (type
) != RECORD_TYPE
)
345 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
346 if (TREE_CODE (field
) == FIELD_DECL
)
347 if (aggregate_contains_union_type (TREE_TYPE (field
)))
353 /* A subroutine of expand_used_vars. If two variables X and Y have alias
354 sets that do not conflict, then do add a conflict for these variables
355 in the interference graph. We also need to make sure to add conflicts
356 for union containing structures. Else RTL alias analysis comes along
357 and due to type based aliasing rules decides that for two overlapping
358 union temporaries { short s; int i; } accesses to the same mem through
359 different types may not alias and happily reorders stores across
360 life-time boundaries of the temporaries (See PR25654). */
363 add_alias_set_conflicts (void)
365 size_t i
, j
, n
= stack_vars_num
;
367 for (i
= 0; i
< n
; ++i
)
369 tree type_i
= TREE_TYPE (stack_vars
[i
].decl
);
370 bool aggr_i
= AGGREGATE_TYPE_P (type_i
);
373 contains_union
= aggregate_contains_union_type (type_i
);
374 for (j
= 0; j
< i
; ++j
)
376 tree type_j
= TREE_TYPE (stack_vars
[j
].decl
);
377 bool aggr_j
= AGGREGATE_TYPE_P (type_j
);
379 /* Either the objects conflict by means of type based
380 aliasing rules, or we need to add a conflict. */
381 || !objects_must_conflict_p (type_i
, type_j
)
382 /* In case the types do not conflict ensure that access
383 to elements will conflict. In case of unions we have
384 to be careful as type based aliasing rules may say
385 access to the same memory does not conflict. So play
386 safe and add a conflict in this case when
387 -fstrict-aliasing is used. */
388 || (contains_union
&& flag_strict_aliasing
))
389 add_stack_var_conflict (i
, j
);
394 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
395 enter its partition number into bitmap DATA. */
398 visit_op (gimple stmt ATTRIBUTE_UNUSED
, tree op
, void *data
)
400 bitmap active
= (bitmap
)data
;
401 op
= get_base_address (op
);
404 && DECL_RTL_IF_SET (op
) == pc_rtx
)
406 size_t *v
= (size_t *) pointer_map_contains (decl_to_stack_part
, op
);
408 bitmap_set_bit (active
, *v
);
413 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
414 record conflicts between it and all currently active other partitions
418 visit_conflict (gimple stmt ATTRIBUTE_UNUSED
, tree op
, void *data
)
420 bitmap active
= (bitmap
)data
;
421 op
= get_base_address (op
);
424 && DECL_RTL_IF_SET (op
) == pc_rtx
)
427 (size_t *) pointer_map_contains (decl_to_stack_part
, op
);
428 if (v
&& bitmap_set_bit (active
, *v
))
433 gcc_assert (num
< stack_vars_num
);
434 EXECUTE_IF_SET_IN_BITMAP (active
, 0, i
, bi
)
435 add_stack_var_conflict (num
, i
);
441 /* Helper routine for add_scope_conflicts, calculating the active partitions
442 at the end of BB, leaving the result in WORK. We're called to generate
443 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
447 add_scope_conflicts_1 (basic_block bb
, bitmap work
, bool for_conflict
)
451 gimple_stmt_iterator gsi
;
452 bool (*visit
)(gimple
, tree
, void *);
455 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
456 bitmap_ior_into (work
, (bitmap
)e
->src
->aux
);
460 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
462 gimple stmt
= gsi_stmt (gsi
);
463 walk_stmt_load_store_addr_ops (stmt
, work
, NULL
, NULL
, visit
);
465 for (gsi
= gsi_after_labels (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
467 gimple stmt
= gsi_stmt (gsi
);
469 if (gimple_clobber_p (stmt
))
471 tree lhs
= gimple_assign_lhs (stmt
);
473 /* Nested function lowering might introduce LHSs
474 that are COMPONENT_REFs. */
475 if (TREE_CODE (lhs
) != VAR_DECL
)
477 if (DECL_RTL_IF_SET (lhs
) == pc_rtx
479 pointer_map_contains (decl_to_stack_part
, lhs
)))
480 bitmap_clear_bit (work
, *v
);
482 else if (!is_gimple_debug (stmt
))
485 && visit
== visit_op
)
487 /* If this is the first real instruction in this BB we need
488 to add conflicts for everything live at this point now.
489 Unlike classical liveness for named objects we can't
490 rely on seeing a def/use of the names we're interested in.
491 There might merely be indirect loads/stores. We'd not add any
492 conflicts for such partitions. */
495 EXECUTE_IF_SET_IN_BITMAP (work
, 0, i
, bi
)
499 EXECUTE_IF_SET_IN_BITMAP (work
, i
+ 1, j
, bj
)
500 add_stack_var_conflict (i
, j
);
502 visit
= visit_conflict
;
504 walk_stmt_load_store_addr_ops (stmt
, work
, visit
, visit
, visit
);
509 /* Generate stack partition conflicts between all partitions that are
510 simultaneously live. */
513 add_scope_conflicts (void)
517 bitmap work
= BITMAP_ALLOC (NULL
);
519 /* We approximate the live range of a stack variable by taking the first
520 mention of its name as starting point(s), and by the end-of-scope
521 death clobber added by gimplify as ending point(s) of the range.
522 This overapproximates in the case we for instance moved an address-taken
523 operation upward, without also moving a dereference to it upwards.
524 But it's conservatively correct as a variable never can hold values
525 before its name is mentioned at least once.
527 We then do a mostly classical bitmap liveness algorithm. */
530 bb
->aux
= BITMAP_ALLOC (NULL
);
538 bitmap active
= (bitmap
)bb
->aux
;
539 add_scope_conflicts_1 (bb
, work
, false);
540 if (bitmap_ior_into (active
, work
))
546 add_scope_conflicts_1 (bb
, work
, true);
550 BITMAP_FREE (bb
->aux
);
553 /* A subroutine of partition_stack_vars. A comparison function for qsort,
554 sorting an array of indices by the properties of the object. */
557 stack_var_cmp (const void *a
, const void *b
)
559 size_t ia
= *(const size_t *)a
;
560 size_t ib
= *(const size_t *)b
;
561 unsigned int aligna
= stack_vars
[ia
].alignb
;
562 unsigned int alignb
= stack_vars
[ib
].alignb
;
563 HOST_WIDE_INT sizea
= stack_vars
[ia
].size
;
564 HOST_WIDE_INT sizeb
= stack_vars
[ib
].size
;
565 tree decla
= stack_vars
[ia
].decl
;
566 tree declb
= stack_vars
[ib
].decl
;
568 unsigned int uida
, uidb
;
570 /* Primary compare on "large" alignment. Large comes first. */
571 largea
= (aligna
* BITS_PER_UNIT
> MAX_SUPPORTED_STACK_ALIGNMENT
);
572 largeb
= (alignb
* BITS_PER_UNIT
> MAX_SUPPORTED_STACK_ALIGNMENT
);
573 if (largea
!= largeb
)
574 return (int)largeb
- (int)largea
;
576 /* Secondary compare on size, decreasing */
582 /* Tertiary compare on true alignment, decreasing. */
588 /* Final compare on ID for sort stability, increasing.
589 Two SSA names are compared by their version, SSA names come before
590 non-SSA names, and two normal decls are compared by their DECL_UID. */
591 if (TREE_CODE (decla
) == SSA_NAME
)
593 if (TREE_CODE (declb
) == SSA_NAME
)
594 uida
= SSA_NAME_VERSION (decla
), uidb
= SSA_NAME_VERSION (declb
);
598 else if (TREE_CODE (declb
) == SSA_NAME
)
601 uida
= DECL_UID (decla
), uidb
= DECL_UID (declb
);
610 /* If the points-to solution *PI points to variables that are in a partition
611 together with other variables add all partition members to the pointed-to
615 add_partitioned_vars_to_ptset (struct pt_solution
*pt
,
616 struct pointer_map_t
*decls_to_partitions
,
617 struct pointer_set_t
*visited
, bitmap temp
)
625 /* The pointed-to vars bitmap is shared, it is enough to
627 || pointer_set_insert(visited
, pt
->vars
))
632 /* By using a temporary bitmap to store all members of the partitions
633 we have to add we make sure to visit each of the partitions only
635 EXECUTE_IF_SET_IN_BITMAP (pt
->vars
, 0, i
, bi
)
637 || !bitmap_bit_p (temp
, i
))
638 && (part
= (bitmap
*) pointer_map_contains (decls_to_partitions
,
639 (void *)(size_t) i
)))
640 bitmap_ior_into (temp
, *part
);
641 if (!bitmap_empty_p (temp
))
642 bitmap_ior_into (pt
->vars
, temp
);
645 /* Update points-to sets based on partition info, so we can use them on RTL.
646 The bitmaps representing stack partitions will be saved until expand,
647 where partitioned decls used as bases in memory expressions will be
651 update_alias_info_with_stack_vars (void)
653 struct pointer_map_t
*decls_to_partitions
= NULL
;
655 tree var
= NULL_TREE
;
657 for (i
= 0; i
< stack_vars_num
; i
++)
661 struct ptr_info_def
*pi
;
663 /* Not interested in partitions with single variable. */
664 if (stack_vars
[i
].representative
!= i
665 || stack_vars
[i
].next
== EOC
)
668 if (!decls_to_partitions
)
670 decls_to_partitions
= pointer_map_create ();
671 cfun
->gimple_df
->decls_to_pointers
= pointer_map_create ();
674 /* Create an SSA_NAME that points to the partition for use
675 as base during alias-oracle queries on RTL for bases that
676 have been partitioned. */
677 if (var
== NULL_TREE
)
678 var
= create_tmp_var (ptr_type_node
, NULL
);
679 name
= make_ssa_name (var
, NULL
);
681 /* Create bitmaps representing partitions. They will be used for
682 points-to sets later, so use GGC alloc. */
683 part
= BITMAP_GGC_ALLOC ();
684 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
686 tree decl
= stack_vars
[j
].decl
;
687 unsigned int uid
= DECL_PT_UID (decl
);
688 /* We should never end up partitioning SSA names (though they
689 may end up on the stack). Neither should we allocate stack
690 space to something that is unused and thus unreferenced, except
691 for -O0 where we are preserving even unreferenced variables. */
692 gcc_assert (DECL_P (decl
)
694 || referenced_var_lookup (cfun
, DECL_UID (decl
))));
695 bitmap_set_bit (part
, uid
);
696 *((bitmap
*) pointer_map_insert (decls_to_partitions
,
697 (void *)(size_t) uid
)) = part
;
698 *((tree
*) pointer_map_insert (cfun
->gimple_df
->decls_to_pointers
,
702 /* Make the SSA name point to all partition members. */
703 pi
= get_ptr_info (name
);
704 pt_solution_set (&pi
->pt
, part
, false);
707 /* Make all points-to sets that contain one member of a partition
708 contain all members of the partition. */
709 if (decls_to_partitions
)
712 struct pointer_set_t
*visited
= pointer_set_create ();
713 bitmap temp
= BITMAP_ALLOC (NULL
);
715 for (i
= 1; i
< num_ssa_names
; i
++)
717 tree name
= ssa_name (i
);
718 struct ptr_info_def
*pi
;
721 && POINTER_TYPE_P (TREE_TYPE (name
))
722 && ((pi
= SSA_NAME_PTR_INFO (name
)) != NULL
))
723 add_partitioned_vars_to_ptset (&pi
->pt
, decls_to_partitions
,
727 add_partitioned_vars_to_ptset (&cfun
->gimple_df
->escaped
,
728 decls_to_partitions
, visited
, temp
);
730 pointer_set_destroy (visited
);
731 pointer_map_destroy (decls_to_partitions
);
736 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
737 partitioning algorithm. Partitions A and B are known to be non-conflicting.
738 Merge them into a single partition A. */
741 union_stack_vars (size_t a
, size_t b
)
743 struct stack_var
*vb
= &stack_vars
[b
];
747 gcc_assert (stack_vars
[b
].next
== EOC
);
748 /* Add B to A's partition. */
749 stack_vars
[b
].next
= stack_vars
[a
].next
;
750 stack_vars
[b
].representative
= a
;
751 stack_vars
[a
].next
= b
;
753 /* Update the required alignment of partition A to account for B. */
754 if (stack_vars
[a
].alignb
< stack_vars
[b
].alignb
)
755 stack_vars
[a
].alignb
= stack_vars
[b
].alignb
;
757 /* Update the interference graph and merge the conflicts. */
760 EXECUTE_IF_SET_IN_BITMAP (vb
->conflicts
, 0, u
, bi
)
761 add_stack_var_conflict (a
, stack_vars
[u
].representative
);
762 BITMAP_FREE (vb
->conflicts
);
766 /* A subroutine of expand_used_vars. Binpack the variables into
767 partitions constrained by the interference graph. The overall
768 algorithm used is as follows:
770 Sort the objects by size in descending order.
775 Look for the largest non-conflicting object B with size <= S.
782 partition_stack_vars (void)
784 size_t si
, sj
, n
= stack_vars_num
;
786 stack_vars_sorted
= XNEWVEC (size_t, stack_vars_num
);
787 for (si
= 0; si
< n
; ++si
)
788 stack_vars_sorted
[si
] = si
;
793 qsort (stack_vars_sorted
, n
, sizeof (size_t), stack_var_cmp
);
795 for (si
= 0; si
< n
; ++si
)
797 size_t i
= stack_vars_sorted
[si
];
798 unsigned int ialign
= stack_vars
[i
].alignb
;
800 /* Ignore objects that aren't partition representatives. If we
801 see a var that is not a partition representative, it must
802 have been merged earlier. */
803 if (stack_vars
[i
].representative
!= i
)
806 for (sj
= si
+ 1; sj
< n
; ++sj
)
808 size_t j
= stack_vars_sorted
[sj
];
809 unsigned int jalign
= stack_vars
[j
].alignb
;
811 /* Ignore objects that aren't partition representatives. */
812 if (stack_vars
[j
].representative
!= j
)
815 /* Ignore conflicting objects. */
816 if (stack_var_conflict_p (i
, j
))
819 /* Do not mix objects of "small" (supported) alignment
820 and "large" (unsupported) alignment. */
821 if ((ialign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
822 != (jalign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
))
825 /* UNION the objects, placing J at OFFSET. */
826 union_stack_vars (i
, j
);
830 update_alias_info_with_stack_vars ();
833 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
836 dump_stack_var_partition (void)
838 size_t si
, i
, j
, n
= stack_vars_num
;
840 for (si
= 0; si
< n
; ++si
)
842 i
= stack_vars_sorted
[si
];
844 /* Skip variables that aren't partition representatives, for now. */
845 if (stack_vars
[i
].representative
!= i
)
848 fprintf (dump_file
, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
849 " align %u\n", (unsigned long) i
, stack_vars
[i
].size
,
850 stack_vars
[i
].alignb
);
852 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
854 fputc ('\t', dump_file
);
855 print_generic_expr (dump_file
, stack_vars
[j
].decl
, dump_flags
);
857 fputc ('\n', dump_file
);
861 /* Assign rtl to DECL at BASE + OFFSET. */
864 expand_one_stack_var_at (tree decl
, rtx base
, unsigned base_align
,
865 HOST_WIDE_INT offset
)
870 /* If this fails, we've overflowed the stack frame. Error nicely? */
871 gcc_assert (offset
== trunc_int_for_mode (offset
, Pmode
));
873 x
= plus_constant (base
, offset
);
874 x
= gen_rtx_MEM (DECL_MODE (SSAVAR (decl
)), x
);
876 if (TREE_CODE (decl
) != SSA_NAME
)
878 /* Set alignment we actually gave this decl if it isn't an SSA name.
879 If it is we generate stack slots only accidentally so it isn't as
880 important, we'll simply use the alignment that is already set. */
881 if (base
== virtual_stack_vars_rtx
)
882 offset
-= frame_phase
;
883 align
= offset
& -offset
;
884 align
*= BITS_PER_UNIT
;
885 if (align
== 0 || align
> base_align
)
888 /* One would think that we could assert that we're not decreasing
889 alignment here, but (at least) the i386 port does exactly this
890 via the MINIMUM_ALIGNMENT hook. */
892 DECL_ALIGN (decl
) = align
;
893 DECL_USER_ALIGN (decl
) = 0;
896 set_mem_attributes (x
, SSAVAR (decl
), true);
900 /* A subroutine of expand_used_vars. Give each partition representative
901 a unique location within the stack frame. Update each partition member
902 with that location. */
905 expand_stack_vars (bool (*pred
) (tree
))
907 size_t si
, i
, j
, n
= stack_vars_num
;
908 HOST_WIDE_INT large_size
= 0, large_alloc
= 0;
909 rtx large_base
= NULL
;
910 unsigned large_align
= 0;
913 /* Determine if there are any variables requiring "large" alignment.
914 Since these are dynamically allocated, we only process these if
915 no predicate involved. */
916 large_align
= stack_vars
[stack_vars_sorted
[0]].alignb
* BITS_PER_UNIT
;
917 if (pred
== NULL
&& large_align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
919 /* Find the total size of these variables. */
920 for (si
= 0; si
< n
; ++si
)
924 i
= stack_vars_sorted
[si
];
925 alignb
= stack_vars
[i
].alignb
;
927 /* Stop when we get to the first decl with "small" alignment. */
928 if (alignb
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
931 /* Skip variables that aren't partition representatives. */
932 if (stack_vars
[i
].representative
!= i
)
935 /* Skip variables that have already had rtl assigned. See also
936 add_stack_var where we perpetrate this pc_rtx hack. */
937 decl
= stack_vars
[i
].decl
;
938 if ((TREE_CODE (decl
) == SSA_NAME
939 ? SA
.partition_to_pseudo
[var_to_partition (SA
.map
, decl
)]
940 : DECL_RTL (decl
)) != pc_rtx
)
943 large_size
+= alignb
- 1;
944 large_size
&= -(HOST_WIDE_INT
)alignb
;
945 large_size
+= stack_vars
[i
].size
;
948 /* If there were any, allocate space. */
950 large_base
= allocate_dynamic_stack_space (GEN_INT (large_size
), 0,
954 for (si
= 0; si
< n
; ++si
)
957 unsigned base_align
, alignb
;
958 HOST_WIDE_INT offset
;
960 i
= stack_vars_sorted
[si
];
962 /* Skip variables that aren't partition representatives, for now. */
963 if (stack_vars
[i
].representative
!= i
)
966 /* Skip variables that have already had rtl assigned. See also
967 add_stack_var where we perpetrate this pc_rtx hack. */
968 decl
= stack_vars
[i
].decl
;
969 if ((TREE_CODE (decl
) == SSA_NAME
970 ? SA
.partition_to_pseudo
[var_to_partition (SA
.map
, decl
)]
971 : DECL_RTL (decl
)) != pc_rtx
)
974 /* Check the predicate to see whether this variable should be
975 allocated in this pass. */
976 if (pred
&& !pred (decl
))
979 alignb
= stack_vars
[i
].alignb
;
980 if (alignb
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
982 offset
= alloc_stack_frame_space (stack_vars
[i
].size
, alignb
);
983 base
= virtual_stack_vars_rtx
;
984 base_align
= crtl
->max_used_stack_slot_alignment
;
988 /* Large alignment is only processed in the last pass. */
991 gcc_assert (large_base
!= NULL
);
993 large_alloc
+= alignb
- 1;
994 large_alloc
&= -(HOST_WIDE_INT
)alignb
;
995 offset
= large_alloc
;
996 large_alloc
+= stack_vars
[i
].size
;
999 base_align
= large_align
;
1002 /* Create rtl for each variable based on their location within the
1004 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1006 expand_one_stack_var_at (stack_vars
[j
].decl
,
1012 gcc_assert (large_alloc
== large_size
);
1015 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1016 static HOST_WIDE_INT
1017 account_stack_vars (void)
1019 size_t si
, j
, i
, n
= stack_vars_num
;
1020 HOST_WIDE_INT size
= 0;
1022 for (si
= 0; si
< n
; ++si
)
1024 i
= stack_vars_sorted
[si
];
1026 /* Skip variables that aren't partition representatives, for now. */
1027 if (stack_vars
[i
].representative
!= i
)
1030 size
+= stack_vars
[i
].size
;
1031 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1032 set_rtl (stack_vars
[j
].decl
, NULL
);
1037 /* A subroutine of expand_one_var. Called to immediately assign rtl
1038 to a variable to be allocated in the stack frame. */
1041 expand_one_stack_var (tree var
)
1043 HOST_WIDE_INT size
, offset
;
1044 unsigned byte_align
;
1046 size
= tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var
)), 1);
1047 byte_align
= align_local_variable (SSAVAR (var
));
1049 /* We handle highly aligned variables in expand_stack_vars. */
1050 gcc_assert (byte_align
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
);
1052 offset
= alloc_stack_frame_space (size
, byte_align
);
1054 expand_one_stack_var_at (var
, virtual_stack_vars_rtx
,
1055 crtl
->max_used_stack_slot_alignment
, offset
);
1058 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1059 that will reside in a hard register. */
1062 expand_one_hard_reg_var (tree var
)
1064 rest_of_decl_compilation (var
, 0, 0);
1067 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1068 that will reside in a pseudo register. */
1071 expand_one_register_var (tree var
)
1073 tree decl
= SSAVAR (var
);
1074 tree type
= TREE_TYPE (decl
);
1075 enum machine_mode reg_mode
= promote_decl_mode (decl
, NULL
);
1076 rtx x
= gen_reg_rtx (reg_mode
);
1080 /* Note if the object is a user variable. */
1081 if (!DECL_ARTIFICIAL (decl
))
1084 if (POINTER_TYPE_P (type
))
1085 mark_reg_pointer (x
, get_pointer_alignment (var
));
1088 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1089 has some associated error, e.g. its type is error-mark. We just need
1090 to pick something that won't crash the rest of the compiler. */
1093 expand_one_error_var (tree var
)
1095 enum machine_mode mode
= DECL_MODE (var
);
1098 if (mode
== BLKmode
)
1099 x
= gen_rtx_MEM (BLKmode
, const0_rtx
);
1100 else if (mode
== VOIDmode
)
1103 x
= gen_reg_rtx (mode
);
1105 SET_DECL_RTL (var
, x
);
1108 /* A subroutine of expand_one_var. VAR is a variable that will be
1109 allocated to the local stack frame. Return true if we wish to
1110 add VAR to STACK_VARS so that it will be coalesced with other
1111 variables. Return false to allocate VAR immediately.
1113 This function is used to reduce the number of variables considered
1114 for coalescing, which reduces the size of the quadratic problem. */
1117 defer_stack_allocation (tree var
, bool toplevel
)
1119 /* If stack protection is enabled, *all* stack variables must be deferred,
1120 so that we can re-order the strings to the top of the frame. */
1121 if (flag_stack_protect
)
1124 /* We handle "large" alignment via dynamic allocation. We want to handle
1125 this extra complication in only one place, so defer them. */
1126 if (DECL_ALIGN (var
) > MAX_SUPPORTED_STACK_ALIGNMENT
)
1129 /* Variables in the outermost scope automatically conflict with
1130 every other variable. The only reason to want to defer them
1131 at all is that, after sorting, we can more efficiently pack
1132 small variables in the stack frame. Continue to defer at -O2. */
1133 if (toplevel
&& optimize
< 2)
1136 /* Without optimization, *most* variables are allocated from the
1137 stack, which makes the quadratic problem large exactly when we
1138 want compilation to proceed as quickly as possible. On the
1139 other hand, we don't want the function's stack frame size to
1140 get completely out of hand. So we avoid adding scalars and
1141 "small" aggregates to the list at all. */
1142 if (optimize
== 0 && tree_low_cst (DECL_SIZE_UNIT (var
), 1) < 32)
1148 /* A subroutine of expand_used_vars. Expand one variable according to
1149 its flavor. Variables to be placed on the stack are not actually
1150 expanded yet, merely recorded.
1151 When REALLY_EXPAND is false, only add stack values to be allocated.
1152 Return stack usage this variable is supposed to take.
1155 static HOST_WIDE_INT
1156 expand_one_var (tree var
, bool toplevel
, bool really_expand
)
1158 unsigned int align
= BITS_PER_UNIT
;
1163 if (TREE_TYPE (var
) != error_mark_node
&& TREE_CODE (var
) == VAR_DECL
)
1165 /* Because we don't know if VAR will be in register or on stack,
1166 we conservatively assume it will be on stack even if VAR is
1167 eventually put into register after RA pass. For non-automatic
1168 variables, which won't be on stack, we collect alignment of
1169 type and ignore user specified alignment. */
1170 if (TREE_STATIC (var
) || DECL_EXTERNAL (var
))
1171 align
= MINIMUM_ALIGNMENT (TREE_TYPE (var
),
1172 TYPE_MODE (TREE_TYPE (var
)),
1173 TYPE_ALIGN (TREE_TYPE (var
)));
1174 else if (DECL_HAS_VALUE_EXPR_P (var
)
1175 || (DECL_RTL_SET_P (var
) && MEM_P (DECL_RTL (var
))))
1176 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1177 or variables which were assigned a stack slot already by
1178 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1179 changed from the offset chosen to it. */
1180 align
= crtl
->stack_alignment_estimated
;
1182 align
= MINIMUM_ALIGNMENT (var
, DECL_MODE (var
), DECL_ALIGN (var
));
1184 /* If the variable alignment is very large we'll dynamicaly allocate
1185 it, which means that in-frame portion is just a pointer. */
1186 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1187 align
= POINTER_SIZE
;
1190 if (SUPPORTS_STACK_ALIGNMENT
1191 && crtl
->stack_alignment_estimated
< align
)
1193 /* stack_alignment_estimated shouldn't change after stack
1194 realign decision made */
1195 gcc_assert(!crtl
->stack_realign_processed
);
1196 crtl
->stack_alignment_estimated
= align
;
1199 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1200 So here we only make sure stack_alignment_needed >= align. */
1201 if (crtl
->stack_alignment_needed
< align
)
1202 crtl
->stack_alignment_needed
= align
;
1203 if (crtl
->max_used_stack_slot_alignment
< align
)
1204 crtl
->max_used_stack_slot_alignment
= align
;
1206 if (TREE_CODE (origvar
) == SSA_NAME
)
1208 gcc_assert (TREE_CODE (var
) != VAR_DECL
1209 || (!DECL_EXTERNAL (var
)
1210 && !DECL_HAS_VALUE_EXPR_P (var
)
1211 && !TREE_STATIC (var
)
1212 && TREE_TYPE (var
) != error_mark_node
1213 && !DECL_HARD_REGISTER (var
)
1216 if (TREE_CODE (var
) != VAR_DECL
&& TREE_CODE (origvar
) != SSA_NAME
)
1218 else if (DECL_EXTERNAL (var
))
1220 else if (DECL_HAS_VALUE_EXPR_P (var
))
1222 else if (TREE_STATIC (var
))
1224 else if (TREE_CODE (origvar
) != SSA_NAME
&& DECL_RTL_SET_P (var
))
1226 else if (TREE_TYPE (var
) == error_mark_node
)
1229 expand_one_error_var (var
);
1231 else if (TREE_CODE (var
) == VAR_DECL
&& DECL_HARD_REGISTER (var
))
1234 expand_one_hard_reg_var (var
);
1236 else if (use_register_for_decl (var
))
1239 expand_one_register_var (origvar
);
1241 else if (!host_integerp (DECL_SIZE_UNIT (var
), 1))
1245 error ("size of variable %q+D is too large", var
);
1246 expand_one_error_var (var
);
1249 else if (defer_stack_allocation (var
, toplevel
))
1250 add_stack_var (origvar
);
1254 expand_one_stack_var (origvar
);
1255 return tree_low_cst (DECL_SIZE_UNIT (var
), 1);
1260 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1261 expanding variables. Those variables that can be put into registers
1262 are allocated pseudos; those that can't are put on the stack.
1264 TOPLEVEL is true if this is the outermost BLOCK. */
1267 expand_used_vars_for_block (tree block
, bool toplevel
)
1271 /* Expand all variables at this level. */
1272 for (t
= BLOCK_VARS (block
); t
; t
= DECL_CHAIN (t
))
1274 && ((TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != RESULT_DECL
)
1275 || !DECL_NONSHAREABLE (t
)))
1276 expand_one_var (t
, toplevel
, true);
1278 /* Expand all variables at containing levels. */
1279 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
1280 expand_used_vars_for_block (t
, false);
1283 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1284 and clear TREE_USED on all local variables. */
1287 clear_tree_used (tree block
)
1291 for (t
= BLOCK_VARS (block
); t
; t
= DECL_CHAIN (t
))
1292 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1293 if ((TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != RESULT_DECL
)
1294 || !DECL_NONSHAREABLE (t
))
1297 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
1298 clear_tree_used (t
);
1301 /* Examine TYPE and determine a bit mask of the following features. */
1303 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1304 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1305 #define SPCT_HAS_ARRAY 4
1306 #define SPCT_HAS_AGGREGATE 8
1309 stack_protect_classify_type (tree type
)
1311 unsigned int ret
= 0;
1314 switch (TREE_CODE (type
))
1317 t
= TYPE_MAIN_VARIANT (TREE_TYPE (type
));
1318 if (t
== char_type_node
1319 || t
== signed_char_type_node
1320 || t
== unsigned_char_type_node
)
1322 unsigned HOST_WIDE_INT max
= PARAM_VALUE (PARAM_SSP_BUFFER_SIZE
);
1323 unsigned HOST_WIDE_INT len
;
1325 if (!TYPE_SIZE_UNIT (type
)
1326 || !host_integerp (TYPE_SIZE_UNIT (type
), 1))
1329 len
= tree_low_cst (TYPE_SIZE_UNIT (type
), 1);
1332 ret
= SPCT_HAS_SMALL_CHAR_ARRAY
| SPCT_HAS_ARRAY
;
1334 ret
= SPCT_HAS_LARGE_CHAR_ARRAY
| SPCT_HAS_ARRAY
;
1337 ret
= SPCT_HAS_ARRAY
;
1341 case QUAL_UNION_TYPE
:
1343 ret
= SPCT_HAS_AGGREGATE
;
1344 for (t
= TYPE_FIELDS (type
); t
; t
= TREE_CHAIN (t
))
1345 if (TREE_CODE (t
) == FIELD_DECL
)
1346 ret
|= stack_protect_classify_type (TREE_TYPE (t
));
1356 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1357 part of the local stack frame. Remember if we ever return nonzero for
1358 any variable in this function. The return value is the phase number in
1359 which the variable should be allocated. */
1362 stack_protect_decl_phase (tree decl
)
1364 unsigned int bits
= stack_protect_classify_type (TREE_TYPE (decl
));
1367 if (bits
& SPCT_HAS_SMALL_CHAR_ARRAY
)
1368 has_short_buffer
= true;
1370 if (flag_stack_protect
== 2)
1372 if ((bits
& (SPCT_HAS_SMALL_CHAR_ARRAY
| SPCT_HAS_LARGE_CHAR_ARRAY
))
1373 && !(bits
& SPCT_HAS_AGGREGATE
))
1375 else if (bits
& SPCT_HAS_ARRAY
)
1379 ret
= (bits
& SPCT_HAS_LARGE_CHAR_ARRAY
) != 0;
1382 has_protected_decls
= true;
1387 /* Two helper routines that check for phase 1 and phase 2. These are used
1388 as callbacks for expand_stack_vars. */
1391 stack_protect_decl_phase_1 (tree decl
)
1393 return stack_protect_decl_phase (decl
) == 1;
1397 stack_protect_decl_phase_2 (tree decl
)
1399 return stack_protect_decl_phase (decl
) == 2;
1402 /* Ensure that variables in different stack protection phases conflict
1403 so that they are not merged and share the same stack slot. */
1406 add_stack_protection_conflicts (void)
1408 size_t i
, j
, n
= stack_vars_num
;
1409 unsigned char *phase
;
1411 phase
= XNEWVEC (unsigned char, n
);
1412 for (i
= 0; i
< n
; ++i
)
1413 phase
[i
] = stack_protect_decl_phase (stack_vars
[i
].decl
);
1415 for (i
= 0; i
< n
; ++i
)
1417 unsigned char ph_i
= phase
[i
];
1418 for (j
= 0; j
< i
; ++j
)
1419 if (ph_i
!= phase
[j
])
1420 add_stack_var_conflict (i
, j
);
1426 /* Create a decl for the guard at the top of the stack frame. */
1429 create_stack_guard (void)
1431 tree guard
= build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
1432 VAR_DECL
, NULL
, ptr_type_node
);
1433 TREE_THIS_VOLATILE (guard
) = 1;
1434 TREE_USED (guard
) = 1;
1435 expand_one_stack_var (guard
);
1436 crtl
->stack_protect_guard
= guard
;
1439 /* Prepare for expanding variables. */
1441 init_vars_expansion (void)
1445 /* Set TREE_USED on all variables in the local_decls. */
1446 FOR_EACH_LOCAL_DECL (cfun
, ix
, t
)
1449 /* Clear TREE_USED on all variables associated with a block scope. */
1450 clear_tree_used (DECL_INITIAL (current_function_decl
));
1452 /* Initialize local stack smashing state. */
1453 has_protected_decls
= false;
1454 has_short_buffer
= false;
1457 /* Free up stack variable graph data. */
1459 fini_vars_expansion (void)
1461 size_t i
, n
= stack_vars_num
;
1462 for (i
= 0; i
< n
; i
++)
1463 BITMAP_FREE (stack_vars
[i
].conflicts
);
1464 XDELETEVEC (stack_vars
);
1465 XDELETEVEC (stack_vars_sorted
);
1467 stack_vars_alloc
= stack_vars_num
= 0;
1468 pointer_map_destroy (decl_to_stack_part
);
1469 decl_to_stack_part
= NULL
;
1472 /* Make a fair guess for the size of the stack frame of the function
1473 in NODE. This doesn't have to be exact, the result is only used in
1474 the inline heuristics. So we don't want to run the full stack var
1475 packing algorithm (which is quadratic in the number of stack vars).
1476 Instead, we calculate the total size of all stack vars. This turns
1477 out to be a pretty fair estimate -- packing of stack vars doesn't
1478 happen very often. */
1481 estimated_stack_frame_size (struct cgraph_node
*node
)
1483 HOST_WIDE_INT size
= 0;
1486 tree old_cur_fun_decl
= current_function_decl
;
1487 referenced_var_iterator rvi
;
1488 struct function
*fn
= DECL_STRUCT_FUNCTION (node
->decl
);
1490 current_function_decl
= node
->decl
;
1493 gcc_checking_assert (gimple_referenced_vars (fn
));
1494 FOR_EACH_REFERENCED_VAR (fn
, var
, rvi
)
1495 size
+= expand_one_var (var
, true, false);
1497 if (stack_vars_num
> 0)
1499 /* Fake sorting the stack vars for account_stack_vars (). */
1500 stack_vars_sorted
= XNEWVEC (size_t, stack_vars_num
);
1501 for (i
= 0; i
< stack_vars_num
; ++i
)
1502 stack_vars_sorted
[i
] = i
;
1503 size
+= account_stack_vars ();
1504 fini_vars_expansion ();
1507 current_function_decl
= old_cur_fun_decl
;
1511 /* Expand all variables used in the function. */
1514 expand_used_vars (void)
1516 tree var
, outer_block
= DECL_INITIAL (current_function_decl
);
1517 VEC(tree
,heap
) *maybe_local_decls
= NULL
;
1521 /* Compute the phase of the stack frame for this function. */
1523 int align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
1524 int off
= STARTING_FRAME_OFFSET
% align
;
1525 frame_phase
= off
? align
- off
: 0;
1528 init_vars_expansion ();
1530 for (i
= 0; i
< SA
.map
->num_partitions
; i
++)
1532 tree var
= partition_to_var (SA
.map
, i
);
1534 gcc_assert (is_gimple_reg (var
));
1535 if (TREE_CODE (SSA_NAME_VAR (var
)) == VAR_DECL
)
1536 expand_one_var (var
, true, true);
1539 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1540 contain the default def (representing the parm or result itself)
1541 we don't do anything here. But those which don't contain the
1542 default def (representing a temporary based on the parm/result)
1543 we need to allocate space just like for normal VAR_DECLs. */
1544 if (!bitmap_bit_p (SA
.partition_has_default_def
, i
))
1546 expand_one_var (var
, true, true);
1547 gcc_assert (SA
.partition_to_pseudo
[i
]);
1552 /* At this point all variables on the local_decls with TREE_USED
1553 set are not associated with any block scope. Lay them out. */
1555 len
= VEC_length (tree
, cfun
->local_decls
);
1556 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
1558 bool expand_now
= false;
1560 /* Expanded above already. */
1561 if (is_gimple_reg (var
))
1563 TREE_USED (var
) = 0;
1566 /* We didn't set a block for static or extern because it's hard
1567 to tell the difference between a global variable (re)declared
1568 in a local scope, and one that's really declared there to
1569 begin with. And it doesn't really matter much, since we're
1570 not giving them stack space. Expand them now. */
1571 else if (TREE_STATIC (var
) || DECL_EXTERNAL (var
))
1574 /* If the variable is not associated with any block, then it
1575 was created by the optimizers, and could be live anywhere
1577 else if (TREE_USED (var
))
1580 /* Finally, mark all variables on the list as used. We'll use
1581 this in a moment when we expand those associated with scopes. */
1582 TREE_USED (var
) = 1;
1585 expand_one_var (var
, true, true);
1588 if (DECL_ARTIFICIAL (var
) && !DECL_IGNORED_P (var
))
1590 rtx rtl
= DECL_RTL_IF_SET (var
);
1592 /* Keep artificial non-ignored vars in cfun->local_decls
1593 chain until instantiate_decls. */
1594 if (rtl
&& (MEM_P (rtl
) || GET_CODE (rtl
) == CONCAT
))
1595 add_local_decl (cfun
, var
);
1596 else if (rtl
== NULL_RTX
)
1597 /* If rtl isn't set yet, which can happen e.g. with
1598 -fstack-protector, retry before returning from this
1600 VEC_safe_push (tree
, heap
, maybe_local_decls
, var
);
1604 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1606 +-----------------+-----------------+
1607 | ...processed... | ...duplicates...|
1608 +-----------------+-----------------+
1610 +-- LEN points here.
1612 We just want the duplicates, as those are the artificial
1613 non-ignored vars that we want to keep until instantiate_decls.
1614 Move them down and truncate the array. */
1615 if (!VEC_empty (tree
, cfun
->local_decls
))
1616 VEC_block_remove (tree
, cfun
->local_decls
, 0, len
);
1618 /* At this point, all variables within the block tree with TREE_USED
1619 set are actually used by the optimized function. Lay them out. */
1620 expand_used_vars_for_block (outer_block
, true);
1622 if (stack_vars_num
> 0)
1624 add_scope_conflicts ();
1625 /* Due to the way alias sets work, no variables with non-conflicting
1626 alias sets may be assigned the same address. Add conflicts to
1628 add_alias_set_conflicts ();
1630 /* If stack protection is enabled, we don't share space between
1631 vulnerable data and non-vulnerable data. */
1632 if (flag_stack_protect
)
1633 add_stack_protection_conflicts ();
1635 /* Now that we have collected all stack variables, and have computed a
1636 minimal interference graph, attempt to save some stack space. */
1637 partition_stack_vars ();
1639 dump_stack_var_partition ();
1642 /* There are several conditions under which we should create a
1643 stack guard: protect-all, alloca used, protected decls present. */
1644 if (flag_stack_protect
== 2
1645 || (flag_stack_protect
1646 && (cfun
->calls_alloca
|| has_protected_decls
)))
1647 create_stack_guard ();
1649 /* Assign rtl to each variable based on these partitions. */
1650 if (stack_vars_num
> 0)
1652 /* Reorder decls to be protected by iterating over the variables
1653 array multiple times, and allocating out of each phase in turn. */
1654 /* ??? We could probably integrate this into the qsort we did
1655 earlier, such that we naturally see these variables first,
1656 and thus naturally allocate things in the right order. */
1657 if (has_protected_decls
)
1659 /* Phase 1 contains only character arrays. */
1660 expand_stack_vars (stack_protect_decl_phase_1
);
1662 /* Phase 2 contains other kinds of arrays. */
1663 if (flag_stack_protect
== 2)
1664 expand_stack_vars (stack_protect_decl_phase_2
);
1667 expand_stack_vars (NULL
);
1669 fini_vars_expansion ();
1672 /* If there were any artificial non-ignored vars without rtl
1673 found earlier, see if deferred stack allocation hasn't assigned
1675 FOR_EACH_VEC_ELT_REVERSE (tree
, maybe_local_decls
, i
, var
)
1677 rtx rtl
= DECL_RTL_IF_SET (var
);
1679 /* Keep artificial non-ignored vars in cfun->local_decls
1680 chain until instantiate_decls. */
1681 if (rtl
&& (MEM_P (rtl
) || GET_CODE (rtl
) == CONCAT
))
1682 add_local_decl (cfun
, var
);
1684 VEC_free (tree
, heap
, maybe_local_decls
);
1686 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1687 if (STACK_ALIGNMENT_NEEDED
)
1689 HOST_WIDE_INT align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
1690 if (!FRAME_GROWS_DOWNWARD
)
1691 frame_offset
+= align
- 1;
1692 frame_offset
&= -align
;
1697 /* If we need to produce a detailed dump, print the tree representation
1698 for STMT to the dump file. SINCE is the last RTX after which the RTL
1699 generated for STMT should have been appended. */
1702 maybe_dump_rtl_for_gimple_stmt (gimple stmt
, rtx since
)
1704 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1706 fprintf (dump_file
, "\n;; ");
1707 print_gimple_stmt (dump_file
, stmt
, 0,
1708 TDF_SLIM
| (dump_flags
& TDF_LINENO
));
1709 fprintf (dump_file
, "\n");
1711 print_rtl (dump_file
, since
? NEXT_INSN (since
) : since
);
1715 /* Maps the blocks that do not contain tree labels to rtx labels. */
1717 static struct pointer_map_t
*lab_rtx_for_bb
;
1719 /* Returns the label_rtx expression for a label starting basic block BB. */
1722 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED
)
1724 gimple_stmt_iterator gsi
;
1729 if (bb
->flags
& BB_RTL
)
1730 return block_label (bb
);
1732 elt
= pointer_map_contains (lab_rtx_for_bb
, bb
);
1736 /* Find the tree label if it is present. */
1738 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1740 lab_stmt
= gsi_stmt (gsi
);
1741 if (gimple_code (lab_stmt
) != GIMPLE_LABEL
)
1744 lab
= gimple_label_label (lab_stmt
);
1745 if (DECL_NONLOCAL (lab
))
1748 return label_rtx (lab
);
1751 elt
= pointer_map_insert (lab_rtx_for_bb
, bb
);
1752 *elt
= gen_label_rtx ();
1757 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1758 of a basic block where we just expanded the conditional at the end,
1759 possibly clean up the CFG and instruction sequence. LAST is the
1760 last instruction before the just emitted jump sequence. */
1763 maybe_cleanup_end_of_block (edge e
, rtx last
)
1765 /* Special case: when jumpif decides that the condition is
1766 trivial it emits an unconditional jump (and the necessary
1767 barrier). But we still have two edges, the fallthru one is
1768 wrong. purge_dead_edges would clean this up later. Unfortunately
1769 we have to insert insns (and split edges) before
1770 find_many_sub_basic_blocks and hence before purge_dead_edges.
1771 But splitting edges might create new blocks which depend on the
1772 fact that if there are two edges there's no barrier. So the
1773 barrier would get lost and verify_flow_info would ICE. Instead
1774 of auditing all edge splitters to care for the barrier (which
1775 normally isn't there in a cleaned CFG), fix it here. */
1776 if (BARRIER_P (get_last_insn ()))
1780 /* Now, we have a single successor block, if we have insns to
1781 insert on the remaining edge we potentially will insert
1782 it at the end of this block (if the dest block isn't feasible)
1783 in order to avoid splitting the edge. This insertion will take
1784 place in front of the last jump. But we might have emitted
1785 multiple jumps (conditional and one unconditional) to the
1786 same destination. Inserting in front of the last one then
1787 is a problem. See PR 40021. We fix this by deleting all
1788 jumps except the last unconditional one. */
1789 insn
= PREV_INSN (get_last_insn ());
1790 /* Make sure we have an unconditional jump. Otherwise we're
1792 gcc_assert (JUMP_P (insn
) && !any_condjump_p (insn
));
1793 for (insn
= PREV_INSN (insn
); insn
!= last
;)
1795 insn
= PREV_INSN (insn
);
1796 if (JUMP_P (NEXT_INSN (insn
)))
1798 if (!any_condjump_p (NEXT_INSN (insn
)))
1800 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn
))));
1801 delete_insn (NEXT_INSN (NEXT_INSN (insn
)));
1803 delete_insn (NEXT_INSN (insn
));
1809 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1810 Returns a new basic block if we've terminated the current basic
1811 block and created a new one. */
1814 expand_gimple_cond (basic_block bb
, gimple stmt
)
1816 basic_block new_bb
, dest
;
1821 enum tree_code code
;
1824 code
= gimple_cond_code (stmt
);
1825 op0
= gimple_cond_lhs (stmt
);
1826 op1
= gimple_cond_rhs (stmt
);
1827 /* We're sometimes presented with such code:
1831 This would expand to two comparisons which then later might
1832 be cleaned up by combine. But some pattern matchers like if-conversion
1833 work better when there's only one compare, so make up for this
1834 here as special exception if TER would have made the same change. */
1835 if (gimple_cond_single_var_p (stmt
)
1837 && TREE_CODE (op0
) == SSA_NAME
1838 && bitmap_bit_p (SA
.values
, SSA_NAME_VERSION (op0
)))
1840 gimple second
= SSA_NAME_DEF_STMT (op0
);
1841 if (gimple_code (second
) == GIMPLE_ASSIGN
)
1843 enum tree_code code2
= gimple_assign_rhs_code (second
);
1844 if (TREE_CODE_CLASS (code2
) == tcc_comparison
)
1847 op0
= gimple_assign_rhs1 (second
);
1848 op1
= gimple_assign_rhs2 (second
);
1850 /* If jumps are cheap turn some more codes into
1852 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1854 if ((code2
== BIT_AND_EXPR
1855 && TYPE_PRECISION (TREE_TYPE (op0
)) == 1
1856 && TREE_CODE (gimple_assign_rhs2 (second
)) != INTEGER_CST
)
1857 || code2
== TRUTH_AND_EXPR
)
1859 code
= TRUTH_ANDIF_EXPR
;
1860 op0
= gimple_assign_rhs1 (second
);
1861 op1
= gimple_assign_rhs2 (second
);
1863 else if (code2
== BIT_IOR_EXPR
|| code2
== TRUTH_OR_EXPR
)
1865 code
= TRUTH_ORIF_EXPR
;
1866 op0
= gimple_assign_rhs1 (second
);
1867 op1
= gimple_assign_rhs2 (second
);
1873 last2
= last
= get_last_insn ();
1875 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
1876 set_curr_insn_source_location (gimple_location (stmt
));
1877 set_curr_insn_block (gimple_block (stmt
));
1879 /* These flags have no purpose in RTL land. */
1880 true_edge
->flags
&= ~EDGE_TRUE_VALUE
;
1881 false_edge
->flags
&= ~EDGE_FALSE_VALUE
;
1883 /* We can either have a pure conditional jump with one fallthru edge or
1884 two-way jump that needs to be decomposed into two basic blocks. */
1885 if (false_edge
->dest
== bb
->next_bb
)
1887 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
1888 true_edge
->probability
);
1889 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
1890 if (true_edge
->goto_locus
)
1892 set_curr_insn_source_location (true_edge
->goto_locus
);
1893 set_curr_insn_block (true_edge
->goto_block
);
1894 true_edge
->goto_locus
= curr_insn_locator ();
1896 true_edge
->goto_block
= NULL
;
1897 false_edge
->flags
|= EDGE_FALLTHRU
;
1898 maybe_cleanup_end_of_block (false_edge
, last
);
1901 if (true_edge
->dest
== bb
->next_bb
)
1903 jumpifnot_1 (code
, op0
, op1
, label_rtx_for_bb (false_edge
->dest
),
1904 false_edge
->probability
);
1905 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
1906 if (false_edge
->goto_locus
)
1908 set_curr_insn_source_location (false_edge
->goto_locus
);
1909 set_curr_insn_block (false_edge
->goto_block
);
1910 false_edge
->goto_locus
= curr_insn_locator ();
1912 false_edge
->goto_block
= NULL
;
1913 true_edge
->flags
|= EDGE_FALLTHRU
;
1914 maybe_cleanup_end_of_block (true_edge
, last
);
1918 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
1919 true_edge
->probability
);
1920 last
= get_last_insn ();
1921 if (false_edge
->goto_locus
)
1923 set_curr_insn_source_location (false_edge
->goto_locus
);
1924 set_curr_insn_block (false_edge
->goto_block
);
1925 false_edge
->goto_locus
= curr_insn_locator ();
1927 false_edge
->goto_block
= NULL
;
1928 emit_jump (label_rtx_for_bb (false_edge
->dest
));
1931 if (BARRIER_P (BB_END (bb
)))
1932 BB_END (bb
) = PREV_INSN (BB_END (bb
));
1933 update_bb_for_insn (bb
);
1935 new_bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
1936 dest
= false_edge
->dest
;
1937 redirect_edge_succ (false_edge
, new_bb
);
1938 false_edge
->flags
|= EDGE_FALLTHRU
;
1939 new_bb
->count
= false_edge
->count
;
1940 new_bb
->frequency
= EDGE_FREQUENCY (false_edge
);
1941 new_edge
= make_edge (new_bb
, dest
, 0);
1942 new_edge
->probability
= REG_BR_PROB_BASE
;
1943 new_edge
->count
= new_bb
->count
;
1944 if (BARRIER_P (BB_END (new_bb
)))
1945 BB_END (new_bb
) = PREV_INSN (BB_END (new_bb
));
1946 update_bb_for_insn (new_bb
);
1948 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
1950 if (true_edge
->goto_locus
)
1952 set_curr_insn_source_location (true_edge
->goto_locus
);
1953 set_curr_insn_block (true_edge
->goto_block
);
1954 true_edge
->goto_locus
= curr_insn_locator ();
1956 true_edge
->goto_block
= NULL
;
1961 /* Mark all calls that can have a transaction restart. */
1964 mark_transaction_restart_calls (gimple stmt
)
1966 struct tm_restart_node dummy
;
1969 if (!cfun
->gimple_df
->tm_restart
)
1973 slot
= htab_find_slot (cfun
->gimple_df
->tm_restart
, &dummy
, NO_INSERT
);
1976 struct tm_restart_node
*n
= (struct tm_restart_node
*) *slot
;
1977 tree list
= n
->label_or_list
;
1980 for (insn
= next_real_insn (get_last_insn ());
1982 insn
= next_real_insn (insn
))
1985 if (TREE_CODE (list
) == LABEL_DECL
)
1986 add_reg_note (insn
, REG_TM
, label_rtx (list
));
1988 for (; list
; list
= TREE_CHAIN (list
))
1989 add_reg_note (insn
, REG_TM
, label_rtx (TREE_VALUE (list
)));
1993 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1997 expand_call_stmt (gimple stmt
)
1999 tree exp
, decl
, lhs
;
2003 if (gimple_call_internal_p (stmt
))
2005 expand_internal_call (stmt
);
2009 exp
= build_vl_exp (CALL_EXPR
, gimple_call_num_args (stmt
) + 3);
2011 CALL_EXPR_FN (exp
) = gimple_call_fn (stmt
);
2012 decl
= gimple_call_fndecl (stmt
);
2013 builtin_p
= decl
&& DECL_BUILT_IN (decl
);
2015 /* If this is not a builtin function, the function type through which the
2016 call is made may be different from the type of the function. */
2019 = fold_convert (build_pointer_type (gimple_call_fntype (stmt
)),
2020 CALL_EXPR_FN (exp
));
2022 TREE_TYPE (exp
) = gimple_call_return_type (stmt
);
2023 CALL_EXPR_STATIC_CHAIN (exp
) = gimple_call_chain (stmt
);
2025 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
2027 tree arg
= gimple_call_arg (stmt
, i
);
2029 /* TER addresses into arguments of builtin functions so we have a
2030 chance to infer more correct alignment information. See PR39954. */
2032 && TREE_CODE (arg
) == SSA_NAME
2033 && (def
= get_gimple_for_ssa_name (arg
))
2034 && gimple_assign_rhs_code (def
) == ADDR_EXPR
)
2035 arg
= gimple_assign_rhs1 (def
);
2036 CALL_EXPR_ARG (exp
, i
) = arg
;
2039 if (gimple_has_side_effects (stmt
))
2040 TREE_SIDE_EFFECTS (exp
) = 1;
2042 if (gimple_call_nothrow_p (stmt
))
2043 TREE_NOTHROW (exp
) = 1;
2045 CALL_EXPR_TAILCALL (exp
) = gimple_call_tail_p (stmt
);
2046 CALL_EXPR_RETURN_SLOT_OPT (exp
) = gimple_call_return_slot_opt_p (stmt
);
2048 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
2049 && (DECL_FUNCTION_CODE (decl
) == BUILT_IN_ALLOCA
2050 || DECL_FUNCTION_CODE (decl
) == BUILT_IN_ALLOCA_WITH_ALIGN
))
2051 CALL_ALLOCA_FOR_VAR_P (exp
) = gimple_call_alloca_for_var_p (stmt
);
2053 CALL_FROM_THUNK_P (exp
) = gimple_call_from_thunk_p (stmt
);
2054 CALL_EXPR_VA_ARG_PACK (exp
) = gimple_call_va_arg_pack_p (stmt
);
2055 SET_EXPR_LOCATION (exp
, gimple_location (stmt
));
2056 TREE_BLOCK (exp
) = gimple_block (stmt
);
2058 /* Ensure RTL is created for debug args. */
2059 if (decl
&& DECL_HAS_DEBUG_ARGS_P (decl
))
2061 VEC(tree
, gc
) **debug_args
= decl_debug_args_lookup (decl
);
2066 for (ix
= 1; VEC_iterate (tree
, *debug_args
, ix
, dtemp
); ix
+= 2)
2068 gcc_assert (TREE_CODE (dtemp
) == DEBUG_EXPR_DECL
);
2069 expand_debug_expr (dtemp
);
2073 lhs
= gimple_call_lhs (stmt
);
2075 expand_assignment (lhs
, exp
, false);
2077 expand_expr_real_1 (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
, NULL
);
2079 mark_transaction_restart_calls (stmt
);
2082 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2083 STMT that doesn't require special handling for outgoing edges. That
2084 is no tailcalls and no GIMPLE_COND. */
2087 expand_gimple_stmt_1 (gimple stmt
)
2091 set_curr_insn_source_location (gimple_location (stmt
));
2092 set_curr_insn_block (gimple_block (stmt
));
2094 switch (gimple_code (stmt
))
2097 op0
= gimple_goto_dest (stmt
);
2098 if (TREE_CODE (op0
) == LABEL_DECL
)
2101 expand_computed_goto (op0
);
2104 expand_label (gimple_label_label (stmt
));
2107 case GIMPLE_PREDICT
:
2113 expand_asm_stmt (stmt
);
2116 expand_call_stmt (stmt
);
2120 op0
= gimple_return_retval (stmt
);
2122 if (op0
&& op0
!= error_mark_node
)
2124 tree result
= DECL_RESULT (current_function_decl
);
2126 /* If we are not returning the current function's RESULT_DECL,
2127 build an assignment to it. */
2130 /* I believe that a function's RESULT_DECL is unique. */
2131 gcc_assert (TREE_CODE (op0
) != RESULT_DECL
);
2133 /* ??? We'd like to use simply expand_assignment here,
2134 but this fails if the value is of BLKmode but the return
2135 decl is a register. expand_return has special handling
2136 for this combination, which eventually should move
2137 to common code. See comments there. Until then, let's
2138 build a modify expression :-/ */
2139 op0
= build2 (MODIFY_EXPR
, TREE_TYPE (result
),
2144 expand_null_return ();
2146 expand_return (op0
);
2151 tree lhs
= gimple_assign_lhs (stmt
);
2153 /* Tree expand used to fiddle with |= and &= of two bitfield
2154 COMPONENT_REFs here. This can't happen with gimple, the LHS
2155 of binary assigns must be a gimple reg. */
2157 if (TREE_CODE (lhs
) != SSA_NAME
2158 || get_gimple_rhs_class (gimple_expr_code (stmt
))
2159 == GIMPLE_SINGLE_RHS
)
2161 tree rhs
= gimple_assign_rhs1 (stmt
);
2162 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt
))
2163 == GIMPLE_SINGLE_RHS
);
2164 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (rhs
))
2165 SET_EXPR_LOCATION (rhs
, gimple_location (stmt
));
2166 if (TREE_CLOBBER_P (rhs
))
2167 /* This is a clobber to mark the going out of scope for
2171 expand_assignment (lhs
, rhs
,
2172 gimple_assign_nontemporal_move_p (stmt
));
2177 bool nontemporal
= gimple_assign_nontemporal_move_p (stmt
);
2178 struct separate_ops ops
;
2179 bool promoted
= false;
2181 target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
2182 if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
2185 ops
.code
= gimple_assign_rhs_code (stmt
);
2186 ops
.type
= TREE_TYPE (lhs
);
2187 switch (get_gimple_rhs_class (gimple_expr_code (stmt
)))
2189 case GIMPLE_TERNARY_RHS
:
2190 ops
.op2
= gimple_assign_rhs3 (stmt
);
2192 case GIMPLE_BINARY_RHS
:
2193 ops
.op1
= gimple_assign_rhs2 (stmt
);
2195 case GIMPLE_UNARY_RHS
:
2196 ops
.op0
= gimple_assign_rhs1 (stmt
);
2201 ops
.location
= gimple_location (stmt
);
2203 /* If we want to use a nontemporal store, force the value to
2204 register first. If we store into a promoted register,
2205 don't directly expand to target. */
2206 temp
= nontemporal
|| promoted
? NULL_RTX
: target
;
2207 temp
= expand_expr_real_2 (&ops
, temp
, GET_MODE (target
),
2214 int unsignedp
= SUBREG_PROMOTED_UNSIGNED_P (target
);
2215 /* If TEMP is a VOIDmode constant, use convert_modes to make
2216 sure that we properly convert it. */
2217 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
2219 temp
= convert_modes (GET_MODE (target
),
2220 TYPE_MODE (ops
.type
),
2222 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
2223 GET_MODE (target
), temp
, unsignedp
);
2226 convert_move (SUBREG_REG (target
), temp
, unsignedp
);
2228 else if (nontemporal
&& emit_storent_insn (target
, temp
))
2232 temp
= force_operand (temp
, target
);
2234 emit_move_insn (target
, temp
);
2245 /* Expand one gimple statement STMT and return the last RTL instruction
2246 before any of the newly generated ones.
2248 In addition to generating the necessary RTL instructions this also
2249 sets REG_EH_REGION notes if necessary and sets the current source
2250 location for diagnostics. */
2253 expand_gimple_stmt (gimple stmt
)
2255 location_t saved_location
= input_location
;
2256 rtx last
= get_last_insn ();
2261 /* We need to save and restore the current source location so that errors
2262 discovered during expansion are emitted with the right location. But
2263 it would be better if the diagnostic routines used the source location
2264 embedded in the tree nodes rather than globals. */
2265 if (gimple_has_location (stmt
))
2266 input_location
= gimple_location (stmt
);
2268 expand_gimple_stmt_1 (stmt
);
2270 /* Free any temporaries used to evaluate this statement. */
2273 input_location
= saved_location
;
2275 /* Mark all insns that may trap. */
2276 lp_nr
= lookup_stmt_eh_lp (stmt
);
2280 for (insn
= next_real_insn (last
); insn
;
2281 insn
= next_real_insn (insn
))
2283 if (! find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)
2284 /* If we want exceptions for non-call insns, any
2285 may_trap_p instruction may throw. */
2286 && GET_CODE (PATTERN (insn
)) != CLOBBER
2287 && GET_CODE (PATTERN (insn
)) != USE
2288 && insn_could_throw_p (insn
))
2289 make_reg_eh_region_note (insn
, 0, lp_nr
);
2296 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2297 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2298 generated a tail call (something that might be denied by the ABI
2299 rules governing the call; see calls.c).
2301 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2302 can still reach the rest of BB. The case here is __builtin_sqrt,
2303 where the NaN result goes through the external function (with a
2304 tailcall) and the normal result happens via a sqrt instruction. */
2307 expand_gimple_tailcall (basic_block bb
, gimple stmt
, bool *can_fallthru
)
2315 last2
= last
= expand_gimple_stmt (stmt
);
2317 for (last
= NEXT_INSN (last
); last
; last
= NEXT_INSN (last
))
2318 if (CALL_P (last
) && SIBLING_CALL_P (last
))
2321 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2323 *can_fallthru
= true;
2327 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2328 Any instructions emitted here are about to be deleted. */
2329 do_pending_stack_adjust ();
2331 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2332 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2333 EH or abnormal edges, we shouldn't have created a tail call in
2334 the first place. So it seems to me we should just be removing
2335 all edges here, or redirecting the existing fallthru edge to
2341 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
2343 if (!(e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)))
2345 if (e
->dest
!= EXIT_BLOCK_PTR
)
2347 e
->dest
->count
-= e
->count
;
2348 e
->dest
->frequency
-= EDGE_FREQUENCY (e
);
2349 if (e
->dest
->count
< 0)
2351 if (e
->dest
->frequency
< 0)
2352 e
->dest
->frequency
= 0;
2355 probability
+= e
->probability
;
2362 /* This is somewhat ugly: the call_expr expander often emits instructions
2363 after the sibcall (to perform the function return). These confuse the
2364 find_many_sub_basic_blocks code, so we need to get rid of these. */
2365 last
= NEXT_INSN (last
);
2366 gcc_assert (BARRIER_P (last
));
2368 *can_fallthru
= false;
2369 while (NEXT_INSN (last
))
2371 /* For instance an sqrt builtin expander expands if with
2372 sibcall in the then and label for `else`. */
2373 if (LABEL_P (NEXT_INSN (last
)))
2375 *can_fallthru
= true;
2378 delete_insn (NEXT_INSN (last
));
2381 e
= make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_ABNORMAL
| EDGE_SIBCALL
);
2382 e
->probability
+= probability
;
2385 update_bb_for_insn (bb
);
2387 if (NEXT_INSN (last
))
2389 bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
2392 if (BARRIER_P (last
))
2393 BB_END (bb
) = PREV_INSN (last
);
2396 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2401 /* Return the difference between the floor and the truncated result of
2402 a signed division by OP1 with remainder MOD. */
2404 floor_sdiv_adjust (enum machine_mode mode
, rtx mod
, rtx op1
)
2406 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2407 return gen_rtx_IF_THEN_ELSE
2408 (mode
, gen_rtx_NE (BImode
, mod
, const0_rtx
),
2409 gen_rtx_IF_THEN_ELSE
2410 (mode
, gen_rtx_LT (BImode
,
2411 gen_rtx_DIV (mode
, op1
, mod
),
2413 constm1_rtx
, const0_rtx
),
2417 /* Return the difference between the ceil and the truncated result of
2418 a signed division by OP1 with remainder MOD. */
2420 ceil_sdiv_adjust (enum machine_mode mode
, rtx mod
, rtx op1
)
2422 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2423 return gen_rtx_IF_THEN_ELSE
2424 (mode
, gen_rtx_NE (BImode
, mod
, const0_rtx
),
2425 gen_rtx_IF_THEN_ELSE
2426 (mode
, gen_rtx_GT (BImode
,
2427 gen_rtx_DIV (mode
, op1
, mod
),
2429 const1_rtx
, const0_rtx
),
2433 /* Return the difference between the ceil and the truncated result of
2434 an unsigned division by OP1 with remainder MOD. */
2436 ceil_udiv_adjust (enum machine_mode mode
, rtx mod
, rtx op1 ATTRIBUTE_UNUSED
)
2438 /* (mod != 0 ? 1 : 0) */
2439 return gen_rtx_IF_THEN_ELSE
2440 (mode
, gen_rtx_NE (BImode
, mod
, const0_rtx
),
2441 const1_rtx
, const0_rtx
);
2444 /* Return the difference between the rounded and the truncated result
2445 of a signed division by OP1 with remainder MOD. Halfway cases are
2446 rounded away from zero, rather than to the nearest even number. */
2448 round_sdiv_adjust (enum machine_mode mode
, rtx mod
, rtx op1
)
2450 /* (abs (mod) >= abs (op1) - abs (mod)
2451 ? (op1 / mod > 0 ? 1 : -1)
2453 return gen_rtx_IF_THEN_ELSE
2454 (mode
, gen_rtx_GE (BImode
, gen_rtx_ABS (mode
, mod
),
2455 gen_rtx_MINUS (mode
,
2456 gen_rtx_ABS (mode
, op1
),
2457 gen_rtx_ABS (mode
, mod
))),
2458 gen_rtx_IF_THEN_ELSE
2459 (mode
, gen_rtx_GT (BImode
,
2460 gen_rtx_DIV (mode
, op1
, mod
),
2462 const1_rtx
, constm1_rtx
),
2466 /* Return the difference between the rounded and the truncated result
2467 of a unsigned division by OP1 with remainder MOD. Halfway cases
2468 are rounded away from zero, rather than to the nearest even
2471 round_udiv_adjust (enum machine_mode mode
, rtx mod
, rtx op1
)
2473 /* (mod >= op1 - mod ? 1 : 0) */
2474 return gen_rtx_IF_THEN_ELSE
2475 (mode
, gen_rtx_GE (BImode
, mod
,
2476 gen_rtx_MINUS (mode
, op1
, mod
)),
2477 const1_rtx
, const0_rtx
);
2480 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2484 convert_debug_memory_address (enum machine_mode mode
, rtx x
,
2487 enum machine_mode xmode
= GET_MODE (x
);
2489 #ifndef POINTERS_EXTEND_UNSIGNED
2490 gcc_assert (mode
== Pmode
2491 || mode
== targetm
.addr_space
.address_mode (as
));
2492 gcc_assert (xmode
== mode
|| xmode
== VOIDmode
);
2496 gcc_assert (targetm
.addr_space
.valid_pointer_mode (mode
, as
));
2498 if (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
)
2501 if (GET_MODE_PRECISION (mode
) < GET_MODE_PRECISION (xmode
))
2502 x
= simplify_gen_subreg (mode
, x
, xmode
,
2503 subreg_lowpart_offset
2505 else if (POINTERS_EXTEND_UNSIGNED
> 0)
2506 x
= gen_rtx_ZERO_EXTEND (mode
, x
);
2507 else if (!POINTERS_EXTEND_UNSIGNED
)
2508 x
= gen_rtx_SIGN_EXTEND (mode
, x
);
2511 switch (GET_CODE (x
))
2514 if ((SUBREG_PROMOTED_VAR_P (x
)
2515 || (REG_P (SUBREG_REG (x
)) && REG_POINTER (SUBREG_REG (x
)))
2516 || (GET_CODE (SUBREG_REG (x
)) == PLUS
2517 && REG_P (XEXP (SUBREG_REG (x
), 0))
2518 && REG_POINTER (XEXP (SUBREG_REG (x
), 0))
2519 && CONST_INT_P (XEXP (SUBREG_REG (x
), 1))))
2520 && GET_MODE (SUBREG_REG (x
)) == mode
)
2521 return SUBREG_REG (x
);
2524 temp
= gen_rtx_LABEL_REF (mode
, XEXP (x
, 0));
2525 LABEL_REF_NONLOCAL_P (temp
) = LABEL_REF_NONLOCAL_P (x
);
2528 temp
= shallow_copy_rtx (x
);
2529 PUT_MODE (temp
, mode
);
2532 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
2534 temp
= gen_rtx_CONST (mode
, temp
);
2538 if (CONST_INT_P (XEXP (x
, 1)))
2540 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
2542 return gen_rtx_fmt_ee (GET_CODE (x
), mode
, temp
, XEXP (x
, 1));
2548 /* Don't know how to express ptr_extend as operation in debug info. */
2551 #endif /* POINTERS_EXTEND_UNSIGNED */
2556 /* Return an RTX equivalent to the value of the parameter DECL. */
2559 expand_debug_parm_decl (tree decl
)
2561 rtx incoming
= DECL_INCOMING_RTL (decl
);
2564 && GET_MODE (incoming
) != BLKmode
2565 && ((REG_P (incoming
) && HARD_REGISTER_P (incoming
))
2566 || (MEM_P (incoming
)
2567 && REG_P (XEXP (incoming
, 0))
2568 && HARD_REGISTER_P (XEXP (incoming
, 0)))))
2570 rtx rtl
= gen_rtx_ENTRY_VALUE (GET_MODE (incoming
));
2572 #ifdef HAVE_window_save
2573 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2574 If the target machine has an explicit window save instruction, the
2575 actual entry value is the corresponding OUTGOING_REGNO instead. */
2576 if (REG_P (incoming
)
2577 && OUTGOING_REGNO (REGNO (incoming
)) != REGNO (incoming
))
2579 = gen_rtx_REG_offset (incoming
, GET_MODE (incoming
),
2580 OUTGOING_REGNO (REGNO (incoming
)), 0);
2581 else if (MEM_P (incoming
))
2583 rtx reg
= XEXP (incoming
, 0);
2584 if (OUTGOING_REGNO (REGNO (reg
)) != REGNO (reg
))
2586 reg
= gen_raw_REG (GET_MODE (reg
), OUTGOING_REGNO (REGNO (reg
)));
2587 incoming
= replace_equiv_address_nv (incoming
, reg
);
2592 ENTRY_VALUE_EXP (rtl
) = incoming
;
2597 && GET_MODE (incoming
) != BLKmode
2598 && !TREE_ADDRESSABLE (decl
)
2600 && (XEXP (incoming
, 0) == virtual_incoming_args_rtx
2601 || (GET_CODE (XEXP (incoming
, 0)) == PLUS
2602 && XEXP (XEXP (incoming
, 0), 0) == virtual_incoming_args_rtx
2603 && CONST_INT_P (XEXP (XEXP (incoming
, 0), 1)))))
2609 /* Return an RTX equivalent to the value of the tree expression EXP. */
2612 expand_debug_expr (tree exp
)
2614 rtx op0
= NULL_RTX
, op1
= NULL_RTX
, op2
= NULL_RTX
;
2615 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
2616 enum machine_mode inner_mode
= VOIDmode
;
2617 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
2620 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
2622 case tcc_expression
:
2623 switch (TREE_CODE (exp
))
2627 case WIDEN_MULT_PLUS_EXPR
:
2628 case WIDEN_MULT_MINUS_EXPR
:
2632 case TRUTH_ANDIF_EXPR
:
2633 case TRUTH_ORIF_EXPR
:
2634 case TRUTH_AND_EXPR
:
2636 case TRUTH_XOR_EXPR
:
2639 case TRUTH_NOT_EXPR
:
2648 op2
= expand_debug_expr (TREE_OPERAND (exp
, 2));
2655 case tcc_comparison
:
2656 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
2663 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
2664 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
2674 case tcc_exceptional
:
2675 case tcc_declaration
:
2681 switch (TREE_CODE (exp
))
2684 if (!lookup_constant_def (exp
))
2686 if (strlen (TREE_STRING_POINTER (exp
)) + 1
2687 != (size_t) TREE_STRING_LENGTH (exp
))
2689 op0
= gen_rtx_CONST_STRING (Pmode
, TREE_STRING_POINTER (exp
));
2690 op0
= gen_rtx_MEM (BLKmode
, op0
);
2691 set_mem_attributes (op0
, exp
, 0);
2694 /* Fall through... */
2699 op0
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_INITIALIZER
);
2703 gcc_assert (COMPLEX_MODE_P (mode
));
2704 op0
= expand_debug_expr (TREE_REALPART (exp
));
2705 op1
= expand_debug_expr (TREE_IMAGPART (exp
));
2706 return gen_rtx_CONCAT (mode
, op0
, op1
);
2708 case DEBUG_EXPR_DECL
:
2709 op0
= DECL_RTL_IF_SET (exp
);
2714 op0
= gen_rtx_DEBUG_EXPR (mode
);
2715 DEBUG_EXPR_TREE_DECL (op0
) = exp
;
2716 SET_DECL_RTL (exp
, op0
);
2726 op0
= DECL_RTL_IF_SET (exp
);
2728 /* This decl was probably optimized away. */
2731 if (TREE_CODE (exp
) != VAR_DECL
2732 || DECL_EXTERNAL (exp
)
2733 || !TREE_STATIC (exp
)
2735 || DECL_HARD_REGISTER (exp
)
2736 || DECL_IN_CONSTANT_POOL (exp
)
2737 || mode
== VOIDmode
)
2740 op0
= make_decl_rtl_for_debug (exp
);
2742 || GET_CODE (XEXP (op0
, 0)) != SYMBOL_REF
2743 || SYMBOL_REF_DECL (XEXP (op0
, 0)) != exp
)
2747 op0
= copy_rtx (op0
);
2749 if (GET_MODE (op0
) == BLKmode
2750 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2751 below would ICE. While it is likely a FE bug,
2752 try to be robust here. See PR43166. */
2754 || (mode
== VOIDmode
&& GET_MODE (op0
) != VOIDmode
))
2756 gcc_assert (MEM_P (op0
));
2757 op0
= adjust_address_nv (op0
, mode
, 0);
2768 inner_mode
= GET_MODE (op0
);
2770 if (mode
== inner_mode
)
2773 if (inner_mode
== VOIDmode
)
2775 if (TREE_CODE (exp
) == SSA_NAME
)
2776 inner_mode
= TYPE_MODE (TREE_TYPE (exp
));
2778 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
2779 if (mode
== inner_mode
)
2783 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
2785 if (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (inner_mode
))
2786 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
2787 else if (GET_MODE_BITSIZE (mode
) < GET_MODE_BITSIZE (inner_mode
))
2788 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
2790 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
2792 else if (FLOAT_MODE_P (mode
))
2794 gcc_assert (TREE_CODE (exp
) != SSA_NAME
);
2795 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
2796 op0
= simplify_gen_unary (UNSIGNED_FLOAT
, mode
, op0
, inner_mode
);
2798 op0
= simplify_gen_unary (FLOAT
, mode
, op0
, inner_mode
);
2800 else if (FLOAT_MODE_P (inner_mode
))
2803 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
2805 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
2807 else if (CONSTANT_P (op0
)
2808 || GET_MODE_PRECISION (mode
) <= GET_MODE_PRECISION (inner_mode
))
2809 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
2810 subreg_lowpart_offset (mode
,
2812 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == tcc_unary
2813 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)))
2815 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
2817 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
2823 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
2825 tree newexp
= fold_binary (MEM_REF
, TREE_TYPE (exp
),
2826 TREE_OPERAND (exp
, 0),
2827 TREE_OPERAND (exp
, 1));
2829 return expand_debug_expr (newexp
);
2833 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
2837 if (TREE_CODE (exp
) == MEM_REF
)
2839 if (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
2840 || (GET_CODE (op0
) == PLUS
2841 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
))
2842 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2843 Instead just use get_inner_reference. */
2846 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
2847 if (!op1
|| !CONST_INT_P (op1
))
2850 op0
= plus_constant (op0
, INTVAL (op1
));
2853 if (POINTER_TYPE_P (TREE_TYPE (exp
)))
2854 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
2856 as
= ADDR_SPACE_GENERIC
;
2858 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
2860 if (op0
== NULL_RTX
)
2863 op0
= gen_rtx_MEM (mode
, op0
);
2864 set_mem_attributes (op0
, exp
, 0);
2865 if (TREE_CODE (exp
) == MEM_REF
2866 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
2867 set_mem_expr (op0
, NULL_TREE
);
2868 set_mem_addr_space (op0
, as
);
2872 case TARGET_MEM_REF
:
2873 if (TREE_CODE (TMR_BASE (exp
)) == ADDR_EXPR
2874 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp
), 0)))
2877 op0
= expand_debug_expr
2878 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp
)), exp
));
2882 if (POINTER_TYPE_P (TREE_TYPE (exp
)))
2883 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
2885 as
= ADDR_SPACE_GENERIC
;
2887 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
2889 if (op0
== NULL_RTX
)
2892 op0
= gen_rtx_MEM (mode
, op0
);
2894 set_mem_attributes (op0
, exp
, 0);
2895 set_mem_addr_space (op0
, as
);
2901 case ARRAY_RANGE_REF
:
2906 case VIEW_CONVERT_EXPR
:
2908 enum machine_mode mode1
;
2909 HOST_WIDE_INT bitsize
, bitpos
;
2912 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
2913 &mode1
, &unsignedp
, &volatilep
, false);
2919 orig_op0
= op0
= expand_debug_expr (tem
);
2926 enum machine_mode addrmode
, offmode
;
2931 op0
= XEXP (op0
, 0);
2932 addrmode
= GET_MODE (op0
);
2933 if (addrmode
== VOIDmode
)
2936 op1
= expand_debug_expr (offset
);
2940 offmode
= GET_MODE (op1
);
2941 if (offmode
== VOIDmode
)
2942 offmode
= TYPE_MODE (TREE_TYPE (offset
));
2944 if (addrmode
!= offmode
)
2945 op1
= simplify_gen_subreg (addrmode
, op1
, offmode
,
2946 subreg_lowpart_offset (addrmode
,
2949 /* Don't use offset_address here, we don't need a
2950 recognizable address, and we don't want to generate
2952 op0
= gen_rtx_MEM (mode
, simplify_gen_binary (PLUS
, addrmode
,
2958 if (mode1
== VOIDmode
)
2960 mode1
= smallest_mode_for_size (bitsize
, MODE_INT
);
2961 if (bitpos
>= BITS_PER_UNIT
)
2963 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
2964 bitpos
%= BITS_PER_UNIT
;
2966 else if (bitpos
< 0)
2969 = (-bitpos
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
;
2970 op0
= adjust_address_nv (op0
, mode1
, units
);
2971 bitpos
+= units
* BITS_PER_UNIT
;
2973 else if (bitpos
== 0 && bitsize
== GET_MODE_BITSIZE (mode
))
2974 op0
= adjust_address_nv (op0
, mode
, 0);
2975 else if (GET_MODE (op0
) != mode1
)
2976 op0
= adjust_address_nv (op0
, mode1
, 0);
2978 op0
= copy_rtx (op0
);
2979 if (op0
== orig_op0
)
2980 op0
= shallow_copy_rtx (op0
);
2981 set_mem_attributes (op0
, exp
, 0);
2984 if (bitpos
== 0 && mode
== GET_MODE (op0
))
2990 if (GET_MODE (op0
) == BLKmode
)
2993 if ((bitpos
% BITS_PER_UNIT
) == 0
2994 && bitsize
== GET_MODE_BITSIZE (mode1
))
2996 enum machine_mode opmode
= GET_MODE (op0
);
2998 if (opmode
== VOIDmode
)
2999 opmode
= TYPE_MODE (TREE_TYPE (tem
));
3001 /* This condition may hold if we're expanding the address
3002 right past the end of an array that turned out not to
3003 be addressable (i.e., the address was only computed in
3004 debug stmts). The gen_subreg below would rightfully
3005 crash, and the address doesn't really exist, so just
3007 if (bitpos
>= GET_MODE_BITSIZE (opmode
))
3010 if ((bitpos
% GET_MODE_BITSIZE (mode
)) == 0)
3011 return simplify_gen_subreg (mode
, op0
, opmode
,
3012 bitpos
/ BITS_PER_UNIT
);
3015 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0
))
3016 && TYPE_UNSIGNED (TREE_TYPE (exp
))
3018 : ZERO_EXTRACT
, mode
,
3019 GET_MODE (op0
) != VOIDmode
3021 : TYPE_MODE (TREE_TYPE (tem
)),
3022 op0
, GEN_INT (bitsize
), GEN_INT (bitpos
));
3026 return simplify_gen_unary (ABS
, mode
, op0
, mode
);
3029 return simplify_gen_unary (NEG
, mode
, op0
, mode
);
3032 return simplify_gen_unary (NOT
, mode
, op0
, mode
);
3035 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
3037 ? UNSIGNED_FLOAT
: FLOAT
, mode
, op0
,
3040 case FIX_TRUNC_EXPR
:
3041 return simplify_gen_unary (unsignedp
? UNSIGNED_FIX
: FIX
, mode
, op0
,
3044 case POINTER_PLUS_EXPR
:
3045 /* For the rare target where pointers are not the same size as
3046 size_t, we need to check for mis-matched modes and correct
3049 && GET_MODE (op0
) != VOIDmode
&& GET_MODE (op1
) != VOIDmode
3050 && GET_MODE (op0
) != GET_MODE (op1
))
3052 if (GET_MODE_BITSIZE (GET_MODE (op0
)) < GET_MODE_BITSIZE (GET_MODE (op1
)))
3053 op1
= simplify_gen_unary (TRUNCATE
, GET_MODE (op0
), op1
,
3056 /* We always sign-extend, regardless of the signedness of
3057 the operand, because the operand is always unsigned
3058 here even if the original C expression is signed. */
3059 op1
= simplify_gen_unary (SIGN_EXTEND
, GET_MODE (op0
), op1
,
3064 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
3067 return simplify_gen_binary (MINUS
, mode
, op0
, op1
);
3070 return simplify_gen_binary (MULT
, mode
, op0
, op1
);
3073 case TRUNC_DIV_EXPR
:
3074 case EXACT_DIV_EXPR
:
3076 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
3078 return simplify_gen_binary (DIV
, mode
, op0
, op1
);
3080 case TRUNC_MOD_EXPR
:
3081 return simplify_gen_binary (unsignedp
? UMOD
: MOD
, mode
, op0
, op1
);
3083 case FLOOR_DIV_EXPR
:
3085 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
3088 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
3089 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3090 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
3091 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3094 case FLOOR_MOD_EXPR
:
3096 return simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3099 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3100 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
3101 adj
= simplify_gen_unary (NEG
, mode
,
3102 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3104 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3110 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
3111 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3112 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
3113 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3117 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
3118 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3119 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
3120 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3126 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3127 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
3128 adj
= simplify_gen_unary (NEG
, mode
,
3129 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3131 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3135 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3136 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
3137 adj
= simplify_gen_unary (NEG
, mode
,
3138 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3140 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3143 case ROUND_DIV_EXPR
:
3146 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
3147 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3148 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
3149 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3153 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
3154 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3155 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
3156 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3159 case ROUND_MOD_EXPR
:
3162 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3163 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
3164 adj
= simplify_gen_unary (NEG
, mode
,
3165 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3167 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3171 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3172 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
3173 adj
= simplify_gen_unary (NEG
, mode
,
3174 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3176 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3180 return simplify_gen_binary (ASHIFT
, mode
, op0
, op1
);
3184 return simplify_gen_binary (LSHIFTRT
, mode
, op0
, op1
);
3186 return simplify_gen_binary (ASHIFTRT
, mode
, op0
, op1
);
3189 return simplify_gen_binary (ROTATE
, mode
, op0
, op1
);
3192 return simplify_gen_binary (ROTATERT
, mode
, op0
, op1
);
3195 return simplify_gen_binary (unsignedp
? UMIN
: SMIN
, mode
, op0
, op1
);
3198 return simplify_gen_binary (unsignedp
? UMAX
: SMAX
, mode
, op0
, op1
);
3201 case TRUTH_AND_EXPR
:
3202 return simplify_gen_binary (AND
, mode
, op0
, op1
);
3206 return simplify_gen_binary (IOR
, mode
, op0
, op1
);
3209 case TRUTH_XOR_EXPR
:
3210 return simplify_gen_binary (XOR
, mode
, op0
, op1
);
3212 case TRUTH_ANDIF_EXPR
:
3213 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, const0_rtx
);
3215 case TRUTH_ORIF_EXPR
:
3216 return gen_rtx_IF_THEN_ELSE (mode
, op0
, const_true_rtx
, op1
);
3218 case TRUTH_NOT_EXPR
:
3219 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, const0_rtx
);
3222 return simplify_gen_relational (unsignedp
? LTU
: LT
, mode
, inner_mode
,
3226 return simplify_gen_relational (unsignedp
? LEU
: LE
, mode
, inner_mode
,
3230 return simplify_gen_relational (unsignedp
? GTU
: GT
, mode
, inner_mode
,
3234 return simplify_gen_relational (unsignedp
? GEU
: GE
, mode
, inner_mode
,
3238 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, op1
);
3241 return simplify_gen_relational (NE
, mode
, inner_mode
, op0
, op1
);
3243 case UNORDERED_EXPR
:
3244 return simplify_gen_relational (UNORDERED
, mode
, inner_mode
, op0
, op1
);
3247 return simplify_gen_relational (ORDERED
, mode
, inner_mode
, op0
, op1
);
3250 return simplify_gen_relational (UNLT
, mode
, inner_mode
, op0
, op1
);
3253 return simplify_gen_relational (UNLE
, mode
, inner_mode
, op0
, op1
);
3256 return simplify_gen_relational (UNGT
, mode
, inner_mode
, op0
, op1
);
3259 return simplify_gen_relational (UNGE
, mode
, inner_mode
, op0
, op1
);
3262 return simplify_gen_relational (UNEQ
, mode
, inner_mode
, op0
, op1
);
3265 return simplify_gen_relational (LTGT
, mode
, inner_mode
, op0
, op1
);
3268 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, op2
);
3271 gcc_assert (COMPLEX_MODE_P (mode
));
3272 if (GET_MODE (op0
) == VOIDmode
)
3273 op0
= gen_rtx_CONST (GET_MODE_INNER (mode
), op0
);
3274 if (GET_MODE (op1
) == VOIDmode
)
3275 op1
= gen_rtx_CONST (GET_MODE_INNER (mode
), op1
);
3276 return gen_rtx_CONCAT (mode
, op0
, op1
);
3279 if (GET_CODE (op0
) == CONCAT
)
3280 return gen_rtx_CONCAT (mode
, XEXP (op0
, 0),
3281 simplify_gen_unary (NEG
, GET_MODE_INNER (mode
),
3283 GET_MODE_INNER (mode
)));
3286 enum machine_mode imode
= GET_MODE_INNER (mode
);
3291 re
= adjust_address_nv (op0
, imode
, 0);
3292 im
= adjust_address_nv (op0
, imode
, GET_MODE_SIZE (imode
));
3296 enum machine_mode ifmode
= int_mode_for_mode (mode
);
3297 enum machine_mode ihmode
= int_mode_for_mode (imode
);
3299 if (ifmode
== BLKmode
|| ihmode
== BLKmode
)
3301 halfsize
= GEN_INT (GET_MODE_BITSIZE (ihmode
));
3304 re
= gen_rtx_SUBREG (ifmode
, re
, 0);
3305 re
= gen_rtx_ZERO_EXTRACT (ihmode
, re
, halfsize
, const0_rtx
);
3306 if (imode
!= ihmode
)
3307 re
= gen_rtx_SUBREG (imode
, re
, 0);
3308 im
= copy_rtx (op0
);
3310 im
= gen_rtx_SUBREG (ifmode
, im
, 0);
3311 im
= gen_rtx_ZERO_EXTRACT (ihmode
, im
, halfsize
, halfsize
);
3312 if (imode
!= ihmode
)
3313 im
= gen_rtx_SUBREG (imode
, im
, 0);
3315 im
= gen_rtx_NEG (imode
, im
);
3316 return gen_rtx_CONCAT (mode
, re
, im
);
3320 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
3321 if (!op0
|| !MEM_P (op0
))
3323 if ((TREE_CODE (TREE_OPERAND (exp
, 0)) == VAR_DECL
3324 || TREE_CODE (TREE_OPERAND (exp
, 0)) == PARM_DECL
3325 || TREE_CODE (TREE_OPERAND (exp
, 0)) == RESULT_DECL
)
3326 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp
, 0))
3327 || target_for_debug_bind (TREE_OPERAND (exp
, 0))))
3328 return gen_rtx_DEBUG_IMPLICIT_PTR (mode
, TREE_OPERAND (exp
, 0));
3330 if (handled_component_p (TREE_OPERAND (exp
, 0)))
3332 HOST_WIDE_INT bitoffset
, bitsize
, maxsize
;
3334 = get_ref_base_and_extent (TREE_OPERAND (exp
, 0),
3335 &bitoffset
, &bitsize
, &maxsize
);
3336 if ((TREE_CODE (decl
) == VAR_DECL
3337 || TREE_CODE (decl
) == PARM_DECL
3338 || TREE_CODE (decl
) == RESULT_DECL
)
3339 && (!TREE_ADDRESSABLE (decl
)
3340 || target_for_debug_bind (decl
))
3341 && (bitoffset
% BITS_PER_UNIT
) == 0
3343 && bitsize
== maxsize
)
3344 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode
, decl
),
3345 bitoffset
/ BITS_PER_UNIT
);
3351 as
= TYPE_ADDR_SPACE (TREE_TYPE (exp
));
3352 op0
= convert_debug_memory_address (mode
, XEXP (op0
, 0), as
);
3357 exp
= build_constructor_from_list (TREE_TYPE (exp
),
3358 TREE_VECTOR_CST_ELTS (exp
));
3362 if (TREE_CLOBBER_P (exp
))
3364 else if (TREE_CODE (TREE_TYPE (exp
)) == VECTOR_TYPE
)
3369 op0
= gen_rtx_CONCATN
3370 (mode
, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
))));
3372 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), i
, val
)
3374 op1
= expand_debug_expr (val
);
3377 XVECEXP (op0
, 0, i
) = op1
;
3380 if (i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)))
3382 op1
= expand_debug_expr
3383 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp
))));
3388 for (; i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)); i
++)
3389 XVECEXP (op0
, 0, i
) = op1
;
3395 goto flag_unsupported
;
3398 /* ??? Maybe handle some builtins? */
3403 gimple g
= get_gimple_for_ssa_name (exp
);
3406 op0
= expand_debug_expr (gimple_assign_rhs_to_tree (g
));
3412 int part
= var_to_partition (SA
.map
, exp
);
3414 if (part
== NO_PARTITION
)
3416 /* If this is a reference to an incoming value of parameter
3417 that is never used in the code or where the incoming
3418 value is never used in the code, use PARM_DECL's
3420 if (SSA_NAME_IS_DEFAULT_DEF (exp
)
3421 && TREE_CODE (SSA_NAME_VAR (exp
)) == PARM_DECL
)
3423 op0
= expand_debug_parm_decl (SSA_NAME_VAR (exp
));
3426 op0
= expand_debug_expr (SSA_NAME_VAR (exp
));
3433 gcc_assert (part
>= 0 && (unsigned)part
< SA
.map
->num_partitions
);
3435 op0
= copy_rtx (SA
.partition_to_pseudo
[part
]);
3443 /* Vector stuff. For most of the codes we don't have rtl codes. */
3444 case REALIGN_LOAD_EXPR
:
3445 case REDUC_MAX_EXPR
:
3446 case REDUC_MIN_EXPR
:
3447 case REDUC_PLUS_EXPR
:
3449 case VEC_LSHIFT_EXPR
:
3450 case VEC_PACK_FIX_TRUNC_EXPR
:
3451 case VEC_PACK_SAT_EXPR
:
3452 case VEC_PACK_TRUNC_EXPR
:
3453 case VEC_RSHIFT_EXPR
:
3454 case VEC_UNPACK_FLOAT_HI_EXPR
:
3455 case VEC_UNPACK_FLOAT_LO_EXPR
:
3456 case VEC_UNPACK_HI_EXPR
:
3457 case VEC_UNPACK_LO_EXPR
:
3458 case VEC_WIDEN_MULT_HI_EXPR
:
3459 case VEC_WIDEN_MULT_LO_EXPR
:
3460 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3461 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3466 case ADDR_SPACE_CONVERT_EXPR
:
3467 case FIXED_CONVERT_EXPR
:
3469 case WITH_SIZE_EXPR
:
3473 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
3474 && SCALAR_INT_MODE_P (mode
))
3477 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
3479 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
3482 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
3484 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op1
,
3486 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
3487 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
3491 case WIDEN_MULT_EXPR
:
3492 case WIDEN_MULT_PLUS_EXPR
:
3493 case WIDEN_MULT_MINUS_EXPR
:
3494 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
3495 && SCALAR_INT_MODE_P (mode
))
3497 inner_mode
= GET_MODE (op0
);
3498 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
3499 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
3501 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
3502 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1))))
3503 op1
= simplify_gen_unary (ZERO_EXTEND
, mode
, op1
, inner_mode
);
3505 op1
= simplify_gen_unary (SIGN_EXTEND
, mode
, op1
, inner_mode
);
3506 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
3507 if (TREE_CODE (exp
) == WIDEN_MULT_EXPR
)
3509 else if (TREE_CODE (exp
) == WIDEN_MULT_PLUS_EXPR
)
3510 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
3512 return simplify_gen_binary (MINUS
, mode
, op2
, op0
);
3516 case WIDEN_SUM_EXPR
:
3517 case WIDEN_LSHIFT_EXPR
:
3518 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
3519 && SCALAR_INT_MODE_P (mode
))
3522 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
3524 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
3526 return simplify_gen_binary (TREE_CODE (exp
) == WIDEN_LSHIFT_EXPR
3527 ? ASHIFT
: PLUS
, mode
, op0
, op1
);
3532 return simplify_gen_ternary (FMA
, mode
, inner_mode
, op0
, op1
, op2
);
3536 #ifdef ENABLE_CHECKING
3545 /* Return an RTX equivalent to the source bind value of the tree expression
3549 expand_debug_source_expr (tree exp
)
3552 enum machine_mode mode
= VOIDmode
, inner_mode
;
3554 switch (TREE_CODE (exp
))
3558 mode
= DECL_MODE (exp
);
3559 op0
= expand_debug_parm_decl (exp
);
3562 /* See if this isn't an argument that has been completely
3564 if (!DECL_RTL_SET_P (exp
)
3565 && !DECL_INCOMING_RTL (exp
)
3566 && DECL_ABSTRACT_ORIGIN (current_function_decl
))
3569 if (DECL_ABSTRACT_ORIGIN (exp
))
3570 aexp
= DECL_ABSTRACT_ORIGIN (exp
);
3571 if (DECL_CONTEXT (aexp
)
3572 == DECL_ABSTRACT_ORIGIN (current_function_decl
))
3574 VEC(tree
, gc
) **debug_args
;
3577 #ifdef ENABLE_CHECKING
3579 for (parm
= DECL_ARGUMENTS (current_function_decl
);
3580 parm
; parm
= DECL_CHAIN (parm
))
3581 gcc_assert (parm
!= exp
3582 && DECL_ABSTRACT_ORIGIN (parm
) != aexp
);
3584 debug_args
= decl_debug_args_lookup (current_function_decl
);
3585 if (debug_args
!= NULL
)
3587 for (ix
= 0; VEC_iterate (tree
, *debug_args
, ix
, ddecl
);
3590 return gen_rtx_DEBUG_PARAMETER_REF (mode
, aexp
);
3600 if (op0
== NULL_RTX
)
3603 inner_mode
= GET_MODE (op0
);
3604 if (mode
== inner_mode
)
3607 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
3609 if (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (inner_mode
))
3610 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
3611 else if (GET_MODE_BITSIZE (mode
) < GET_MODE_BITSIZE (inner_mode
))
3612 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
3614 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
3616 else if (FLOAT_MODE_P (mode
))
3618 else if (FLOAT_MODE_P (inner_mode
))
3620 if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
3621 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
3623 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
3625 else if (CONSTANT_P (op0
)
3626 || GET_MODE_BITSIZE (mode
) <= GET_MODE_BITSIZE (inner_mode
))
3627 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
3628 subreg_lowpart_offset (mode
, inner_mode
));
3629 else if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
3630 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
3632 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
3637 /* Expand the _LOCs in debug insns. We run this after expanding all
3638 regular insns, so that any variables referenced in the function
3639 will have their DECL_RTLs set. */
3642 expand_debug_locations (void)
3645 rtx last
= get_last_insn ();
3646 int save_strict_alias
= flag_strict_aliasing
;
3648 /* New alias sets while setting up memory attributes cause
3649 -fcompare-debug failures, even though it doesn't bring about any
3651 flag_strict_aliasing
= 0;
3653 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3654 if (DEBUG_INSN_P (insn
))
3656 tree value
= (tree
)INSN_VAR_LOCATION_LOC (insn
);
3658 enum machine_mode mode
;
3660 if (value
== NULL_TREE
)
3664 if (INSN_VAR_LOCATION_STATUS (insn
)
3665 == VAR_INIT_STATUS_UNINITIALIZED
)
3666 val
= expand_debug_source_expr (value
);
3668 val
= expand_debug_expr (value
);
3669 gcc_assert (last
== get_last_insn ());
3673 val
= gen_rtx_UNKNOWN_VAR_LOC ();
3676 mode
= GET_MODE (INSN_VAR_LOCATION (insn
));
3678 gcc_assert (mode
== GET_MODE (val
)
3679 || (GET_MODE (val
) == VOIDmode
3680 && (CONST_INT_P (val
)
3681 || GET_CODE (val
) == CONST_FIXED
3682 || GET_CODE (val
) == CONST_DOUBLE
3683 || GET_CODE (val
) == LABEL_REF
)));
3686 INSN_VAR_LOCATION_LOC (insn
) = val
;
3689 flag_strict_aliasing
= save_strict_alias
;
3692 /* Expand basic block BB from GIMPLE trees to RTL. */
3695 expand_gimple_basic_block (basic_block bb
)
3697 gimple_stmt_iterator gsi
;
3706 fprintf (dump_file
, "\n;; Generating RTL for gimple basic block %d\n",
3709 /* Note that since we are now transitioning from GIMPLE to RTL, we
3710 cannot use the gsi_*_bb() routines because they expect the basic
3711 block to be in GIMPLE, instead of RTL. Therefore, we need to
3712 access the BB sequence directly. */
3713 stmts
= bb_seq (bb
);
3714 bb
->il
.gimple
= NULL
;
3715 rtl_profile_for_bb (bb
);
3716 init_rtl_bb_info (bb
);
3717 bb
->flags
|= BB_RTL
;
3719 /* Remove the RETURN_EXPR if we may fall though to the exit
3721 gsi
= gsi_last (stmts
);
3722 if (!gsi_end_p (gsi
)
3723 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
3725 gimple ret_stmt
= gsi_stmt (gsi
);
3727 gcc_assert (single_succ_p (bb
));
3728 gcc_assert (single_succ (bb
) == EXIT_BLOCK_PTR
);
3730 if (bb
->next_bb
== EXIT_BLOCK_PTR
3731 && !gimple_return_retval (ret_stmt
))
3733 gsi_remove (&gsi
, false);
3734 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
3738 gsi
= gsi_start (stmts
);
3739 if (!gsi_end_p (gsi
))
3741 stmt
= gsi_stmt (gsi
);
3742 if (gimple_code (stmt
) != GIMPLE_LABEL
)
3746 elt
= pointer_map_contains (lab_rtx_for_bb
, bb
);
3750 last
= get_last_insn ();
3754 expand_gimple_stmt (stmt
);
3759 emit_label ((rtx
) *elt
);
3761 /* Java emits line number notes in the top of labels.
3762 ??? Make this go away once line number notes are obsoleted. */
3763 BB_HEAD (bb
) = NEXT_INSN (last
);
3764 if (NOTE_P (BB_HEAD (bb
)))
3765 BB_HEAD (bb
) = NEXT_INSN (BB_HEAD (bb
));
3766 note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, BB_HEAD (bb
));
3768 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
3771 note
= BB_HEAD (bb
) = emit_note (NOTE_INSN_BASIC_BLOCK
);
3773 NOTE_BASIC_BLOCK (note
) = bb
;
3775 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3779 stmt
= gsi_stmt (gsi
);
3781 /* If this statement is a non-debug one, and we generate debug
3782 insns, then this one might be the last real use of a TERed
3783 SSA_NAME, but where there are still some debug uses further
3784 down. Expanding the current SSA name in such further debug
3785 uses by their RHS might lead to wrong debug info, as coalescing
3786 might make the operands of such RHS be placed into the same
3787 pseudo as something else. Like so:
3788 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3792 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3793 If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
3794 the write to a_2 would actually have clobbered the place which
3797 So, instead of that, we recognize the situation, and generate
3798 debug temporaries at the last real use of TERed SSA names:
3805 if (MAY_HAVE_DEBUG_INSNS
3807 && !is_gimple_debug (stmt
))
3813 location_t sloc
= get_curr_insn_source_location ();
3814 tree sblock
= get_curr_insn_block ();
3816 /* Look for SSA names that have their last use here (TERed
3817 names always have only one real use). */
3818 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
3819 if ((def
= get_gimple_for_ssa_name (op
)))
3821 imm_use_iterator imm_iter
;
3822 use_operand_p use_p
;
3823 bool have_debug_uses
= false;
3825 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, op
)
3827 if (gimple_debug_bind_p (USE_STMT (use_p
)))
3829 have_debug_uses
= true;
3834 if (have_debug_uses
)
3836 /* OP is a TERed SSA name, with DEF it's defining
3837 statement, and where OP is used in further debug
3838 instructions. Generate a debug temporary, and
3839 replace all uses of OP in debug insns with that
3842 tree value
= gimple_assign_rhs_to_tree (def
);
3843 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
3845 enum machine_mode mode
;
3847 set_curr_insn_source_location (gimple_location (def
));
3848 set_curr_insn_block (gimple_block (def
));
3850 DECL_ARTIFICIAL (vexpr
) = 1;
3851 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
3853 mode
= DECL_MODE (value
);
3855 mode
= TYPE_MODE (TREE_TYPE (value
));
3856 DECL_MODE (vexpr
) = mode
;
3858 val
= gen_rtx_VAR_LOCATION
3859 (mode
, vexpr
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
3861 emit_debug_insn (val
);
3863 FOR_EACH_IMM_USE_STMT (debugstmt
, imm_iter
, op
)
3865 if (!gimple_debug_bind_p (debugstmt
))
3868 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
3869 SET_USE (use_p
, vexpr
);
3871 update_stmt (debugstmt
);
3875 set_curr_insn_source_location (sloc
);
3876 set_curr_insn_block (sblock
);
3879 currently_expanding_gimple_stmt
= stmt
;
3881 /* Expand this statement, then evaluate the resulting RTL and
3882 fixup the CFG accordingly. */
3883 if (gimple_code (stmt
) == GIMPLE_COND
)
3885 new_bb
= expand_gimple_cond (bb
, stmt
);
3889 else if (gimple_debug_bind_p (stmt
))
3891 location_t sloc
= get_curr_insn_source_location ();
3892 tree sblock
= get_curr_insn_block ();
3893 gimple_stmt_iterator nsi
= gsi
;
3897 tree var
= gimple_debug_bind_get_var (stmt
);
3900 enum machine_mode mode
;
3902 if (TREE_CODE (var
) != DEBUG_EXPR_DECL
3903 && TREE_CODE (var
) != LABEL_DECL
3904 && !target_for_debug_bind (var
))
3905 goto delink_debug_stmt
;
3907 if (gimple_debug_bind_has_value_p (stmt
))
3908 value
= gimple_debug_bind_get_value (stmt
);
3912 last
= get_last_insn ();
3914 set_curr_insn_source_location (gimple_location (stmt
));
3915 set_curr_insn_block (gimple_block (stmt
));
3918 mode
= DECL_MODE (var
);
3920 mode
= TYPE_MODE (TREE_TYPE (var
));
3922 val
= gen_rtx_VAR_LOCATION
3923 (mode
, var
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
3925 emit_debug_insn (val
);
3927 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3929 /* We can't dump the insn with a TREE where an RTX
3931 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
3932 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
3933 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
3937 /* In order not to generate too many debug temporaries,
3938 we delink all uses of debug statements we already expanded.
3939 Therefore debug statements between definition and real
3940 use of TERed SSA names will continue to use the SSA name,
3941 and not be replaced with debug temps. */
3942 delink_stmt_imm_use (stmt
);
3946 if (gsi_end_p (nsi
))
3948 stmt
= gsi_stmt (nsi
);
3949 if (!gimple_debug_bind_p (stmt
))
3953 set_curr_insn_source_location (sloc
);
3954 set_curr_insn_block (sblock
);
3956 else if (gimple_debug_source_bind_p (stmt
))
3958 location_t sloc
= get_curr_insn_source_location ();
3959 tree sblock
= get_curr_insn_block ();
3960 tree var
= gimple_debug_source_bind_get_var (stmt
);
3961 tree value
= gimple_debug_source_bind_get_value (stmt
);
3963 enum machine_mode mode
;
3965 last
= get_last_insn ();
3967 set_curr_insn_source_location (gimple_location (stmt
));
3968 set_curr_insn_block (gimple_block (stmt
));
3970 mode
= DECL_MODE (var
);
3972 val
= gen_rtx_VAR_LOCATION (mode
, var
, (rtx
)value
,
3973 VAR_INIT_STATUS_UNINITIALIZED
);
3975 emit_debug_insn (val
);
3977 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3979 /* We can't dump the insn with a TREE where an RTX
3981 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
3982 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
3983 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
3986 set_curr_insn_source_location (sloc
);
3987 set_curr_insn_block (sblock
);
3991 if (is_gimple_call (stmt
) && gimple_call_tail_p (stmt
))
3994 new_bb
= expand_gimple_tailcall (bb
, stmt
, &can_fallthru
);
4005 def_operand_p def_p
;
4006 def_p
= SINGLE_SSA_DEF_OPERAND (stmt
, SSA_OP_DEF
);
4010 /* Ignore this stmt if it is in the list of
4011 replaceable expressions. */
4013 && bitmap_bit_p (SA
.values
,
4014 SSA_NAME_VERSION (DEF_FROM_PTR (def_p
))))
4017 last
= expand_gimple_stmt (stmt
);
4018 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
4023 currently_expanding_gimple_stmt
= NULL
;
4025 /* Expand implicit goto and convert goto_locus. */
4026 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4028 if (e
->goto_locus
&& e
->goto_block
)
4030 set_curr_insn_source_location (e
->goto_locus
);
4031 set_curr_insn_block (e
->goto_block
);
4032 e
->goto_locus
= curr_insn_locator ();
4034 e
->goto_block
= NULL
;
4035 if ((e
->flags
& EDGE_FALLTHRU
) && e
->dest
!= bb
->next_bb
)
4037 emit_jump (label_rtx_for_bb (e
->dest
));
4038 e
->flags
&= ~EDGE_FALLTHRU
;
4042 /* Expanded RTL can create a jump in the last instruction of block.
4043 This later might be assumed to be a jump to successor and break edge insertion.
4044 We need to insert dummy move to prevent this. PR41440. */
4045 if (single_succ_p (bb
)
4046 && (single_succ_edge (bb
)->flags
& EDGE_FALLTHRU
)
4047 && (last
= get_last_insn ())
4050 rtx dummy
= gen_reg_rtx (SImode
);
4051 emit_insn_after_noloc (gen_move_insn (dummy
, dummy
), last
, NULL
);
4054 do_pending_stack_adjust ();
4056 /* Find the block tail. The last insn in the block is the insn
4057 before a barrier and/or table jump insn. */
4058 last
= get_last_insn ();
4059 if (BARRIER_P (last
))
4060 last
= PREV_INSN (last
);
4061 if (JUMP_TABLE_DATA_P (last
))
4062 last
= PREV_INSN (PREV_INSN (last
));
4065 update_bb_for_insn (bb
);
4071 /* Create a basic block for initialization code. */
4074 construct_init_block (void)
4076 basic_block init_block
, first_block
;
4080 /* Multiple entry points not supported yet. */
4081 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR
->succs
) == 1);
4082 init_rtl_bb_info (ENTRY_BLOCK_PTR
);
4083 init_rtl_bb_info (EXIT_BLOCK_PTR
);
4084 ENTRY_BLOCK_PTR
->flags
|= BB_RTL
;
4085 EXIT_BLOCK_PTR
->flags
|= BB_RTL
;
4087 e
= EDGE_SUCC (ENTRY_BLOCK_PTR
, 0);
4089 /* When entry edge points to first basic block, we don't need jump,
4090 otherwise we have to jump into proper target. */
4091 if (e
&& e
->dest
!= ENTRY_BLOCK_PTR
->next_bb
)
4093 tree label
= gimple_block_label (e
->dest
);
4095 emit_jump (label_rtx (label
));
4099 flags
= EDGE_FALLTHRU
;
4101 init_block
= create_basic_block (NEXT_INSN (get_insns ()),
4104 init_block
->frequency
= ENTRY_BLOCK_PTR
->frequency
;
4105 init_block
->count
= ENTRY_BLOCK_PTR
->count
;
4108 first_block
= e
->dest
;
4109 redirect_edge_succ (e
, init_block
);
4110 e
= make_edge (init_block
, first_block
, flags
);
4113 e
= make_edge (init_block
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
4114 e
->probability
= REG_BR_PROB_BASE
;
4115 e
->count
= ENTRY_BLOCK_PTR
->count
;
4117 update_bb_for_insn (init_block
);
4121 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4122 found in the block tree. */
4125 set_block_levels (tree block
, int level
)
4129 BLOCK_NUMBER (block
) = level
;
4130 set_block_levels (BLOCK_SUBBLOCKS (block
), level
+ 1);
4131 block
= BLOCK_CHAIN (block
);
4135 /* Create a block containing landing pads and similar stuff. */
4138 construct_exit_block (void)
4140 rtx head
= get_last_insn ();
4142 basic_block exit_block
;
4146 rtx orig_end
= BB_END (EXIT_BLOCK_PTR
->prev_bb
);
4148 rtl_profile_for_bb (EXIT_BLOCK_PTR
);
4150 /* Make sure the locus is set to the end of the function, so that
4151 epilogue line numbers and warnings are set properly. */
4152 if (cfun
->function_end_locus
!= UNKNOWN_LOCATION
)
4153 input_location
= cfun
->function_end_locus
;
4155 /* The following insns belong to the top scope. */
4156 set_curr_insn_block (DECL_INITIAL (current_function_decl
));
4158 /* Generate rtl for function exit. */
4159 expand_function_end ();
4161 end
= get_last_insn ();
4164 /* While emitting the function end we could move end of the last basic block.
4166 BB_END (EXIT_BLOCK_PTR
->prev_bb
) = orig_end
;
4167 while (NEXT_INSN (head
) && NOTE_P (NEXT_INSN (head
)))
4168 head
= NEXT_INSN (head
);
4169 exit_block
= create_basic_block (NEXT_INSN (head
), end
,
4170 EXIT_BLOCK_PTR
->prev_bb
);
4171 exit_block
->frequency
= EXIT_BLOCK_PTR
->frequency
;
4172 exit_block
->count
= EXIT_BLOCK_PTR
->count
;
4175 while (ix
< EDGE_COUNT (EXIT_BLOCK_PTR
->preds
))
4177 e
= EDGE_PRED (EXIT_BLOCK_PTR
, ix
);
4178 if (!(e
->flags
& EDGE_ABNORMAL
))
4179 redirect_edge_succ (e
, exit_block
);
4184 e
= make_edge (exit_block
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
4185 e
->probability
= REG_BR_PROB_BASE
;
4186 e
->count
= EXIT_BLOCK_PTR
->count
;
4187 FOR_EACH_EDGE (e2
, ei
, EXIT_BLOCK_PTR
->preds
)
4190 e
->count
-= e2
->count
;
4191 exit_block
->count
-= e2
->count
;
4192 exit_block
->frequency
-= EDGE_FREQUENCY (e2
);
4196 if (exit_block
->count
< 0)
4197 exit_block
->count
= 0;
4198 if (exit_block
->frequency
< 0)
4199 exit_block
->frequency
= 0;
4200 update_bb_for_insn (exit_block
);
4203 /* Helper function for discover_nonconstant_array_refs.
4204 Look for ARRAY_REF nodes with non-constant indexes and mark them
4208 discover_nonconstant_array_refs_r (tree
* tp
, int *walk_subtrees
,
4209 void *data ATTRIBUTE_UNUSED
)
4213 if (IS_TYPE_OR_DECL_P (t
))
4215 else if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
4217 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
4218 && is_gimple_min_invariant (TREE_OPERAND (t
, 1))
4219 && (!TREE_OPERAND (t
, 2)
4220 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
4221 || (TREE_CODE (t
) == COMPONENT_REF
4222 && (!TREE_OPERAND (t
,2)
4223 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
4224 || TREE_CODE (t
) == BIT_FIELD_REF
4225 || TREE_CODE (t
) == REALPART_EXPR
4226 || TREE_CODE (t
) == IMAGPART_EXPR
4227 || TREE_CODE (t
) == VIEW_CONVERT_EXPR
4228 || CONVERT_EXPR_P (t
))
4229 t
= TREE_OPERAND (t
, 0);
4231 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
4233 t
= get_base_address (t
);
4235 && DECL_MODE (t
) != BLKmode
)
4236 TREE_ADDRESSABLE (t
) = 1;
4245 /* RTL expansion is not able to compile array references with variable
4246 offsets for arrays stored in single register. Discover such
4247 expressions and mark variables as addressable to avoid this
4251 discover_nonconstant_array_refs (void)
4254 gimple_stmt_iterator gsi
;
4257 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4259 gimple stmt
= gsi_stmt (gsi
);
4260 if (!is_gimple_debug (stmt
))
4261 walk_gimple_op (stmt
, discover_nonconstant_array_refs_r
, NULL
);
4265 /* This function sets crtl->args.internal_arg_pointer to a virtual
4266 register if DRAP is needed. Local register allocator will replace
4267 virtual_incoming_args_rtx with the virtual register. */
4270 expand_stack_alignment (void)
4273 unsigned int preferred_stack_boundary
;
4275 if (! SUPPORTS_STACK_ALIGNMENT
)
4278 if (cfun
->calls_alloca
4279 || cfun
->has_nonlocal_label
4280 || crtl
->has_nonlocal_goto
)
4281 crtl
->need_drap
= true;
4283 /* Call update_stack_boundary here again to update incoming stack
4284 boundary. It may set incoming stack alignment to a different
4285 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4286 use the minimum incoming stack alignment to check if it is OK
4287 to perform sibcall optimization since sibcall optimization will
4288 only align the outgoing stack to incoming stack boundary. */
4289 if (targetm
.calls
.update_stack_boundary
)
4290 targetm
.calls
.update_stack_boundary ();
4292 /* The incoming stack frame has to be aligned at least at
4293 parm_stack_boundary. */
4294 gcc_assert (crtl
->parm_stack_boundary
<= INCOMING_STACK_BOUNDARY
);
4296 /* Update crtl->stack_alignment_estimated and use it later to align
4297 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4298 exceptions since callgraph doesn't collect incoming stack alignment
4300 if (cfun
->can_throw_non_call_exceptions
4301 && PREFERRED_STACK_BOUNDARY
> crtl
->preferred_stack_boundary
)
4302 preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
4304 preferred_stack_boundary
= crtl
->preferred_stack_boundary
;
4305 if (preferred_stack_boundary
> crtl
->stack_alignment_estimated
)
4306 crtl
->stack_alignment_estimated
= preferred_stack_boundary
;
4307 if (preferred_stack_boundary
> crtl
->stack_alignment_needed
)
4308 crtl
->stack_alignment_needed
= preferred_stack_boundary
;
4310 gcc_assert (crtl
->stack_alignment_needed
4311 <= crtl
->stack_alignment_estimated
);
4313 crtl
->stack_realign_needed
4314 = INCOMING_STACK_BOUNDARY
< crtl
->stack_alignment_estimated
;
4315 crtl
->stack_realign_tried
= crtl
->stack_realign_needed
;
4317 crtl
->stack_realign_processed
= true;
4319 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4321 gcc_assert (targetm
.calls
.get_drap_rtx
!= NULL
);
4322 drap_rtx
= targetm
.calls
.get_drap_rtx ();
4324 /* stack_realign_drap and drap_rtx must match. */
4325 gcc_assert ((stack_realign_drap
!= 0) == (drap_rtx
!= NULL
));
4327 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4328 if (NULL
!= drap_rtx
)
4330 crtl
->args
.internal_arg_pointer
= drap_rtx
;
4332 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4334 fixup_tail_calls ();
4338 /* Translate the intermediate representation contained in the CFG
4339 from GIMPLE trees to RTL.
4341 We do conversion per basic block and preserve/update the tree CFG.
4342 This implies we have to do some magic as the CFG can simultaneously
4343 consist of basic blocks containing RTL and GIMPLE trees. This can
4344 confuse the CFG hooks, so be careful to not manipulate CFG during
4348 gimple_expand_cfg (void)
4350 basic_block bb
, init_block
;
4357 timevar_push (TV_OUT_OF_SSA
);
4358 rewrite_out_of_ssa (&SA
);
4359 timevar_pop (TV_OUT_OF_SSA
);
4360 SA
.partition_to_pseudo
= (rtx
*)xcalloc (SA
.map
->num_partitions
,
4363 /* Some backends want to know that we are expanding to RTL. */
4364 currently_expanding_to_rtl
= 1;
4366 rtl_profile_for_bb (ENTRY_BLOCK_PTR
);
4368 insn_locators_alloc ();
4369 if (!DECL_IS_BUILTIN (current_function_decl
))
4371 /* Eventually, all FEs should explicitly set function_start_locus. */
4372 if (cfun
->function_start_locus
== UNKNOWN_LOCATION
)
4373 set_curr_insn_source_location
4374 (DECL_SOURCE_LOCATION (current_function_decl
));
4376 set_curr_insn_source_location (cfun
->function_start_locus
);
4379 set_curr_insn_source_location (UNKNOWN_LOCATION
);
4380 set_curr_insn_block (DECL_INITIAL (current_function_decl
));
4381 prologue_locator
= curr_insn_locator ();
4383 #ifdef INSN_SCHEDULING
4384 init_sched_attrs ();
4387 /* Make sure first insn is a note even if we don't want linenums.
4388 This makes sure the first insn will never be deleted.
4389 Also, final expects a note to appear there. */
4390 emit_note (NOTE_INSN_DELETED
);
4392 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4393 discover_nonconstant_array_refs ();
4395 targetm
.expand_to_rtl_hook ();
4396 crtl
->stack_alignment_needed
= STACK_BOUNDARY
;
4397 crtl
->max_used_stack_slot_alignment
= STACK_BOUNDARY
;
4398 crtl
->stack_alignment_estimated
= 0;
4399 crtl
->preferred_stack_boundary
= STACK_BOUNDARY
;
4400 cfun
->cfg
->max_jumptable_ents
= 0;
4402 /* Resovle the function section. Some targets, like ARM EABI rely on knowledge
4403 of the function section at exapnsion time to predict distance of calls. */
4404 resolve_unique_section (current_function_decl
, 0, flag_function_sections
);
4406 /* Expand the variables recorded during gimple lowering. */
4407 timevar_push (TV_VAR_EXPAND
);
4410 expand_used_vars ();
4412 var_seq
= get_insns ();
4414 timevar_pop (TV_VAR_EXPAND
);
4416 /* Honor stack protection warnings. */
4417 if (warn_stack_protect
)
4419 if (cfun
->calls_alloca
)
4420 warning (OPT_Wstack_protector
,
4421 "stack protector not protecting local variables: "
4422 "variable length buffer");
4423 if (has_short_buffer
&& !crtl
->stack_protect_guard
)
4424 warning (OPT_Wstack_protector
,
4425 "stack protector not protecting function: "
4426 "all local arrays are less than %d bytes long",
4427 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE
));
4430 /* Set up parameters and prepare for return, for the function. */
4431 expand_function_start (current_function_decl
);
4433 /* If we emitted any instructions for setting up the variables,
4434 emit them before the FUNCTION_START note. */
4437 emit_insn_before (var_seq
, parm_birth_insn
);
4439 /* In expand_function_end we'll insert the alloca save/restore
4440 before parm_birth_insn. We've just insertted an alloca call.
4441 Adjust the pointer to match. */
4442 parm_birth_insn
= var_seq
;
4445 /* Now that we also have the parameter RTXs, copy them over to our
4447 for (i
= 0; i
< SA
.map
->num_partitions
; i
++)
4449 tree var
= SSA_NAME_VAR (partition_to_var (SA
.map
, i
));
4451 if (TREE_CODE (var
) != VAR_DECL
4452 && !SA
.partition_to_pseudo
[i
])
4453 SA
.partition_to_pseudo
[i
] = DECL_RTL_IF_SET (var
);
4454 gcc_assert (SA
.partition_to_pseudo
[i
]);
4456 /* If this decl was marked as living in multiple places, reset
4457 this now to NULL. */
4458 if (DECL_RTL_IF_SET (var
) == pc_rtx
)
4459 SET_DECL_RTL (var
, NULL
);
4461 /* Some RTL parts really want to look at DECL_RTL(x) when x
4462 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4463 SET_DECL_RTL here making this available, but that would mean
4464 to select one of the potentially many RTLs for one DECL. Instead
4465 of doing that we simply reset the MEM_EXPR of the RTL in question,
4466 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4467 if (!DECL_RTL_SET_P (var
))
4469 if (MEM_P (SA
.partition_to_pseudo
[i
]))
4470 set_mem_expr (SA
.partition_to_pseudo
[i
], NULL
);
4474 /* If we have a class containing differently aligned pointers
4475 we need to merge those into the corresponding RTL pointer
4477 for (i
= 1; i
< num_ssa_names
; i
++)
4479 tree name
= ssa_name (i
);
4484 || !POINTER_TYPE_P (TREE_TYPE (name
))
4485 /* We might have generated new SSA names in
4486 update_alias_info_with_stack_vars. They will have a NULL
4487 defining statements, and won't be part of the partitioning,
4489 || !SSA_NAME_DEF_STMT (name
))
4491 part
= var_to_partition (SA
.map
, name
);
4492 if (part
== NO_PARTITION
)
4494 r
= SA
.partition_to_pseudo
[part
];
4496 mark_reg_pointer (r
, get_pointer_alignment (name
));
4499 /* If this function is `main', emit a call to `__main'
4500 to run global initializers, etc. */
4501 if (DECL_NAME (current_function_decl
)
4502 && MAIN_NAME_P (DECL_NAME (current_function_decl
))
4503 && DECL_FILE_SCOPE_P (current_function_decl
))
4504 expand_main_function ();
4506 /* Initialize the stack_protect_guard field. This must happen after the
4507 call to __main (if any) so that the external decl is initialized. */
4508 if (crtl
->stack_protect_guard
)
4509 stack_protect_prologue ();
4511 expand_phi_nodes (&SA
);
4513 /* Register rtl specific functions for cfg. */
4514 rtl_register_cfg_hooks ();
4516 init_block
= construct_init_block ();
4518 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4519 remaining edges later. */
4520 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR
->succs
)
4521 e
->flags
&= ~EDGE_EXECUTABLE
;
4523 lab_rtx_for_bb
= pointer_map_create ();
4524 FOR_BB_BETWEEN (bb
, init_block
->next_bb
, EXIT_BLOCK_PTR
, next_bb
)
4525 bb
= expand_gimple_basic_block (bb
);
4527 if (MAY_HAVE_DEBUG_INSNS
)
4528 expand_debug_locations ();
4530 execute_free_datastructures ();
4531 timevar_push (TV_OUT_OF_SSA
);
4532 finish_out_of_ssa (&SA
);
4533 timevar_pop (TV_OUT_OF_SSA
);
4535 timevar_push (TV_POST_EXPAND
);
4536 /* We are no longer in SSA form. */
4537 cfun
->gimple_df
->in_ssa_p
= false;
4539 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
4540 conservatively to true until they are all profile aware. */
4541 pointer_map_destroy (lab_rtx_for_bb
);
4544 construct_exit_block ();
4545 set_curr_insn_block (DECL_INITIAL (current_function_decl
));
4546 insn_locators_finalize ();
4548 /* Zap the tree EH table. */
4549 set_eh_throw_stmt_table (cfun
, NULL
);
4551 /* We need JUMP_LABEL be set in order to redirect jumps, and hence
4552 split edges which edge insertions might do. */
4553 rebuild_jump_labels (get_insns ());
4555 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, EXIT_BLOCK_PTR
, next_bb
)
4559 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
4563 rebuild_jump_labels_chain (e
->insns
.r
);
4564 /* Avoid putting insns before parm_birth_insn. */
4565 if (e
->src
== ENTRY_BLOCK_PTR
4566 && single_succ_p (ENTRY_BLOCK_PTR
)
4569 rtx insns
= e
->insns
.r
;
4570 e
->insns
.r
= NULL_RTX
;
4571 emit_insn_after_noloc (insns
, parm_birth_insn
, e
->dest
);
4574 commit_one_edge_insertion (e
);
4581 /* We're done expanding trees to RTL. */
4582 currently_expanding_to_rtl
= 0;
4584 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
->next_bb
, EXIT_BLOCK_PTR
, next_bb
)
4588 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
4590 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4591 e
->flags
&= ~EDGE_EXECUTABLE
;
4593 /* At the moment not all abnormal edges match the RTL
4594 representation. It is safe to remove them here as
4595 find_many_sub_basic_blocks will rediscover them.
4596 In the future we should get this fixed properly. */
4597 if ((e
->flags
& EDGE_ABNORMAL
)
4598 && !(e
->flags
& EDGE_SIBCALL
))
4605 blocks
= sbitmap_alloc (last_basic_block
);
4606 sbitmap_ones (blocks
);
4607 find_many_sub_basic_blocks (blocks
);
4608 sbitmap_free (blocks
);
4609 purge_all_dead_edges ();
4613 expand_stack_alignment ();
4615 #ifdef ENABLE_CHECKING
4616 verify_flow_info ();
4619 /* There's no need to defer outputting this function any more; we
4620 know we want to output it. */
4621 DECL_DEFER_OUTPUT (current_function_decl
) = 0;
4623 /* Now that we're done expanding trees to RTL, we shouldn't have any
4624 more CONCATs anywhere. */
4625 generating_concat_p
= 0;
4630 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4631 /* And the pass manager will dump RTL for us. */
4634 /* If we're emitting a nested function, make sure its parent gets
4635 emitted as well. Doing otherwise confuses debug info. */
4638 for (parent
= DECL_CONTEXT (current_function_decl
);
4639 parent
!= NULL_TREE
;
4640 parent
= get_containing_scope (parent
))
4641 if (TREE_CODE (parent
) == FUNCTION_DECL
)
4642 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent
)) = 1;
4645 /* We are now committed to emitting code for this function. Do any
4646 preparation, such as emitting abstract debug info for the inline
4647 before it gets mangled by optimization. */
4648 if (cgraph_function_possibly_inlined_p (current_function_decl
))
4649 (*debug_hooks
->outlining_inline_function
) (current_function_decl
);
4651 TREE_ASM_WRITTEN (current_function_decl
) = 1;
4653 /* After expanding, the return labels are no longer needed. */
4654 return_label
= NULL
;
4655 naked_return_label
= NULL
;
4657 /* After expanding, the tm_restart map is no longer needed. */
4658 if (cfun
->gimple_df
->tm_restart
)
4660 htab_delete (cfun
->gimple_df
->tm_restart
);
4661 cfun
->gimple_df
->tm_restart
= NULL
;
4664 /* Tag the blocks with a depth number so that change_scope can find
4665 the common parent easily. */
4666 set_block_levels (DECL_INITIAL (cfun
->decl
), 0);
4667 default_rtl_profile ();
4668 timevar_pop (TV_POST_EXPAND
);
4672 struct rtl_opt_pass pass_expand
=
4676 "expand", /* name */
4678 gimple_expand_cfg
, /* execute */
4681 0, /* static_pass_number */
4682 TV_EXPAND
, /* tv_id */
4683 PROP_ssa
| PROP_gimple_leh
| PROP_cfg
4684 | PROP_gimple_lcx
, /* properties_required */
4685 PROP_rtl
, /* properties_provided */
4686 PROP_ssa
| PROP_trees
, /* properties_destroyed */
4687 TODO_verify_ssa
| TODO_verify_flow
4688 | TODO_verify_stmts
, /* todo_flags_start */
4689 TODO_ggc_collect
/* todo_flags_finish */