[gcc.git] / gcc / cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
41 #include "toplev.h"
42 #include "debug.h"
43 #include "params.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
46 #include "target.h"
47 #include "ssaexpand.h"
48 #include "bitmap.h"
49 #include "sbitmap.h"
50 #include "cfgloop.h"
51 #include "regs.h" /* For reg_renumber. */
52 #include "integrate.h" /* For emit_initial_value_sets. */
53 #include "insn-attr.h" /* For INSN_SCHEDULING. */
54
55 /* This variable holds information helping the rewriting of SSA trees
56 into RTL. */
57 struct ssaexpand SA;
58
59 /* This variable holds the currently expanded gimple statement for purposes
60      of communicating the profile info to the builtin expanders. */
61 gimple currently_expanding_gimple_stmt;
62
63 static rtx expand_debug_expr (tree);
64
65 /* Return an expression tree corresponding to the RHS of GIMPLE
66 statement STMT. */
67
68 tree
69 gimple_assign_rhs_to_tree (gimple stmt)
70 {
71 tree t;
72 enum gimple_rhs_class grhs_class;
73
74 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
75
76 if (grhs_class == GIMPLE_TERNARY_RHS)
77 t = build3 (gimple_assign_rhs_code (stmt),
78 TREE_TYPE (gimple_assign_lhs (stmt)),
79 gimple_assign_rhs1 (stmt),
80 gimple_assign_rhs2 (stmt),
81 gimple_assign_rhs3 (stmt));
82 else if (grhs_class == GIMPLE_BINARY_RHS)
83 t = build2 (gimple_assign_rhs_code (stmt),
84 TREE_TYPE (gimple_assign_lhs (stmt)),
85 gimple_assign_rhs1 (stmt),
86 gimple_assign_rhs2 (stmt));
87 else if (grhs_class == GIMPLE_UNARY_RHS)
88 t = build1 (gimple_assign_rhs_code (stmt),
89 TREE_TYPE (gimple_assign_lhs (stmt)),
90 gimple_assign_rhs1 (stmt));
91 else if (grhs_class == GIMPLE_SINGLE_RHS)
92 {
93 t = gimple_assign_rhs1 (stmt);
94 /* Avoid modifying this tree in place below. */
95 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
96 && gimple_location (stmt) != EXPR_LOCATION (t))
97 || (gimple_block (stmt)
98 && currently_expanding_to_rtl
99 && EXPR_P (t)
100 && gimple_block (stmt) != TREE_BLOCK (t)))
101 t = copy_node (t);
102 }
103 else
104 gcc_unreachable ();
105
106 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
107 SET_EXPR_LOCATION (t, gimple_location (stmt));
108 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
109 TREE_BLOCK (t) = gimple_block (stmt);
110
111 return t;
112 }
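
/* For example, for a GIMPLE assignment "x_1 = a_2 + b_3" the RHS class is
   GIMPLE_BINARY_RHS, so the routine above builds the GENERIC tree
   PLUS_EXPR <a_2, b_3> with the type of the LHS, carrying over the
   statement's location and block.  */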
113
114
115 #ifndef STACK_ALIGNMENT_NEEDED
116 #define STACK_ALIGNMENT_NEEDED 1
117 #endif
118
119 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
120
121 /* Associate declaration T with storage space X.  If T is not an
122    SSA name, this is exactly SET_DECL_RTL; otherwise make the
123    partition of T associated with X. */
124 static inline void
125 set_rtl (tree t, rtx x)
126 {
127 if (TREE_CODE (t) == SSA_NAME)
128 {
129 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
130 if (x && !MEM_P (x))
131 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
132 /* For the benefit of debug information at -O0 (where vartracking
133 doesn't run) record the place also in the base DECL if it's
134 a normal variable (not a parameter). */
135 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
136 {
137 tree var = SSA_NAME_VAR (t);
138 /* If we don't yet have something recorded, just record it now. */
139 if (!DECL_RTL_SET_P (var))
140 SET_DECL_RTL (var, x);
141 /* If we have it set already to "multiple places" don't
142 change this. */
143 else if (DECL_RTL (var) == pc_rtx)
144 ;
145 /* If we have something recorded and it's not the same place
146 as we want to record now, we have multiple partitions for the
147 same base variable, with different places. We can't just
148              randomly choose one, hence we have to say that we don't know.
149              This only happens with optimization, and then var-tracking
150 will figure out the right thing. */
151 else if (DECL_RTL (var) != x)
152 SET_DECL_RTL (var, pc_rtx);
153 }
154 }
155 else
156 SET_DECL_RTL (t, x);
157 }
158
159 /* This structure holds data relevant to one variable that will be
160 placed in a stack slot. */
161 struct stack_var
162 {
163 /* The Variable. */
164 tree decl;
165
166 /* Initially, the size of the variable. Later, the size of the partition,
167      if this variable becomes its partition's representative. */
168 HOST_WIDE_INT size;
169
170   /* The *byte* alignment required for this variable.  Or, as with the
171      size, the alignment for this partition. */
172 unsigned int alignb;
173
174 /* The partition representative. */
175 size_t representative;
176
177 /* The next stack variable in the partition, or EOC. */
178 size_t next;
179
180 /* The numbers of conflicting stack variables. */
181 bitmap conflicts;
182 };
183
184 #define EOC ((size_t)-1)
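
/* For example, a partition containing stack_vars[2], [5] and [7] has
   representative == 2 in all three entries, with the members linked through
   their NEXT fields and the chain terminated by EOC; the representative's
   SIZE and ALIGNB then stand for the whole partition.  */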
185
186 /* We have an array of such objects while deciding allocation. */
187 static struct stack_var *stack_vars;
188 static size_t stack_vars_alloc;
189 static size_t stack_vars_num;
190 static struct pointer_map_t *decl_to_stack_part;
191
192 /* An array of indices sorted by stack_var_cmp: objects with "large"
193    alignment come first, then sizes in decreasing order. */
194 static size_t *stack_vars_sorted;
195
196 /* The phase of the stack frame. This is the known misalignment of
197 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
198 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
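/* For instance, with a 16-byte PREFERRED_STACK_BOUNDARY and a (purely
   illustrative) STARTING_FRAME_OFFSET of 8, the phase computed in
   expand_used_vars is 16 - 8 = 8, so offsets -8, -24, -40, ... are the
   ones whose addresses end up 16-byte aligned.  */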
199 static int frame_phase;
200
201 /* Used during expand_used_vars to remember if we saw any decls for
202 which we'd like to enable stack smashing protection. */
203 static bool has_protected_decls;
204
205 /* Used during expand_used_vars.  Remember if we saw a character buffer
206 smaller than our cutoff threshold. Used for -Wstack-protector. */
207 static bool has_short_buffer;
208
209 /* Compute the byte alignment to use for DECL. Ignore alignment
210    we can't satisfy given the expected alignment of the stack boundary. */
211
212 static unsigned int
213 align_local_variable (tree decl)
214 {
215 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
216 DECL_ALIGN (decl) = align;
217 return align / BITS_PER_UNIT;
218 }
219
220 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
221 Return the frame offset. */
222
223 static HOST_WIDE_INT
224 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
225 {
226 HOST_WIDE_INT offset, new_frame_offset;
227
228 new_frame_offset = frame_offset;
229 if (FRAME_GROWS_DOWNWARD)
230 {
231 new_frame_offset -= size + frame_phase;
232 new_frame_offset &= -align;
233 new_frame_offset += frame_phase;
234 offset = new_frame_offset;
235 }
236 else
237 {
238 new_frame_offset -= frame_phase;
239 new_frame_offset += align - 1;
240 new_frame_offset &= -align;
241 new_frame_offset += frame_phase;
242 offset = new_frame_offset;
243 new_frame_offset += size;
244 }
245 frame_offset = new_frame_offset;
246
247 if (frame_offset_overflow (frame_offset, cfun->decl))
248 frame_offset = offset = 0;
249
250 return offset;
251 }
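
/* As a worked example with FRAME_GROWS_DOWNWARD and frame_phase == 0:
   allocating 12 bytes at 8-byte alignment when frame_offset is -16 gives
   new_frame_offset = -16 - 12 = -28, which "&= -8" rounds down to -32;
   -32 is returned as the offset and becomes the new frame_offset.  */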
252
253 /* Accumulate DECL into STACK_VARS. */
254
255 static void
256 add_stack_var (tree decl)
257 {
258 struct stack_var *v;
259
260 if (stack_vars_num >= stack_vars_alloc)
261 {
262 if (stack_vars_alloc)
263 stack_vars_alloc = stack_vars_alloc * 3 / 2;
264 else
265 stack_vars_alloc = 32;
266 stack_vars
267 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
268 }
269 if (!decl_to_stack_part)
270 decl_to_stack_part = pointer_map_create ();
271
272 v = &stack_vars[stack_vars_num];
273 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
274
275 v->decl = decl;
276 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
277 /* Ensure that all variables have size, so that &a != &b for any two
278 variables that are simultaneously live. */
279 if (v->size == 0)
280 v->size = 1;
281 v->alignb = align_local_variable (SSAVAR (decl));
282 /* An alignment of zero can mightily confuse us later. */
283 gcc_assert (v->alignb != 0);
284
285 /* All variables are initially in their own partition. */
286 v->representative = stack_vars_num;
287 v->next = EOC;
288
289 /* All variables initially conflict with no other. */
290 v->conflicts = NULL;
291
292 /* Ensure that this decl doesn't get put onto the list twice. */
293 set_rtl (decl, pc_rtx);
294
295 stack_vars_num++;
296 }
297
298 /* Make the decls associated with luids X and Y conflict. */
299
300 static void
301 add_stack_var_conflict (size_t x, size_t y)
302 {
303 struct stack_var *a = &stack_vars[x];
304 struct stack_var *b = &stack_vars[y];
305 if (!a->conflicts)
306 a->conflicts = BITMAP_ALLOC (NULL);
307 if (!b->conflicts)
308 b->conflicts = BITMAP_ALLOC (NULL);
309 bitmap_set_bit (a->conflicts, y);
310 bitmap_set_bit (b->conflicts, x);
311 }
312
313 /* Check whether the decls associated with luids X and Y conflict. */
314
315 static bool
316 stack_var_conflict_p (size_t x, size_t y)
317 {
318 struct stack_var *a = &stack_vars[x];
319 struct stack_var *b = &stack_vars[y];
320 if (x == y)
321 return false;
322 /* Partitions containing an SSA name result from gimple registers
323 with things like unsupported modes. They are top-level and
324 hence conflict with everything else. */
325 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
326 return true;
327
328 if (!a->conflicts || !b->conflicts)
329 return false;
330 return bitmap_bit_p (a->conflicts, y);
331 }
332
333 /* Returns true if TYPE is or contains a union type. */
334
335 static bool
336 aggregate_contains_union_type (tree type)
337 {
338 tree field;
339
340 if (TREE_CODE (type) == UNION_TYPE
341 || TREE_CODE (type) == QUAL_UNION_TYPE)
342 return true;
343 if (TREE_CODE (type) == ARRAY_TYPE)
344 return aggregate_contains_union_type (TREE_TYPE (type));
345 if (TREE_CODE (type) != RECORD_TYPE)
346 return false;
347
348 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
349 if (TREE_CODE (field) == FIELD_DECL)
350 if (aggregate_contains_union_type (TREE_TYPE (field)))
351 return true;
352
353 return false;
354 }
355
356 /* A subroutine of expand_used_vars.  If two variables X and Y have alias
357    sets that do not conflict, then add a conflict for these variables
358    in the interference graph.  We also need to make sure to add conflicts
359    for union-containing structures.  Otherwise RTL alias analysis comes
360    along and, due to type-based aliasing rules, decides that for two
361    overlapping union temporaries { short s; int i; } accesses to the same
362    memory through different types may not alias, and happily reorders
363    stores across the life-time boundaries of the temporaries (see PR25654). */
364
365 static void
366 add_alias_set_conflicts (void)
367 {
368 size_t i, j, n = stack_vars_num;
369
370 for (i = 0; i < n; ++i)
371 {
372 tree type_i = TREE_TYPE (stack_vars[i].decl);
373 bool aggr_i = AGGREGATE_TYPE_P (type_i);
374 bool contains_union;
375
376 contains_union = aggregate_contains_union_type (type_i);
377 for (j = 0; j < i; ++j)
378 {
379 tree type_j = TREE_TYPE (stack_vars[j].decl);
380 bool aggr_j = AGGREGATE_TYPE_P (type_j);
381 if (aggr_i != aggr_j
382 /* Either the objects conflict by means of type based
383 aliasing rules, or we need to add a conflict. */
384 || !objects_must_conflict_p (type_i, type_j)
385 /* In case the types do not conflict ensure that access
386 to elements will conflict. In case of unions we have
387 to be careful as type based aliasing rules may say
388 access to the same memory does not conflict. So play
389 safe and add a conflict in this case when
390 -fstrict-aliasing is used. */
391 || (contains_union && flag_strict_aliasing))
392 add_stack_var_conflict (i, j);
393 }
394 }
395 }
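
/* For example, an "int" temporary and a "float" temporary have
   non-conflicting alias sets; without the conflict recorded above the
   partitioner could give them the same frame slot, and RTL alias analysis
   would then treat a store to one and a load from the other as
   non-aliasing, allowing them to be reordered.  */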
396
397 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
398 enter its partition number into bitmap DATA. */
399
400 static bool
401 visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
402 {
403 bitmap active = (bitmap)data;
404 op = get_base_address (op);
405 if (op
406 && DECL_P (op)
407 && DECL_RTL_IF_SET (op) == pc_rtx)
408 {
409 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
410 if (v)
411 bitmap_set_bit (active, *v);
412 }
413 return false;
414 }
415
416 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
417 record conflicts between it and all currently active other partitions
418 from bitmap DATA. */
419
420 static bool
421 visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
422 {
423 bitmap active = (bitmap)data;
424 op = get_base_address (op);
425 if (op
426 && DECL_P (op)
427 && DECL_RTL_IF_SET (op) == pc_rtx)
428 {
429 size_t *v =
430 (size_t *) pointer_map_contains (decl_to_stack_part, op);
431 if (v && bitmap_set_bit (active, *v))
432 {
433 size_t num = *v;
434 bitmap_iterator bi;
435 unsigned i;
436 gcc_assert (num < stack_vars_num);
437 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
438 add_stack_var_conflict (num, i);
439 }
440 }
441 return false;
442 }
443
444 /* Helper routine for add_scope_conflicts, calculating the active partitions
445 at the end of BB, leaving the result in WORK. We're called to generate
446 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
447 liveness. */
448
449 static void
450 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
451 {
452 edge e;
453 edge_iterator ei;
454 gimple_stmt_iterator gsi;
455 bool (*visit)(gimple, tree, void *);
456
457 bitmap_clear (work);
458 FOR_EACH_EDGE (e, ei, bb->preds)
459 bitmap_ior_into (work, (bitmap)e->src->aux);
460
461 visit = visit_op;
462
463 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
464 {
465 gimple stmt = gsi_stmt (gsi);
466 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
467 }
468 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
469 {
470 gimple stmt = gsi_stmt (gsi);
471
472 if (gimple_clobber_p (stmt))
473 {
474 tree lhs = gimple_assign_lhs (stmt);
475 size_t *v;
476 /* Nested function lowering might introduce LHSs
477 that are COMPONENT_REFs. */
478 if (TREE_CODE (lhs) != VAR_DECL)
479 continue;
480 if (DECL_RTL_IF_SET (lhs) == pc_rtx
481 && (v = (size_t *)
482 pointer_map_contains (decl_to_stack_part, lhs)))
483 bitmap_clear_bit (work, *v);
484 }
485 else if (!is_gimple_debug (stmt))
486 {
487 if (for_conflict
488 && visit == visit_op)
489 {
490 /* If this is the first real instruction in this BB we need
491 to add conflicts for everything live at this point now.
492 Unlike classical liveness for named objects we can't
493 rely on seeing a def/use of the names we're interested in.
494 There might merely be indirect loads/stores. We'd not add any
495 conflicts for such partitions. */
496 bitmap_iterator bi;
497 unsigned i;
498 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
499 {
500 unsigned j;
501 bitmap_iterator bj;
502 EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
503 add_stack_var_conflict (i, j);
504 }
505 visit = visit_conflict;
506 }
507 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
508 }
509 }
510 }
511
512 /* Generate stack partition conflicts between all partitions that are
513 simultaneously live. */
514
515 static void
516 add_scope_conflicts (void)
517 {
518 basic_block bb;
519 bool changed;
520 bitmap work = BITMAP_ALLOC (NULL);
521
522 /* We approximate the live range of a stack variable by taking the first
523 mention of its name as starting point(s), and by the end-of-scope
524 death clobber added by gimplify as ending point(s) of the range.
525      This overapproximates in the case where we, for instance, moved an
526      address-taken operation upward without also moving a dereference of it
527      upwards.  But it's conservatively correct, as a variable can never hold
528      values before its name is mentioned at least once.
529
530 We then do a mostly classical bitmap liveness algorithm. */
531
532 FOR_ALL_BB (bb)
533 bb->aux = BITMAP_ALLOC (NULL);
534
535 changed = true;
536 while (changed)
537 {
538 changed = false;
539 FOR_EACH_BB (bb)
540 {
541 bitmap active = (bitmap)bb->aux;
542 add_scope_conflicts_1 (bb, work, false);
543 if (bitmap_ior_into (active, work))
544 changed = true;
545 }
546 }
547
548 FOR_EACH_BB (bb)
549 add_scope_conflicts_1 (bb, work, true);
550
551 BITMAP_FREE (work);
552 FOR_ALL_BB (bb)
553 BITMAP_FREE (bb->aux);
554 }
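
/* In dataflow terms, bb->aux holds the set of partitions active at the end
   of each block; the loop above iterates until those bitmaps reach a fixed
   point, and the final pass re-walks every block with FOR_CONFLICT set so
   that the pairwise conflicts are actually recorded.  */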
555
556 /* A subroutine of partition_stack_vars. A comparison function for qsort,
557 sorting an array of indices by the properties of the object. */
558
559 static int
560 stack_var_cmp (const void *a, const void *b)
561 {
562 size_t ia = *(const size_t *)a;
563 size_t ib = *(const size_t *)b;
564 unsigned int aligna = stack_vars[ia].alignb;
565 unsigned int alignb = stack_vars[ib].alignb;
566 HOST_WIDE_INT sizea = stack_vars[ia].size;
567 HOST_WIDE_INT sizeb = stack_vars[ib].size;
568 tree decla = stack_vars[ia].decl;
569 tree declb = stack_vars[ib].decl;
570 bool largea, largeb;
571 unsigned int uida, uidb;
572
573 /* Primary compare on "large" alignment. Large comes first. */
574 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
575 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
576 if (largea != largeb)
577 return (int)largeb - (int)largea;
578
579   /* Secondary compare on size, decreasing.  */
580 if (sizea > sizeb)
581 return -1;
582 if (sizea < sizeb)
583 return 1;
584
585 /* Tertiary compare on true alignment, decreasing. */
586 if (aligna < alignb)
587 return -1;
588 if (aligna > alignb)
589 return 1;
590
591   /* Final compare on ID for sort stability, decreasing.
592 Two SSA names are compared by their version, SSA names come before
593 non-SSA names, and two normal decls are compared by their DECL_UID. */
594 if (TREE_CODE (decla) == SSA_NAME)
595 {
596 if (TREE_CODE (declb) == SSA_NAME)
597 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
598 else
599 return -1;
600 }
601 else if (TREE_CODE (declb) == SSA_NAME)
602 return 1;
603 else
604 uida = DECL_UID (decla), uidb = DECL_UID (declb);
605 if (uida < uidb)
606 return 1;
607 if (uida > uidb)
608 return -1;
609 return 0;
610 }
611
612
613 /* If the points-to solution *PT points to variables that are in a partition
614 together with other variables add all partition members to the pointed-to
615 variables bitmap. */
616
617 static void
618 add_partitioned_vars_to_ptset (struct pt_solution *pt,
619 struct pointer_map_t *decls_to_partitions,
620 struct pointer_set_t *visited, bitmap temp)
621 {
622 bitmap_iterator bi;
623 unsigned i;
624 bitmap *part;
625
626 if (pt->anything
627 || pt->vars == NULL
628 /* The pointed-to vars bitmap is shared, it is enough to
629 visit it once. */
630 || pointer_set_insert(visited, pt->vars))
631 return;
632
633 bitmap_clear (temp);
634
635   /* By using a temporary bitmap to store all members of the partitions
636      we have to add, we make sure to visit each of the partitions only
637      once. */
638 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
639 if ((!temp
640 || !bitmap_bit_p (temp, i))
641 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
642 (void *)(size_t) i)))
643 bitmap_ior_into (temp, *part);
644 if (!bitmap_empty_p (temp))
645 bitmap_ior_into (pt->vars, temp);
646 }
647
648 /* Update points-to sets based on partition info, so we can use them on RTL.
649 The bitmaps representing stack partitions will be saved until expand,
650 where partitioned decls used as bases in memory expressions will be
651 rewritten. */
652
653 static void
654 update_alias_info_with_stack_vars (void)
655 {
656 struct pointer_map_t *decls_to_partitions = NULL;
657 size_t i, j;
658 tree var = NULL_TREE;
659
660 for (i = 0; i < stack_vars_num; i++)
661 {
662 bitmap part = NULL;
663 tree name;
664 struct ptr_info_def *pi;
665
666       /* Not interested in partitions with a single variable. */
667 if (stack_vars[i].representative != i
668 || stack_vars[i].next == EOC)
669 continue;
670
671 if (!decls_to_partitions)
672 {
673 decls_to_partitions = pointer_map_create ();
674 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
675 }
676
677 /* Create an SSA_NAME that points to the partition for use
678 as base during alias-oracle queries on RTL for bases that
679 have been partitioned. */
680 if (var == NULL_TREE)
681 var = create_tmp_var (ptr_type_node, NULL);
682 name = make_ssa_name (var, NULL);
683
684 /* Create bitmaps representing partitions. They will be used for
685 points-to sets later, so use GGC alloc. */
686 part = BITMAP_GGC_ALLOC ();
687 for (j = i; j != EOC; j = stack_vars[j].next)
688 {
689 tree decl = stack_vars[j].decl;
690 unsigned int uid = DECL_PT_UID (decl);
691 /* We should never end up partitioning SSA names (though they
692 may end up on the stack). Neither should we allocate stack
693 space to something that is unused and thus unreferenced, except
694 for -O0 where we are preserving even unreferenced variables. */
695 gcc_assert (DECL_P (decl)
696 && (!optimize
697 || referenced_var_lookup (cfun, DECL_UID (decl))));
698 bitmap_set_bit (part, uid);
699 *((bitmap *) pointer_map_insert (decls_to_partitions,
700 (void *)(size_t) uid)) = part;
701 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
702 decl)) = name;
703 }
704
705 /* Make the SSA name point to all partition members. */
706 pi = get_ptr_info (name);
707 pt_solution_set (&pi->pt, part, false);
708 }
709
710 /* Make all points-to sets that contain one member of a partition
711 contain all members of the partition. */
712 if (decls_to_partitions)
713 {
714 unsigned i;
715 struct pointer_set_t *visited = pointer_set_create ();
716 bitmap temp = BITMAP_ALLOC (NULL);
717
718 for (i = 1; i < num_ssa_names; i++)
719 {
720 tree name = ssa_name (i);
721 struct ptr_info_def *pi;
722
723 if (name
724 && POINTER_TYPE_P (TREE_TYPE (name))
725 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
726 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
727 visited, temp);
728 }
729
730 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
731 decls_to_partitions, visited, temp);
732
733 pointer_set_destroy (visited);
734 pointer_map_destroy (decls_to_partitions);
735 BITMAP_FREE (temp);
736 }
737 }
738
739 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
740 partitioning algorithm. Partitions A and B are known to be non-conflicting.
741 Merge them into a single partition A. */
742
743 static void
744 union_stack_vars (size_t a, size_t b)
745 {
746 struct stack_var *vb = &stack_vars[b];
747 bitmap_iterator bi;
748 unsigned u;
749
750 gcc_assert (stack_vars[b].next == EOC);
751 /* Add B to A's partition. */
752 stack_vars[b].next = stack_vars[a].next;
753 stack_vars[b].representative = a;
754 stack_vars[a].next = b;
755
756 /* Update the required alignment of partition A to account for B. */
757 if (stack_vars[a].alignb < stack_vars[b].alignb)
758 stack_vars[a].alignb = stack_vars[b].alignb;
759
760 /* Update the interference graph and merge the conflicts. */
761 if (vb->conflicts)
762 {
763 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
764 add_stack_var_conflict (a, stack_vars[u].representative);
765 BITMAP_FREE (vb->conflicts);
766 }
767 }
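
/* E.g. merging partition B == 4 into partition A == 1 links entry 4 into
   entry 1's NEXT chain, points 4's representative at 1, raises 1's
   alignment to the maximum of the two, and re-records 4's conflicts
   against partition 1.  */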
768
769 /* A subroutine of expand_used_vars. Binpack the variables into
770 partitions constrained by the interference graph. The overall
771 algorithm used is as follows:
772
773 Sort the objects by size in descending order.
774 For each object A {
775 S = size(A)
777 loop {
778 Look for the largest non-conflicting object B with size <= S.
779 UNION (A, B)
780 }
781 }
782 */
783
784 static void
785 partition_stack_vars (void)
786 {
787 size_t si, sj, n = stack_vars_num;
788
789 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
790 for (si = 0; si < n; ++si)
791 stack_vars_sorted[si] = si;
792
793 if (n == 1)
794 return;
795
796 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
797
798 for (si = 0; si < n; ++si)
799 {
800 size_t i = stack_vars_sorted[si];
801 unsigned int ialign = stack_vars[i].alignb;
802
803 /* Ignore objects that aren't partition representatives. If we
804 see a var that is not a partition representative, it must
805 have been merged earlier. */
806 if (stack_vars[i].representative != i)
807 continue;
808
809 for (sj = si + 1; sj < n; ++sj)
810 {
811 size_t j = stack_vars_sorted[sj];
812 unsigned int jalign = stack_vars[j].alignb;
813
814 /* Ignore objects that aren't partition representatives. */
815 if (stack_vars[j].representative != j)
816 continue;
817
818 /* Ignore conflicting objects. */
819 if (stack_var_conflict_p (i, j))
820 continue;
821
822 /* Do not mix objects of "small" (supported) alignment
823 and "large" (unsupported) alignment. */
824 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
825 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
826 continue;
827
828 	  /* UNION the objects. */
829 union_stack_vars (i, j);
830 }
831 }
832
833 update_alias_info_with_stack_vars ();
834 }
835
836 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
837
838 static void
839 dump_stack_var_partition (void)
840 {
841 size_t si, i, j, n = stack_vars_num;
842
843 for (si = 0; si < n; ++si)
844 {
845 i = stack_vars_sorted[si];
846
847 /* Skip variables that aren't partition representatives, for now. */
848 if (stack_vars[i].representative != i)
849 continue;
850
851 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
852 " align %u\n", (unsigned long) i, stack_vars[i].size,
853 stack_vars[i].alignb);
854
855 for (j = i; j != EOC; j = stack_vars[j].next)
856 {
857 fputc ('\t', dump_file);
858 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
859 }
860 fputc ('\n', dump_file);
861 }
862 }
863
864 /* Assign rtl to DECL at BASE + OFFSET. */
865
866 static void
867 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
868 HOST_WIDE_INT offset)
869 {
870 unsigned align;
871 rtx x;
872
873 /* If this fails, we've overflowed the stack frame. Error nicely? */
874 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
875
876 x = plus_constant (base, offset);
877 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
878
879 if (TREE_CODE (decl) != SSA_NAME)
880 {
881       /* Set the alignment we actually gave this decl if it isn't an SSA name.
882          If it is, we generate stack slots only accidentally, so it isn't as
883          important; we'll simply use the alignment that is already set. */
884 if (base == virtual_stack_vars_rtx)
885 offset -= frame_phase;
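      /* OFFSET & -OFFSET isolates the lowest set bit of OFFSET, i.e. the
	 largest power-of-two byte alignment that the offset itself
	 guarantees (an offset of 24, for example, guarantees 8 bytes).  */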
886 align = offset & -offset;
887 align *= BITS_PER_UNIT;
888 if (align == 0 || align > base_align)
889 align = base_align;
890
891 /* One would think that we could assert that we're not decreasing
892 alignment here, but (at least) the i386 port does exactly this
893 via the MINIMUM_ALIGNMENT hook. */
894
895 DECL_ALIGN (decl) = align;
896 DECL_USER_ALIGN (decl) = 0;
897 }
898
899 set_mem_attributes (x, SSAVAR (decl), true);
900 set_rtl (decl, x);
901 }
902
903 /* A subroutine of expand_used_vars. Give each partition representative
904 a unique location within the stack frame. Update each partition member
905 with that location. */
906
907 static void
908 expand_stack_vars (bool (*pred) (tree))
909 {
910 size_t si, i, j, n = stack_vars_num;
911 HOST_WIDE_INT large_size = 0, large_alloc = 0;
912 rtx large_base = NULL;
913 unsigned large_align = 0;
914 tree decl;
915
916 /* Determine if there are any variables requiring "large" alignment.
917      Since these are dynamically allocated, we only process them when
918      no predicate is involved. */
919 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
920 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
921 {
922 /* Find the total size of these variables. */
923 for (si = 0; si < n; ++si)
924 {
925 unsigned alignb;
926
927 i = stack_vars_sorted[si];
928 alignb = stack_vars[i].alignb;
929
930 /* Stop when we get to the first decl with "small" alignment. */
931 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
932 break;
933
934 /* Skip variables that aren't partition representatives. */
935 if (stack_vars[i].representative != i)
936 continue;
937
938 /* Skip variables that have already had rtl assigned. See also
939 add_stack_var where we perpetrate this pc_rtx hack. */
940 decl = stack_vars[i].decl;
941 if ((TREE_CODE (decl) == SSA_NAME
942 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
943 : DECL_RTL (decl)) != pc_rtx)
944 continue;
945
946 large_size += alignb - 1;
947 large_size &= -(HOST_WIDE_INT)alignb;
948 large_size += stack_vars[i].size;
949 }
950
951 /* If there were any, allocate space. */
952 if (large_size > 0)
953 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
954 large_align, true);
955 }
956
957 for (si = 0; si < n; ++si)
958 {
959 rtx base;
960 unsigned base_align, alignb;
961 HOST_WIDE_INT offset;
962
963 i = stack_vars_sorted[si];
964
965 /* Skip variables that aren't partition representatives, for now. */
966 if (stack_vars[i].representative != i)
967 continue;
968
969 /* Skip variables that have already had rtl assigned. See also
970 add_stack_var where we perpetrate this pc_rtx hack. */
971 decl = stack_vars[i].decl;
972 if ((TREE_CODE (decl) == SSA_NAME
973 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
974 : DECL_RTL (decl)) != pc_rtx)
975 continue;
976
977 /* Check the predicate to see whether this variable should be
978 allocated in this pass. */
979 if (pred && !pred (decl))
980 continue;
981
982 alignb = stack_vars[i].alignb;
983 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
984 {
985 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
986 base = virtual_stack_vars_rtx;
987 base_align = crtl->max_used_stack_slot_alignment;
988 }
989 else
990 {
991 /* Large alignment is only processed in the last pass. */
992 if (pred)
993 continue;
994 gcc_assert (large_base != NULL);
995
996 large_alloc += alignb - 1;
997 large_alloc &= -(HOST_WIDE_INT)alignb;
998 offset = large_alloc;
999 large_alloc += stack_vars[i].size;
1000
1001 base = large_base;
1002 base_align = large_align;
1003 }
1004
1005 /* Create rtl for each variable based on their location within the
1006 partition. */
1007 for (j = i; j != EOC; j = stack_vars[j].next)
1008 {
1009 expand_one_stack_var_at (stack_vars[j].decl,
1010 base, base_align,
1011 offset);
1012 }
1013 }
1014
1015 gcc_assert (large_alloc == large_size);
1016 }
1017
1018 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1019 static HOST_WIDE_INT
1020 account_stack_vars (void)
1021 {
1022 size_t si, j, i, n = stack_vars_num;
1023 HOST_WIDE_INT size = 0;
1024
1025 for (si = 0; si < n; ++si)
1026 {
1027 i = stack_vars_sorted[si];
1028
1029 /* Skip variables that aren't partition representatives, for now. */
1030 if (stack_vars[i].representative != i)
1031 continue;
1032
1033 size += stack_vars[i].size;
1034 for (j = i; j != EOC; j = stack_vars[j].next)
1035 set_rtl (stack_vars[j].decl, NULL);
1036 }
1037 return size;
1038 }
1039
1040 /* A subroutine of expand_one_var. Called to immediately assign rtl
1041 to a variable to be allocated in the stack frame. */
1042
1043 static void
1044 expand_one_stack_var (tree var)
1045 {
1046 HOST_WIDE_INT size, offset;
1047 unsigned byte_align;
1048
1049 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1050 byte_align = align_local_variable (SSAVAR (var));
1051
1052 /* We handle highly aligned variables in expand_stack_vars. */
1053 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1054
1055 offset = alloc_stack_frame_space (size, byte_align);
1056
1057 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1058 crtl->max_used_stack_slot_alignment, offset);
1059 }
1060
1061 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1062 that will reside in a hard register. */
1063
1064 static void
1065 expand_one_hard_reg_var (tree var)
1066 {
1067 rest_of_decl_compilation (var, 0, 0);
1068 }
1069
1070 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1071 that will reside in a pseudo register. */
1072
1073 static void
1074 expand_one_register_var (tree var)
1075 {
1076 tree decl = SSAVAR (var);
1077 tree type = TREE_TYPE (decl);
1078 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1079 rtx x = gen_reg_rtx (reg_mode);
1080
1081 set_rtl (var, x);
1082
1083 /* Note if the object is a user variable. */
1084 if (!DECL_ARTIFICIAL (decl))
1085 mark_user_reg (x);
1086
1087 if (POINTER_TYPE_P (type))
1088 mark_reg_pointer (x, get_pointer_alignment (var));
1089 }
1090
1091 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1092 has some associated error, e.g. its type is error-mark. We just need
1093 to pick something that won't crash the rest of the compiler. */
1094
1095 static void
1096 expand_one_error_var (tree var)
1097 {
1098 enum machine_mode mode = DECL_MODE (var);
1099 rtx x;
1100
1101 if (mode == BLKmode)
1102 x = gen_rtx_MEM (BLKmode, const0_rtx);
1103 else if (mode == VOIDmode)
1104 x = const0_rtx;
1105 else
1106 x = gen_reg_rtx (mode);
1107
1108 SET_DECL_RTL (var, x);
1109 }
1110
1111 /* A subroutine of expand_one_var. VAR is a variable that will be
1112 allocated to the local stack frame. Return true if we wish to
1113 add VAR to STACK_VARS so that it will be coalesced with other
1114 variables. Return false to allocate VAR immediately.
1115
1116 This function is used to reduce the number of variables considered
1117 for coalescing, which reduces the size of the quadratic problem. */
1118
1119 static bool
1120 defer_stack_allocation (tree var, bool toplevel)
1121 {
1122 /* If stack protection is enabled, *all* stack variables must be deferred,
1123 so that we can re-order the strings to the top of the frame. */
1124 if (flag_stack_protect)
1125 return true;
1126
1127 /* We handle "large" alignment via dynamic allocation. We want to handle
1128 this extra complication in only one place, so defer them. */
1129 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1130 return true;
1131
1132 /* Variables in the outermost scope automatically conflict with
1133 every other variable. The only reason to want to defer them
1134 at all is that, after sorting, we can more efficiently pack
1135 small variables in the stack frame. Continue to defer at -O2. */
1136 if (toplevel && optimize < 2)
1137 return false;
1138
1139 /* Without optimization, *most* variables are allocated from the
1140 stack, which makes the quadratic problem large exactly when we
1141 want compilation to proceed as quickly as possible. On the
1142 other hand, we don't want the function's stack frame size to
1143 get completely out of hand. So we avoid adding scalars and
1144 "small" aggregates to the list at all. */
1145 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1146 return false;
1147
1148 return true;
1149 }
1150
1151 /* A subroutine of expand_used_vars. Expand one variable according to
1152 its flavor. Variables to be placed on the stack are not actually
1153 expanded yet, merely recorded.
1154    When REALLY_EXPAND is false, only add stack variables to be allocated.
1155    Return the stack usage this variable is supposed to take.
1156 */
1157
1158 static HOST_WIDE_INT
1159 expand_one_var (tree var, bool toplevel, bool really_expand)
1160 {
1161 unsigned int align = BITS_PER_UNIT;
1162 tree origvar = var;
1163
1164 var = SSAVAR (var);
1165
1166 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1167 {
1168       /* Because we don't know whether VAR will end up in a register or on
1169          the stack, we conservatively assume it will be on the stack even if
1170          VAR is eventually put into a register after the RA pass.  For
1171          non-automatic variables, which won't be on the stack, we collect the
1172          alignment of the type and ignore any user-specified alignment. */
1173 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1174 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1175 TYPE_MODE (TREE_TYPE (var)),
1176 TYPE_ALIGN (TREE_TYPE (var)));
1177 else if (DECL_HAS_VALUE_EXPR_P (var)
1178 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1179 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1180 or variables which were assigned a stack slot already by
1181 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1182 changed from the offset chosen to it. */
1183 align = crtl->stack_alignment_estimated;
1184 else
1185 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1186
1187       /* If the variable alignment is very large we'll dynamically allocate
1188          it, which means that the in-frame portion is just a pointer. */
1189 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1190 align = POINTER_SIZE;
1191 }
1192
1193 if (SUPPORTS_STACK_ALIGNMENT
1194 && crtl->stack_alignment_estimated < align)
1195 {
1196       /* stack_alignment_estimated shouldn't change after the stack
1197          realign decision has been made. */
1198 gcc_assert(!crtl->stack_realign_processed);
1199 crtl->stack_alignment_estimated = align;
1200 }
1201
1202 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1203 So here we only make sure stack_alignment_needed >= align. */
1204 if (crtl->stack_alignment_needed < align)
1205 crtl->stack_alignment_needed = align;
1206 if (crtl->max_used_stack_slot_alignment < align)
1207 crtl->max_used_stack_slot_alignment = align;
1208
1209 if (TREE_CODE (origvar) == SSA_NAME)
1210 {
1211 gcc_assert (TREE_CODE (var) != VAR_DECL
1212 || (!DECL_EXTERNAL (var)
1213 && !DECL_HAS_VALUE_EXPR_P (var)
1214 && !TREE_STATIC (var)
1215 && TREE_TYPE (var) != error_mark_node
1216 && !DECL_HARD_REGISTER (var)
1217 && really_expand));
1218 }
1219 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1220 ;
1221 else if (DECL_EXTERNAL (var))
1222 ;
1223 else if (DECL_HAS_VALUE_EXPR_P (var))
1224 ;
1225 else if (TREE_STATIC (var))
1226 ;
1227 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1228 ;
1229 else if (TREE_TYPE (var) == error_mark_node)
1230 {
1231 if (really_expand)
1232 expand_one_error_var (var);
1233 }
1234 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1235 {
1236 if (really_expand)
1237 expand_one_hard_reg_var (var);
1238 }
1239 else if (use_register_for_decl (var))
1240 {
1241 if (really_expand)
1242 expand_one_register_var (origvar);
1243 }
1244 else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
1245 {
1246 if (really_expand)
1247 {
1248 error ("size of variable %q+D is too large", var);
1249 expand_one_error_var (var);
1250 }
1251 }
1252 else if (defer_stack_allocation (var, toplevel))
1253 add_stack_var (origvar);
1254 else
1255 {
1256 if (really_expand)
1257 expand_one_stack_var (origvar);
1258 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1259 }
1260 return 0;
1261 }
1262
1263 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1264 expanding variables. Those variables that can be put into registers
1265 are allocated pseudos; those that can't are put on the stack.
1266
1267 TOPLEVEL is true if this is the outermost BLOCK. */
1268
1269 static void
1270 expand_used_vars_for_block (tree block, bool toplevel)
1271 {
1272 tree t;
1273
1274 /* Expand all variables at this level. */
1275 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1276 if (TREE_USED (t)
1277 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1278 || !DECL_NONSHAREABLE (t)))
1279 expand_one_var (t, toplevel, true);
1280
1281   /* Expand all variables at contained (nested) levels. */
1282 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1283 expand_used_vars_for_block (t, false);
1284 }
1285
1286 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1287 and clear TREE_USED on all local variables. */
1288
1289 static void
1290 clear_tree_used (tree block)
1291 {
1292 tree t;
1293
1294 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1295 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1296 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1297 || !DECL_NONSHAREABLE (t))
1298 TREE_USED (t) = 0;
1299
1300 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1301 clear_tree_used (t);
1302 }
1303
1304 /* Examine TYPE and determine a bit mask of the following features. */
1305
1306 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1307 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1308 #define SPCT_HAS_ARRAY 4
1309 #define SPCT_HAS_AGGREGATE 8
1310
1311 static unsigned int
1312 stack_protect_classify_type (tree type)
1313 {
1314 unsigned int ret = 0;
1315 tree t;
1316
1317 switch (TREE_CODE (type))
1318 {
1319 case ARRAY_TYPE:
1320 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1321 if (t == char_type_node
1322 || t == signed_char_type_node
1323 || t == unsigned_char_type_node)
1324 {
1325 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1326 unsigned HOST_WIDE_INT len;
1327
1328 if (!TYPE_SIZE_UNIT (type)
1329 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1330 len = max;
1331 else
1332 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1333
1334 if (len < max)
1335 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1336 else
1337 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1338 }
1339 else
1340 ret = SPCT_HAS_ARRAY;
1341 break;
1342
1343 case UNION_TYPE:
1344 case QUAL_UNION_TYPE:
1345 case RECORD_TYPE:
1346 ret = SPCT_HAS_AGGREGATE;
1347 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1348 if (TREE_CODE (t) == FIELD_DECL)
1349 ret |= stack_protect_classify_type (TREE_TYPE (t));
1350 break;
1351
1352 default:
1353 break;
1354 }
1355
1356 return ret;
1357 }
1358
1359 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1360 part of the local stack frame. Remember if we ever return nonzero for
1361 any variable in this function. The return value is the phase number in
1362 which the variable should be allocated. */
1363
1364 static int
1365 stack_protect_decl_phase (tree decl)
1366 {
1367 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1368 int ret = 0;
1369
1370 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1371 has_short_buffer = true;
1372
1373 if (flag_stack_protect == 2)
1374 {
1375 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1376 && !(bits & SPCT_HAS_AGGREGATE))
1377 ret = 1;
1378 else if (bits & SPCT_HAS_ARRAY)
1379 ret = 2;
1380 }
1381 else
1382 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1383
1384 if (ret)
1385 has_protected_decls = true;
1386
1387 return ret;
1388 }
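
/* For example, with the default ssp-buffer-size of 8, a local
   "char buf[64]" classifies as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
   and gets phase 1, while a small "char buf[4]" gets phase 1 only under
   -fstack-protector-all (flag_stack_protect == 2) and is otherwise left
   in phase 0.  */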
1389
1390 /* Two helper routines that check for phase 1 and phase 2. These are used
1391 as callbacks for expand_stack_vars. */
1392
1393 static bool
1394 stack_protect_decl_phase_1 (tree decl)
1395 {
1396 return stack_protect_decl_phase (decl) == 1;
1397 }
1398
1399 static bool
1400 stack_protect_decl_phase_2 (tree decl)
1401 {
1402 return stack_protect_decl_phase (decl) == 2;
1403 }
1404
1405 /* Ensure that variables in different stack protection phases conflict
1406 so that they are not merged and share the same stack slot. */
1407
1408 static void
1409 add_stack_protection_conflicts (void)
1410 {
1411 size_t i, j, n = stack_vars_num;
1412 unsigned char *phase;
1413
1414 phase = XNEWVEC (unsigned char, n);
1415 for (i = 0; i < n; ++i)
1416 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1417
1418 for (i = 0; i < n; ++i)
1419 {
1420 unsigned char ph_i = phase[i];
1421 for (j = 0; j < i; ++j)
1422 if (ph_i != phase[j])
1423 add_stack_var_conflict (i, j);
1424 }
1425
1426 XDELETEVEC (phase);
1427 }
1428
1429 /* Create a decl for the guard at the top of the stack frame. */
1430
1431 static void
1432 create_stack_guard (void)
1433 {
1434 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1435 VAR_DECL, NULL, ptr_type_node);
1436 TREE_THIS_VOLATILE (guard) = 1;
1437 TREE_USED (guard) = 1;
1438 expand_one_stack_var (guard);
1439 crtl->stack_protect_guard = guard;
1440 }
1441
1442 /* Prepare for expanding variables. */
1443 static void
1444 init_vars_expansion (void)
1445 {
1446 tree t;
1447 unsigned ix;
1448 /* Set TREE_USED on all variables in the local_decls. */
1449 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1450 TREE_USED (t) = 1;
1451
1452 /* Clear TREE_USED on all variables associated with a block scope. */
1453 clear_tree_used (DECL_INITIAL (current_function_decl));
1454
1455 /* Initialize local stack smashing state. */
1456 has_protected_decls = false;
1457 has_short_buffer = false;
1458 }
1459
1460 /* Free up stack variable graph data. */
1461 static void
1462 fini_vars_expansion (void)
1463 {
1464 size_t i, n = stack_vars_num;
1465 for (i = 0; i < n; i++)
1466 BITMAP_FREE (stack_vars[i].conflicts);
1467 XDELETEVEC (stack_vars);
1468 XDELETEVEC (stack_vars_sorted);
1469 stack_vars = NULL;
1470 stack_vars_alloc = stack_vars_num = 0;
1471 pointer_map_destroy (decl_to_stack_part);
1472 decl_to_stack_part = NULL;
1473 }
1474
1475 /* Make a fair guess for the size of the stack frame of the function
1476 in NODE. This doesn't have to be exact, the result is only used in
1477 the inline heuristics. So we don't want to run the full stack var
1478 packing algorithm (which is quadratic in the number of stack vars).
1479 Instead, we calculate the total size of all stack vars. This turns
1480 out to be a pretty fair estimate -- packing of stack vars doesn't
1481 happen very often. */
1482
1483 HOST_WIDE_INT
1484 estimated_stack_frame_size (struct cgraph_node *node)
1485 {
1486 HOST_WIDE_INT size = 0;
1487 size_t i;
1488 tree var;
1489 tree old_cur_fun_decl = current_function_decl;
1490 referenced_var_iterator rvi;
1491 struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
1492
1493 current_function_decl = node->symbol.decl;
1494 push_cfun (fn);
1495
1496 gcc_checking_assert (gimple_referenced_vars (fn));
1497 FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1498 size += expand_one_var (var, true, false);
1499
1500 if (stack_vars_num > 0)
1501 {
1502 /* Fake sorting the stack vars for account_stack_vars (). */
1503 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1504 for (i = 0; i < stack_vars_num; ++i)
1505 stack_vars_sorted[i] = i;
1506 size += account_stack_vars ();
1507 fini_vars_expansion ();
1508 }
1509 pop_cfun ();
1510 current_function_decl = old_cur_fun_decl;
1511 return size;
1512 }
1513
1514 /* Expand all variables used in the function. */
1515
1516 static void
1517 expand_used_vars (void)
1518 {
1519 tree var, outer_block = DECL_INITIAL (current_function_decl);
1520 VEC(tree,heap) *maybe_local_decls = NULL;
1521 unsigned i;
1522 unsigned len;
1523
1524 /* Compute the phase of the stack frame for this function. */
1525 {
1526 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1527 int off = STARTING_FRAME_OFFSET % align;
1528 frame_phase = off ? align - off : 0;
1529 }
1530
1531 init_vars_expansion ();
1532
1533 for (i = 0; i < SA.map->num_partitions; i++)
1534 {
1535 tree var = partition_to_var (SA.map, i);
1536
1537 gcc_assert (is_gimple_reg (var));
1538 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1539 expand_one_var (var, true, true);
1540 else
1541 {
1542 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1543 contain the default def (representing the parm or result itself)
1544 we don't do anything here. But those which don't contain the
1545 default def (representing a temporary based on the parm/result)
1546 we need to allocate space just like for normal VAR_DECLs. */
1547 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1548 {
1549 expand_one_var (var, true, true);
1550 gcc_assert (SA.partition_to_pseudo[i]);
1551 }
1552 }
1553 }
1554
1555 /* At this point all variables on the local_decls with TREE_USED
1556 set are not associated with any block scope. Lay them out. */
1557
1558 len = VEC_length (tree, cfun->local_decls);
1559 FOR_EACH_LOCAL_DECL (cfun, i, var)
1560 {
1561 bool expand_now = false;
1562
1563 /* Expanded above already. */
1564 if (is_gimple_reg (var))
1565 {
1566 TREE_USED (var) = 0;
1567 goto next;
1568 }
1569 /* We didn't set a block for static or extern because it's hard
1570 to tell the difference between a global variable (re)declared
1571 in a local scope, and one that's really declared there to
1572 begin with. And it doesn't really matter much, since we're
1573 not giving them stack space. Expand them now. */
1574 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1575 expand_now = true;
1576
1577 /* If the variable is not associated with any block, then it
1578 was created by the optimizers, and could be live anywhere
1579 in the function. */
1580 else if (TREE_USED (var))
1581 expand_now = true;
1582
1583 /* Finally, mark all variables on the list as used. We'll use
1584 this in a moment when we expand those associated with scopes. */
1585 TREE_USED (var) = 1;
1586
1587 if (expand_now)
1588 expand_one_var (var, true, true);
1589
1590 next:
1591 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1592 {
1593 rtx rtl = DECL_RTL_IF_SET (var);
1594
1595 /* Keep artificial non-ignored vars in cfun->local_decls
1596 chain until instantiate_decls. */
1597 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1598 add_local_decl (cfun, var);
1599 else if (rtl == NULL_RTX)
1600 /* If rtl isn't set yet, which can happen e.g. with
1601 -fstack-protector, retry before returning from this
1602 function. */
1603 VEC_safe_push (tree, heap, maybe_local_decls, var);
1604 }
1605 }
1606
1607 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1608
1609 +-----------------+-----------------+
1610 | ...processed... | ...duplicates...|
1611 +-----------------+-----------------+
1612 ^
1613 +-- LEN points here.
1614
1615 We just want the duplicates, as those are the artificial
1616 non-ignored vars that we want to keep until instantiate_decls.
1617 Move them down and truncate the array. */
1618 if (!VEC_empty (tree, cfun->local_decls))
1619 VEC_block_remove (tree, cfun->local_decls, 0, len);
1620
1621 /* At this point, all variables within the block tree with TREE_USED
1622 set are actually used by the optimized function. Lay them out. */
1623 expand_used_vars_for_block (outer_block, true);
1624
1625 if (stack_vars_num > 0)
1626 {
1627 add_scope_conflicts ();
1628 /* Due to the way alias sets work, no variables with non-conflicting
1629 alias sets may be assigned the same address. Add conflicts to
1630 reflect this. */
1631 add_alias_set_conflicts ();
1632
1633 /* If stack protection is enabled, we don't share space between
1634 vulnerable data and non-vulnerable data. */
1635 if (flag_stack_protect)
1636 add_stack_protection_conflicts ();
1637
1638 /* Now that we have collected all stack variables, and have computed a
1639 minimal interference graph, attempt to save some stack space. */
1640 partition_stack_vars ();
1641 if (dump_file)
1642 dump_stack_var_partition ();
1643 }
1644
1645 /* There are several conditions under which we should create a
1646 stack guard: protect-all, alloca used, protected decls present. */
1647 if (flag_stack_protect == 2
1648 || (flag_stack_protect
1649 && (cfun->calls_alloca || has_protected_decls)))
1650 create_stack_guard ();
1651
1652 /* Assign rtl to each variable based on these partitions. */
1653 if (stack_vars_num > 0)
1654 {
1655 /* Reorder decls to be protected by iterating over the variables
1656 array multiple times, and allocating out of each phase in turn. */
1657 /* ??? We could probably integrate this into the qsort we did
1658 earlier, such that we naturally see these variables first,
1659 and thus naturally allocate things in the right order. */
1660 if (has_protected_decls)
1661 {
1662 /* Phase 1 contains only character arrays. */
1663 expand_stack_vars (stack_protect_decl_phase_1);
1664
1665 /* Phase 2 contains other kinds of arrays. */
1666 if (flag_stack_protect == 2)
1667 expand_stack_vars (stack_protect_decl_phase_2);
1668 }
1669
1670 expand_stack_vars (NULL);
1671
1672 fini_vars_expansion ();
1673 }
1674
1675 /* If there were any artificial non-ignored vars without rtl
1676 found earlier, see if deferred stack allocation hasn't assigned
1677 rtl to them. */
1678 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1679 {
1680 rtx rtl = DECL_RTL_IF_SET (var);
1681
1682 /* Keep artificial non-ignored vars in cfun->local_decls
1683 chain until instantiate_decls. */
1684 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1685 add_local_decl (cfun, var);
1686 }
1687 VEC_free (tree, heap, maybe_local_decls);
1688
1689 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1690 if (STACK_ALIGNMENT_NEEDED)
1691 {
1692 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1693 if (!FRAME_GROWS_DOWNWARD)
1694 frame_offset += align - 1;
1695 frame_offset &= -align;
1696 }
1697 }
1698
1699
1700 /* If we need to produce a detailed dump, print the tree representation
1701 for STMT to the dump file. SINCE is the last RTX after which the RTL
1702 generated for STMT should have been appended. */
1703
1704 static void
1705 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1706 {
1707 if (dump_file && (dump_flags & TDF_DETAILS))
1708 {
1709 fprintf (dump_file, "\n;; ");
1710 print_gimple_stmt (dump_file, stmt, 0,
1711 TDF_SLIM | (dump_flags & TDF_LINENO));
1712 fprintf (dump_file, "\n");
1713
1714 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1715 }
1716 }
1717
1718 /* Maps the blocks that do not contain tree labels to rtx labels. */
1719
1720 static struct pointer_map_t *lab_rtx_for_bb;
1721
1722 /* Returns the label_rtx expression for a label starting basic block BB. */
1723
1724 static rtx
1725 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1726 {
1727 gimple_stmt_iterator gsi;
1728 tree lab;
1729 gimple lab_stmt;
1730 void **elt;
1731
1732 if (bb->flags & BB_RTL)
1733 return block_label (bb);
1734
1735 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1736 if (elt)
1737 return (rtx) *elt;
1738
1739 /* Find the tree label if it is present. */
1740
1741 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1742 {
1743 lab_stmt = gsi_stmt (gsi);
1744 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1745 break;
1746
1747 lab = gimple_label_label (lab_stmt);
1748 if (DECL_NONLOCAL (lab))
1749 break;
1750
1751 return label_rtx (lab);
1752 }
1753
1754 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1755 *elt = gen_label_rtx ();
1756 return (rtx) *elt;
1757 }
1758
1759
1760 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1761 of a basic block where we just expanded the conditional at the end,
1762 possibly clean up the CFG and instruction sequence. LAST is the
1763 last instruction before the just emitted jump sequence. */
1764
1765 static void
1766 maybe_cleanup_end_of_block (edge e, rtx last)
1767 {
1768 /* Special case: when jumpif decides that the condition is
1769 trivial it emits an unconditional jump (and the necessary
1770 barrier). But we still have two edges, the fallthru one is
1771 wrong. purge_dead_edges would clean this up later. Unfortunately
1772 we have to insert insns (and split edges) before
1773 find_many_sub_basic_blocks and hence before purge_dead_edges.
1774 But splitting edges might create new blocks which depend on the
1775 fact that if there are two edges there's no barrier. So the
1776 barrier would get lost and verify_flow_info would ICE. Instead
1777 of auditing all edge splitters to care for the barrier (which
1778 normally isn't there in a cleaned CFG), fix it here. */
1779 if (BARRIER_P (get_last_insn ()))
1780 {
1781 rtx insn;
1782 remove_edge (e);
1783       /* Now we have a single successor block; if we have insns to
1784          insert on the remaining edge, we will potentially insert
1785          them at the end of this block (if the dest block isn't feasible)
1786          in order to avoid splitting the edge.  This insertion will take
1787 place in front of the last jump. But we might have emitted
1788 multiple jumps (conditional and one unconditional) to the
1789 same destination. Inserting in front of the last one then
1790 is a problem. See PR 40021. We fix this by deleting all
1791 jumps except the last unconditional one. */
1792 insn = PREV_INSN (get_last_insn ());
1793 /* Make sure we have an unconditional jump. Otherwise we're
1794 confused. */
1795 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1796 for (insn = PREV_INSN (insn); insn != last;)
1797 {
1798 insn = PREV_INSN (insn);
1799 if (JUMP_P (NEXT_INSN (insn)))
1800 {
1801 if (!any_condjump_p (NEXT_INSN (insn)))
1802 {
1803 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1804 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1805 }
1806 delete_insn (NEXT_INSN (insn));
1807 }
1808 }
1809 }
1810 }
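/* Sketch of the insn sequence being repaired (illustrative only).  When
   jumpif folds the condition, the block can end as

	(jump_insn ... (label_ref L2))	;; conditional jump, now redundant
	(jump_insn ... (label_ref L2))	;; unconditional jump
	(barrier)

   and the loop above deletes the earlier jumps (and any barrier that
   follows a deleted unconditional jump), keeping only the final
   unconditional jump and its barrier.  */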
1811
1812 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1813 Returns a new basic block if we've terminated the current basic
1814 block and created a new one. */
1815
1816 static basic_block
1817 expand_gimple_cond (basic_block bb, gimple stmt)
1818 {
1819 basic_block new_bb, dest;
1820 edge new_edge;
1821 edge true_edge;
1822 edge false_edge;
1823 rtx last2, last;
1824 enum tree_code code;
1825 tree op0, op1;
1826
1827 code = gimple_cond_code (stmt);
1828 op0 = gimple_cond_lhs (stmt);
1829 op1 = gimple_cond_rhs (stmt);
1830 /* We're sometimes presented with such code:
1831 D.123_1 = x < y;
1832 if (D.123_1 != 0)
1833 ...
1834 This would expand to two comparisons which then later might
1835 be cleaned up by combine. But some pattern matchers like if-conversion
1836 work better when there's only one compare, so make up for this
1837 here as a special exception if TER would have made the same change. */
1838 if (gimple_cond_single_var_p (stmt)
1839 && SA.values
1840 && TREE_CODE (op0) == SSA_NAME
1841 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1842 {
1843 gimple second = SSA_NAME_DEF_STMT (op0);
1844 if (gimple_code (second) == GIMPLE_ASSIGN)
1845 {
1846 enum tree_code code2 = gimple_assign_rhs_code (second);
1847 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1848 {
1849 code = code2;
1850 op0 = gimple_assign_rhs1 (second);
1851 op1 = gimple_assign_rhs2 (second);
1852 }
1853 /* If jumps are cheap turn some more codes into
1854 jumpy sequences. */
1855 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1856 {
1857 if ((code2 == BIT_AND_EXPR
1858 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1859 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1860 || code2 == TRUTH_AND_EXPR)
1861 {
1862 code = TRUTH_ANDIF_EXPR;
1863 op0 = gimple_assign_rhs1 (second);
1864 op1 = gimple_assign_rhs2 (second);
1865 }
1866 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1867 {
1868 code = TRUTH_ORIF_EXPR;
1869 op0 = gimple_assign_rhs1 (second);
1870 op1 = gimple_assign_rhs2 (second);
1871 }
1872 }
1873 }
1874 }
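  /* Illustration (hypothetical GIMPLE, assuming BRANCH_COST < 4): a sequence
     such as

	D.123_1 = a_2 & b_3;	<- _Bool (single-bit precision) operands
	if (D.123_1 != 0)

     is expanded above as if it were "if (a_2 && b_3)", i.e. as
     TRUTH_ANDIF_EXPR, giving two cheap conditional jumps instead of a
     bitwise AND followed by a compare-and-branch.  */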
1875
1876 last2 = last = get_last_insn ();
1877
1878 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1879 set_curr_insn_source_location (gimple_location (stmt));
1880 set_curr_insn_block (gimple_block (stmt));
1881
1882 /* These flags have no purpose in RTL land. */
1883 true_edge->flags &= ~EDGE_TRUE_VALUE;
1884 false_edge->flags &= ~EDGE_FALSE_VALUE;
1885
1886 /* We can either have a pure conditional jump with one fallthru edge or
1887 a two-way jump that needs to be decomposed into two basic blocks. */
1888 if (false_edge->dest == bb->next_bb)
1889 {
1890 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1891 true_edge->probability);
1892 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1893 if (true_edge->goto_locus)
1894 {
1895 set_curr_insn_source_location (true_edge->goto_locus);
1896 set_curr_insn_block (true_edge->goto_block);
1897 true_edge->goto_locus = curr_insn_locator ();
1898 }
1899 true_edge->goto_block = NULL;
1900 false_edge->flags |= EDGE_FALLTHRU;
1901 maybe_cleanup_end_of_block (false_edge, last);
1902 return NULL;
1903 }
1904 if (true_edge->dest == bb->next_bb)
1905 {
1906 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1907 false_edge->probability);
1908 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1909 if (false_edge->goto_locus)
1910 {
1911 set_curr_insn_source_location (false_edge->goto_locus);
1912 set_curr_insn_block (false_edge->goto_block);
1913 false_edge->goto_locus = curr_insn_locator ();
1914 }
1915 false_edge->goto_block = NULL;
1916 true_edge->flags |= EDGE_FALLTHRU;
1917 maybe_cleanup_end_of_block (true_edge, last);
1918 return NULL;
1919 }
1920
1921 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1922 true_edge->probability);
1923 last = get_last_insn ();
1924 if (false_edge->goto_locus)
1925 {
1926 set_curr_insn_source_location (false_edge->goto_locus);
1927 set_curr_insn_block (false_edge->goto_block);
1928 false_edge->goto_locus = curr_insn_locator ();
1929 }
1930 false_edge->goto_block = NULL;
1931 emit_jump (label_rtx_for_bb (false_edge->dest));
1932
1933 BB_END (bb) = last;
1934 if (BARRIER_P (BB_END (bb)))
1935 BB_END (bb) = PREV_INSN (BB_END (bb));
1936 update_bb_for_insn (bb);
1937
1938 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1939 dest = false_edge->dest;
1940 redirect_edge_succ (false_edge, new_bb);
1941 false_edge->flags |= EDGE_FALLTHRU;
1942 new_bb->count = false_edge->count;
1943 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1944 if (current_loops && bb->loop_father)
1945 add_bb_to_loop (new_bb, bb->loop_father);
1946 new_edge = make_edge (new_bb, dest, 0);
1947 new_edge->probability = REG_BR_PROB_BASE;
1948 new_edge->count = new_bb->count;
1949 if (BARRIER_P (BB_END (new_bb)))
1950 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1951 update_bb_for_insn (new_bb);
1952
1953 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1954
1955 if (true_edge->goto_locus)
1956 {
1957 set_curr_insn_source_location (true_edge->goto_locus);
1958 set_curr_insn_block (true_edge->goto_block);
1959 true_edge->goto_locus = curr_insn_locator ();
1960 }
1961 true_edge->goto_block = NULL;
1962
1963 return new_bb;
1964 }
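/* Illustration (hypothetical GIMPLE): for

	if (a_1 < b_2) goto L3; else goto L7;

   where neither L3 nor L7 starts the next basic block, the code above
   emits a conditional jump to L3 followed by an unconditional jump to L7,
   and the unconditional jump is split off into a new basic block that
   carries the (now fallthru) false edge.  */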
1965
1966 /* Mark all calls that can have a transaction restart. */
1967
1968 static void
1969 mark_transaction_restart_calls (gimple stmt)
1970 {
1971 struct tm_restart_node dummy;
1972 void **slot;
1973
1974 if (!cfun->gimple_df->tm_restart)
1975 return;
1976
1977 dummy.stmt = stmt;
1978 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1979 if (slot)
1980 {
1981 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1982 tree list = n->label_or_list;
1983 rtx insn;
1984
1985 for (insn = next_real_insn (get_last_insn ());
1986 !CALL_P (insn);
1987 insn = next_real_insn (insn))
1988 continue;
1989
1990 if (TREE_CODE (list) == LABEL_DECL)
1991 add_reg_note (insn, REG_TM, label_rtx (list));
1992 else
1993 for (; list ; list = TREE_CHAIN (list))
1994 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1995 }
1996 }
1997
1998 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1999 statement STMT. */
2000
2001 static void
2002 expand_call_stmt (gimple stmt)
2003 {
2004 tree exp, decl, lhs;
2005 bool builtin_p;
2006 size_t i;
2007
2008 if (gimple_call_internal_p (stmt))
2009 {
2010 expand_internal_call (stmt);
2011 return;
2012 }
2013
2014 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2015
2016 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2017 decl = gimple_call_fndecl (stmt);
2018 builtin_p = decl && DECL_BUILT_IN (decl);
2019
2020 /* If this is not a builtin function, the function type through which the
2021 call is made may be different from the type of the function. */
2022 if (!builtin_p)
2023 CALL_EXPR_FN (exp)
2024 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2025 CALL_EXPR_FN (exp));
2026
2027 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2028 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2029
2030 for (i = 0; i < gimple_call_num_args (stmt); i++)
2031 {
2032 tree arg = gimple_call_arg (stmt, i);
2033 gimple def;
2034 /* TER substitutes addresses into arguments of builtin functions so
2035 we have a chance to infer more correct alignment information. See PR39954. */
2036 if (builtin_p
2037 && TREE_CODE (arg) == SSA_NAME
2038 && (def = get_gimple_for_ssa_name (arg))
2039 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2040 arg = gimple_assign_rhs1 (def);
2041 CALL_EXPR_ARG (exp, i) = arg;
2042 }
2043
2044 if (gimple_has_side_effects (stmt))
2045 TREE_SIDE_EFFECTS (exp) = 1;
2046
2047 if (gimple_call_nothrow_p (stmt))
2048 TREE_NOTHROW (exp) = 1;
2049
2050 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2051 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2052 if (decl
2053 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2054 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2055 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2056 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2057 else
2058 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2059 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2060 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2061 TREE_BLOCK (exp) = gimple_block (stmt);
2062
2063 /* Ensure RTL is created for debug args. */
2064 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2065 {
2066 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2067 unsigned int ix;
2068 tree dtemp;
2069
2070 if (debug_args)
2071 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2072 {
2073 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2074 expand_debug_expr (dtemp);
2075 }
2076 }
2077
2078 lhs = gimple_call_lhs (stmt);
2079 if (lhs)
2080 expand_assignment (lhs, exp, false);
2081 else
2082 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2083
2084 mark_transaction_restart_calls (stmt);
2085 }
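/* Illustration (hypothetical GIMPLE): a call

	x_1 = foo (a_2, 5);

   is rebuilt above as a temporary CALL_EXPR tree for foo (a_2, 5), with
   the tail-call, nothrow, return-slot and related flags copied from the
   GIMPLE_CALL, and is then expanded via expand_assignment on the LHS x_1
   (or via expand_expr_real_1 when there is no LHS).  */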
2086
2087 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2088 STMT that doesn't require special handling for outgoing edges. That
2089 is, no tailcalls and no GIMPLE_COND. */
2090
2091 static void
2092 expand_gimple_stmt_1 (gimple stmt)
2093 {
2094 tree op0;
2095
2096 set_curr_insn_source_location (gimple_location (stmt));
2097 set_curr_insn_block (gimple_block (stmt));
2098
2099 switch (gimple_code (stmt))
2100 {
2101 case GIMPLE_GOTO:
2102 op0 = gimple_goto_dest (stmt);
2103 if (TREE_CODE (op0) == LABEL_DECL)
2104 expand_goto (op0);
2105 else
2106 expand_computed_goto (op0);
2107 break;
2108 case GIMPLE_LABEL:
2109 expand_label (gimple_label_label (stmt));
2110 break;
2111 case GIMPLE_NOP:
2112 case GIMPLE_PREDICT:
2113 break;
2114 case GIMPLE_SWITCH:
2115 expand_case (stmt);
2116 break;
2117 case GIMPLE_ASM:
2118 expand_asm_stmt (stmt);
2119 break;
2120 case GIMPLE_CALL:
2121 expand_call_stmt (stmt);
2122 break;
2123
2124 case GIMPLE_RETURN:
2125 op0 = gimple_return_retval (stmt);
2126
2127 if (op0 && op0 != error_mark_node)
2128 {
2129 tree result = DECL_RESULT (current_function_decl);
2130
2131 /* If we are not returning the current function's RESULT_DECL,
2132 build an assignment to it. */
2133 if (op0 != result)
2134 {
2135 /* I believe that a function's RESULT_DECL is unique. */
2136 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2137
2138 /* ??? We'd like to use simply expand_assignment here,
2139 but this fails if the value is of BLKmode but the return
2140 decl is a register. expand_return has special handling
2141 for this combination, which eventually should move
2142 to common code. See comments there. Until then, let's
2143 build a modify expression :-/ */
2144 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2145 result, op0);
2146 }
2147 }
2148 if (!op0)
2149 expand_null_return ();
2150 else
2151 expand_return (op0);
2152 break;
2153
2154 case GIMPLE_ASSIGN:
2155 {
2156 tree lhs = gimple_assign_lhs (stmt);
2157
2158 /* Tree expand used to fiddle with |= and &= of two bitfield
2159 COMPONENT_REFs here. This can't happen with gimple; the LHS
2160 of binary assigns must be a gimple reg. */
2161
2162 if (TREE_CODE (lhs) != SSA_NAME
2163 || get_gimple_rhs_class (gimple_expr_code (stmt))
2164 == GIMPLE_SINGLE_RHS)
2165 {
2166 tree rhs = gimple_assign_rhs1 (stmt);
2167 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2168 == GIMPLE_SINGLE_RHS);
2169 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2170 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2171 if (TREE_CLOBBER_P (rhs))
2172 /* This is a clobber to mark the going out of scope for
2173 this LHS. */
2174 ;
2175 else
2176 expand_assignment (lhs, rhs,
2177 gimple_assign_nontemporal_move_p (stmt));
2178 }
2179 else
2180 {
2181 rtx target, temp;
2182 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2183 struct separate_ops ops;
2184 bool promoted = false;
2185
2186 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2187 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2188 promoted = true;
2189
2190 ops.code = gimple_assign_rhs_code (stmt);
2191 ops.type = TREE_TYPE (lhs);
2192 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2193 {
2194 case GIMPLE_TERNARY_RHS:
2195 ops.op2 = gimple_assign_rhs3 (stmt);
2196 /* Fallthru */
2197 case GIMPLE_BINARY_RHS:
2198 ops.op1 = gimple_assign_rhs2 (stmt);
2199 /* Fallthru */
2200 case GIMPLE_UNARY_RHS:
2201 ops.op0 = gimple_assign_rhs1 (stmt);
2202 break;
2203 default:
2204 gcc_unreachable ();
2205 }
2206 ops.location = gimple_location (stmt);
2207
2208 /* If we want to use a nontemporal store, force the value into
2209 a register first. If we store into a promoted register,
2210 don't directly expand to target. */
2211 temp = nontemporal || promoted ? NULL_RTX : target;
2212 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2213 EXPAND_NORMAL);
2214
2215 if (temp == target)
2216 ;
2217 else if (promoted)
2218 {
2219 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2220 /* If TEMP is a VOIDmode constant, use convert_modes to make
2221 sure that we properly convert it. */
2222 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2223 {
2224 temp = convert_modes (GET_MODE (target),
2225 TYPE_MODE (ops.type),
2226 temp, unsignedp);
2227 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2228 GET_MODE (target), temp, unsignedp);
2229 }
2230
2231 convert_move (SUBREG_REG (target), temp, unsignedp);
2232 }
2233 else if (nontemporal && emit_storent_insn (target, temp))
2234 ;
2235 else
2236 {
2237 temp = force_operand (temp, target);
2238 if (temp != target)
2239 emit_move_insn (target, temp);
2240 }
2241 }
2242 }
2243 break;
2244
2245 default:
2246 gcc_unreachable ();
2247 }
2248 }
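/* Illustration of the promoted-subreg path above (assuming a target that
   holds SImode values sign-extended in DImode registers): for

	x_1 = y_2 + z_3;	<- x_1 expands to (subreg:SI (reg:DI ...))

   the sum is not expanded directly into the promoted target; it is
   computed into a temporary and stored with convert_move on
   SUBREG_REG (target), so the upper bits keep the extension the target
   expects.  */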
2249
2250 /* Expand one gimple statement STMT and return the last RTL instruction
2251 before any of the newly generated ones.
2252
2253 In addition to generating the necessary RTL instructions this also
2254 sets REG_EH_REGION notes if necessary and sets the current source
2255 location for diagnostics. */
2256
2257 static rtx
2258 expand_gimple_stmt (gimple stmt)
2259 {
2260 location_t saved_location = input_location;
2261 rtx last = get_last_insn ();
2262 int lp_nr;
2263
2264 gcc_assert (cfun);
2265
2266 /* We need to save and restore the current source location so that errors
2267 discovered during expansion are emitted with the right location. But
2268 it would be better if the diagnostic routines used the source location
2269 embedded in the tree nodes rather than globals. */
2270 if (gimple_has_location (stmt))
2271 input_location = gimple_location (stmt);
2272
2273 expand_gimple_stmt_1 (stmt);
2274
2275 /* Free any temporaries used to evaluate this statement. */
2276 free_temp_slots ();
2277
2278 input_location = saved_location;
2279
2280 /* Mark all insns that may trap. */
2281 lp_nr = lookup_stmt_eh_lp (stmt);
2282 if (lp_nr)
2283 {
2284 rtx insn;
2285 for (insn = next_real_insn (last); insn;
2286 insn = next_real_insn (insn))
2287 {
2288 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2289 /* If we want exceptions for non-call insns, any
2290 may_trap_p instruction may throw. */
2291 && GET_CODE (PATTERN (insn)) != CLOBBER
2292 && GET_CODE (PATTERN (insn)) != USE
2293 && insn_could_throw_p (insn))
2294 make_reg_eh_region_note (insn, 0, lp_nr);
2295 }
2296 }
2297
2298 return last;
2299 }
2300
2301 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2302 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2303 generated a tail call (something that might be denied by the ABI
2304 rules governing the call; see calls.c).
2305
2306 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2307 can still reach the rest of BB. The case here is __builtin_sqrt,
2308 where the NaN result goes through the external function (with a
2309 tailcall) and the normal result happens via a sqrt instruction. */
2310
2311 static basic_block
2312 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2313 {
2314 rtx last2, last;
2315 edge e;
2316 edge_iterator ei;
2317 int probability;
2318 gcov_type count;
2319
2320 last2 = last = expand_gimple_stmt (stmt);
2321
2322 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2323 if (CALL_P (last) && SIBLING_CALL_P (last))
2324 goto found;
2325
2326 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2327
2328 *can_fallthru = true;
2329 return NULL;
2330
2331 found:
2332 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2333 Any instructions emitted here are about to be deleted. */
2334 do_pending_stack_adjust ();
2335
2336 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2337 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2338 EH or abnormal edges, we shouldn't have created a tail call in
2339 the first place. So it seems to me we should just be removing
2340 all edges here, or redirecting the existing fallthru edge to
2341 the exit block. */
2342
2343 probability = 0;
2344 count = 0;
2345
2346 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2347 {
2348 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2349 {
2350 if (e->dest != EXIT_BLOCK_PTR)
2351 {
2352 e->dest->count -= e->count;
2353 e->dest->frequency -= EDGE_FREQUENCY (e);
2354 if (e->dest->count < 0)
2355 e->dest->count = 0;
2356 if (e->dest->frequency < 0)
2357 e->dest->frequency = 0;
2358 }
2359 count += e->count;
2360 probability += e->probability;
2361 remove_edge (e);
2362 }
2363 else
2364 ei_next (&ei);
2365 }
2366
2367 /* This is somewhat ugly: the call_expr expander often emits instructions
2368 after the sibcall (to perform the function return). These confuse the
2369 find_many_sub_basic_blocks code, so we need to get rid of these. */
2370 last = NEXT_INSN (last);
2371 gcc_assert (BARRIER_P (last));
2372
2373 *can_fallthru = false;
2374 while (NEXT_INSN (last))
2375 {
2376 /* For instance the sqrt builtin expander expands an if with a
2377 sibcall in the then-branch and a label for the else-branch. */
2378 if (LABEL_P (NEXT_INSN (last)))
2379 {
2380 *can_fallthru = true;
2381 break;
2382 }
2383 delete_insn (NEXT_INSN (last));
2384 }
2385
2386 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2387 e->probability += probability;
2388 e->count += count;
2389 BB_END (bb) = last;
2390 update_bb_for_insn (bb);
2391
2392 if (NEXT_INSN (last))
2393 {
2394 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2395
2396 last = BB_END (bb);
2397 if (BARRIER_P (last))
2398 BB_END (bb) = PREV_INSN (last);
2399 }
2400
2401 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2402
2403 return bb;
2404 }
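/* Illustration of the conditional tail call case described above
   (hypothetical source):

	double f (double x) { return __builtin_sqrt (x); }

   On a target with a sqrt instruction, when errno handling requires a
   library fallback, the builtin expander emits the instruction for the
   normal case and a sibling call to sqrt for the NaN case.  The sibcall
   edge then goes to the exit block, while the label kept by the loop
   above leaves the rest of the block reachable.  */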
2405
2406 /* Return the difference between the floor and the truncated result of
2407 a signed division by OP1 with remainder MOD. */
2408 static rtx
2409 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2410 {
2411 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2412 return gen_rtx_IF_THEN_ELSE
2413 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2414 gen_rtx_IF_THEN_ELSE
2415 (mode, gen_rtx_LT (BImode,
2416 gen_rtx_DIV (mode, op1, mod),
2417 const0_rtx),
2418 constm1_rtx, const0_rtx),
2419 const0_rtx);
2420 }
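/* Worked example for floor_sdiv_adjust: for -7 / 2 the truncated quotient
   is -3 with remainder -1.  op1 / mod == 2 / -1 == -2 < 0, so the
   adjustment is -1 and -3 + -1 == -4 == floor (-7 / 2).  When the
   remainder is zero or has the same sign as OP1 the adjustment is 0.  */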
2421
2422 /* Return the difference between the ceil and the truncated result of
2423 a signed division by OP1 with remainder MOD. */
2424 static rtx
2425 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2426 {
2427 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2428 return gen_rtx_IF_THEN_ELSE
2429 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2430 gen_rtx_IF_THEN_ELSE
2431 (mode, gen_rtx_GT (BImode,
2432 gen_rtx_DIV (mode, op1, mod),
2433 const0_rtx),
2434 const1_rtx, const0_rtx),
2435 const0_rtx);
2436 }
2437
2438 /* Return the difference between the ceil and the truncated result of
2439 an unsigned division by OP1 with remainder MOD. */
2440 static rtx
2441 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2442 {
2443 /* (mod != 0 ? 1 : 0) */
2444 return gen_rtx_IF_THEN_ELSE
2445 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2446 const1_rtx, const0_rtx);
2447 }
2448
2449 /* Return the difference between the rounded and the truncated result
2450 of a signed division by OP1 with remainder MOD. Halfway cases are
2451 rounded away from zero, rather than to the nearest even number. */
2452 static rtx
2453 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2454 {
2455 /* (abs (mod) >= abs (op1) - abs (mod)
2456 ? (op1 / mod > 0 ? 1 : -1)
2457 : 0) */
2458 return gen_rtx_IF_THEN_ELSE
2459 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2460 gen_rtx_MINUS (mode,
2461 gen_rtx_ABS (mode, op1),
2462 gen_rtx_ABS (mode, mod))),
2463 gen_rtx_IF_THEN_ELSE
2464 (mode, gen_rtx_GT (BImode,
2465 gen_rtx_DIV (mode, op1, mod),
2466 const0_rtx),
2467 const1_rtx, constm1_rtx),
2468 const0_rtx);
2469 }
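/* Worked example for round_sdiv_adjust: for -7 / 2 the truncated quotient
   is -3 with remainder -1.  abs (mod) == 1 equals abs (op1) - abs (mod),
   so the halfway test holds; op1 / mod == -2 is not positive, so the
   adjustment is -1 and the result is -4, i.e. -3.5 rounded away from
   zero.  */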
2470
2471 /* Return the difference between the rounded and the truncated result
2472 of an unsigned division by OP1 with remainder MOD. Halfway cases
2473 are rounded away from zero, rather than to the nearest even
2474 number. */
2475 static rtx
2476 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2477 {
2478 /* (mod >= op1 - mod ? 1 : 0) */
2479 return gen_rtx_IF_THEN_ELSE
2480 (mode, gen_rtx_GE (BImode, mod,
2481 gen_rtx_MINUS (mode, op1, mod)),
2482 const1_rtx, const0_rtx);
2483 }
2484
2485 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2486 any rtl. */
2487
2488 static rtx
2489 convert_debug_memory_address (enum machine_mode mode, rtx x,
2490 addr_space_t as)
2491 {
2492 enum machine_mode xmode = GET_MODE (x);
2493
2494 #ifndef POINTERS_EXTEND_UNSIGNED
2495 gcc_assert (mode == Pmode
2496 || mode == targetm.addr_space.address_mode (as));
2497 gcc_assert (xmode == mode || xmode == VOIDmode);
2498 #else
2499 rtx temp;
2500
2501 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2502
2503 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2504 return x;
2505
2506 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2507 x = simplify_gen_subreg (mode, x, xmode,
2508 subreg_lowpart_offset
2509 (mode, xmode));
2510 else if (POINTERS_EXTEND_UNSIGNED > 0)
2511 x = gen_rtx_ZERO_EXTEND (mode, x);
2512 else if (!POINTERS_EXTEND_UNSIGNED)
2513 x = gen_rtx_SIGN_EXTEND (mode, x);
2514 else
2515 {
2516 switch (GET_CODE (x))
2517 {
2518 case SUBREG:
2519 if ((SUBREG_PROMOTED_VAR_P (x)
2520 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2521 || (GET_CODE (SUBREG_REG (x)) == PLUS
2522 && REG_P (XEXP (SUBREG_REG (x), 0))
2523 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2524 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2525 && GET_MODE (SUBREG_REG (x)) == mode)
2526 return SUBREG_REG (x);
2527 break;
2528 case LABEL_REF:
2529 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2530 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2531 return temp;
2532 case SYMBOL_REF:
2533 temp = shallow_copy_rtx (x);
2534 PUT_MODE (temp, mode);
2535 return temp;
2536 case CONST:
2537 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2538 if (temp)
2539 temp = gen_rtx_CONST (mode, temp);
2540 return temp;
2541 case PLUS:
2542 case MINUS:
2543 if (CONST_INT_P (XEXP (x, 1)))
2544 {
2545 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2546 if (temp)
2547 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2548 }
2549 break;
2550 default:
2551 break;
2552 }
2553 /* Don't know how to express ptr_extend as operation in debug info. */
2554 return NULL;
2555 }
2556 #endif /* POINTERS_EXTEND_UNSIGNED */
2557
2558 return x;
2559 }
2560
2561 /* Return an RTX equivalent to the value of the parameter DECL. */
2562
2563 static rtx
2564 expand_debug_parm_decl (tree decl)
2565 {
2566 rtx incoming = DECL_INCOMING_RTL (decl);
2567
2568 if (incoming
2569 && GET_MODE (incoming) != BLKmode
2570 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2571 || (MEM_P (incoming)
2572 && REG_P (XEXP (incoming, 0))
2573 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2574 {
2575 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2576
2577 #ifdef HAVE_window_save
2578 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2579 If the target machine has an explicit window save instruction, the
2580 actual entry value is the corresponding OUTGOING_REGNO instead. */
2581 if (REG_P (incoming)
2582 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2583 incoming
2584 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2585 OUTGOING_REGNO (REGNO (incoming)), 0);
2586 else if (MEM_P (incoming))
2587 {
2588 rtx reg = XEXP (incoming, 0);
2589 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2590 {
2591 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2592 incoming = replace_equiv_address_nv (incoming, reg);
2593 }
2594 }
2595 #endif
2596
2597 ENTRY_VALUE_EXP (rtl) = incoming;
2598 return rtl;
2599 }
2600
2601 if (incoming
2602 && GET_MODE (incoming) != BLKmode
2603 && !TREE_ADDRESSABLE (decl)
2604 && MEM_P (incoming)
2605 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2606 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2607 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2608 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2609 return incoming;
2610
2611 return NULL_RTX;
2612 }
2613
2614 /* Return an RTX equivalent to the value of the tree expression EXP. */
2615
2616 static rtx
2617 expand_debug_expr (tree exp)
2618 {
2619 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2620 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2621 enum machine_mode inner_mode = VOIDmode;
2622 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2623 addr_space_t as;
2624
2625 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2626 {
2627 case tcc_expression:
2628 switch (TREE_CODE (exp))
2629 {
2630 case COND_EXPR:
2631 case DOT_PROD_EXPR:
2632 case WIDEN_MULT_PLUS_EXPR:
2633 case WIDEN_MULT_MINUS_EXPR:
2634 case FMA_EXPR:
2635 goto ternary;
2636
2637 case TRUTH_ANDIF_EXPR:
2638 case TRUTH_ORIF_EXPR:
2639 case TRUTH_AND_EXPR:
2640 case TRUTH_OR_EXPR:
2641 case TRUTH_XOR_EXPR:
2642 goto binary;
2643
2644 case TRUTH_NOT_EXPR:
2645 goto unary;
2646
2647 default:
2648 break;
2649 }
2650 break;
2651
2652 ternary:
2653 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2654 if (!op2)
2655 return NULL_RTX;
2656 /* Fall through. */
2657
2658 binary:
2659 case tcc_binary:
2660 case tcc_comparison:
2661 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2662 if (!op1)
2663 return NULL_RTX;
2664 /* Fall through. */
2665
2666 unary:
2667 case tcc_unary:
2668 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2669 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2670 if (!op0)
2671 return NULL_RTX;
2672 break;
2673
2674 case tcc_type:
2675 case tcc_statement:
2676 gcc_unreachable ();
2677
2678 case tcc_constant:
2679 case tcc_exceptional:
2680 case tcc_declaration:
2681 case tcc_reference:
2682 case tcc_vl_exp:
2683 break;
2684 }
2685
2686 switch (TREE_CODE (exp))
2687 {
2688 case STRING_CST:
2689 if (!lookup_constant_def (exp))
2690 {
2691 if (strlen (TREE_STRING_POINTER (exp)) + 1
2692 != (size_t) TREE_STRING_LENGTH (exp))
2693 return NULL_RTX;
2694 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2695 op0 = gen_rtx_MEM (BLKmode, op0);
2696 set_mem_attributes (op0, exp, 0);
2697 return op0;
2698 }
2699 /* Fall through... */
2700
2701 case INTEGER_CST:
2702 case REAL_CST:
2703 case FIXED_CST:
2704 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2705 return op0;
2706
2707 case COMPLEX_CST:
2708 gcc_assert (COMPLEX_MODE_P (mode));
2709 op0 = expand_debug_expr (TREE_REALPART (exp));
2710 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2711 return gen_rtx_CONCAT (mode, op0, op1);
2712
2713 case DEBUG_EXPR_DECL:
2714 op0 = DECL_RTL_IF_SET (exp);
2715
2716 if (op0)
2717 return op0;
2718
2719 op0 = gen_rtx_DEBUG_EXPR (mode);
2720 DEBUG_EXPR_TREE_DECL (op0) = exp;
2721 SET_DECL_RTL (exp, op0);
2722
2723 return op0;
2724
2725 case VAR_DECL:
2726 case PARM_DECL:
2727 case FUNCTION_DECL:
2728 case LABEL_DECL:
2729 case CONST_DECL:
2730 case RESULT_DECL:
2731 op0 = DECL_RTL_IF_SET (exp);
2732
2733 /* This decl was probably optimized away. */
2734 if (!op0)
2735 {
2736 if (TREE_CODE (exp) != VAR_DECL
2737 || DECL_EXTERNAL (exp)
2738 || !TREE_STATIC (exp)
2739 || !DECL_NAME (exp)
2740 || DECL_HARD_REGISTER (exp)
2741 || DECL_IN_CONSTANT_POOL (exp)
2742 || mode == VOIDmode)
2743 return NULL;
2744
2745 op0 = make_decl_rtl_for_debug (exp);
2746 if (!MEM_P (op0)
2747 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2748 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2749 return NULL;
2750 }
2751 else
2752 op0 = copy_rtx (op0);
2753
2754 if (GET_MODE (op0) == BLKmode
2755 /* If op0 is not BLKmode, but MODE is, adjust_mode
2756 below would ICE. While it is likely a FE bug,
2757 try to be robust here. See PR43166. */
2758 || mode == BLKmode
2759 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2760 {
2761 gcc_assert (MEM_P (op0));
2762 op0 = adjust_address_nv (op0, mode, 0);
2763 return op0;
2764 }
2765
2766 /* Fall through. */
2767
2768 adjust_mode:
2769 case PAREN_EXPR:
2770 case NOP_EXPR:
2771 case CONVERT_EXPR:
2772 {
2773 inner_mode = GET_MODE (op0);
2774
2775 if (mode == inner_mode)
2776 return op0;
2777
2778 if (inner_mode == VOIDmode)
2779 {
2780 if (TREE_CODE (exp) == SSA_NAME)
2781 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2782 else
2783 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2784 if (mode == inner_mode)
2785 return op0;
2786 }
2787
2788 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2789 {
2790 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2791 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2792 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2793 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2794 else
2795 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2796 }
2797 else if (FLOAT_MODE_P (mode))
2798 {
2799 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2800 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2801 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2802 else
2803 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2804 }
2805 else if (FLOAT_MODE_P (inner_mode))
2806 {
2807 if (unsignedp)
2808 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2809 else
2810 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2811 }
2812 else if (CONSTANT_P (op0)
2813 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2814 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2815 subreg_lowpart_offset (mode,
2816 inner_mode));
2817 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2818 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2819 : unsignedp)
2820 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2821 else
2822 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2823
2824 return op0;
2825 }
2826
2827 case MEM_REF:
2828 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2829 {
2830 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2831 TREE_OPERAND (exp, 0),
2832 TREE_OPERAND (exp, 1));
2833 if (newexp)
2834 return expand_debug_expr (newexp);
2835 }
2836 /* FALLTHROUGH */
2837 case INDIRECT_REF:
2838 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2839 if (!op0)
2840 return NULL;
2841
2842 if (TREE_CODE (exp) == MEM_REF)
2843 {
2844 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2845 || (GET_CODE (op0) == PLUS
2846 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2847 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2848 Instead just use get_inner_reference. */
2849 goto component_ref;
2850
2851 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2852 if (!op1 || !CONST_INT_P (op1))
2853 return NULL;
2854
2855 op0 = plus_constant (op0, INTVAL (op1));
2856 }
2857
2858 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2859 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2860 else
2861 as = ADDR_SPACE_GENERIC;
2862
2863 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2864 op0, as);
2865 if (op0 == NULL_RTX)
2866 return NULL;
2867
2868 op0 = gen_rtx_MEM (mode, op0);
2869 set_mem_attributes (op0, exp, 0);
2870 if (TREE_CODE (exp) == MEM_REF
2871 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2872 set_mem_expr (op0, NULL_TREE);
2873 set_mem_addr_space (op0, as);
2874
2875 return op0;
2876
2877 case TARGET_MEM_REF:
2878 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2879 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2880 return NULL;
2881
2882 op0 = expand_debug_expr
2883 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2884 if (!op0)
2885 return NULL;
2886
2887 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2888 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2889 else
2890 as = ADDR_SPACE_GENERIC;
2891
2892 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2893 op0, as);
2894 if (op0 == NULL_RTX)
2895 return NULL;
2896
2897 op0 = gen_rtx_MEM (mode, op0);
2898
2899 set_mem_attributes (op0, exp, 0);
2900 set_mem_addr_space (op0, as);
2901
2902 return op0;
2903
2904 component_ref:
2905 case ARRAY_REF:
2906 case ARRAY_RANGE_REF:
2907 case COMPONENT_REF:
2908 case BIT_FIELD_REF:
2909 case REALPART_EXPR:
2910 case IMAGPART_EXPR:
2911 case VIEW_CONVERT_EXPR:
2912 {
2913 enum machine_mode mode1;
2914 HOST_WIDE_INT bitsize, bitpos;
2915 tree offset;
2916 int volatilep = 0;
2917 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2918 &mode1, &unsignedp, &volatilep, false);
2919 rtx orig_op0;
2920
2921 if (bitsize == 0)
2922 return NULL;
2923
2924 orig_op0 = op0 = expand_debug_expr (tem);
2925
2926 if (!op0)
2927 return NULL;
2928
2929 if (offset)
2930 {
2931 enum machine_mode addrmode, offmode;
2932
2933 if (!MEM_P (op0))
2934 return NULL;
2935
2936 op0 = XEXP (op0, 0);
2937 addrmode = GET_MODE (op0);
2938 if (addrmode == VOIDmode)
2939 addrmode = Pmode;
2940
2941 op1 = expand_debug_expr (offset);
2942 if (!op1)
2943 return NULL;
2944
2945 offmode = GET_MODE (op1);
2946 if (offmode == VOIDmode)
2947 offmode = TYPE_MODE (TREE_TYPE (offset));
2948
2949 if (addrmode != offmode)
2950 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2951 subreg_lowpart_offset (addrmode,
2952 offmode));
2953
2954 /* Don't use offset_address here, we don't need a
2955 recognizable address, and we don't want to generate
2956 code. */
2957 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2958 op0, op1));
2959 }
2960
2961 if (MEM_P (op0))
2962 {
2963 if (mode1 == VOIDmode)
2964 /* Bitfield. */
2965 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2966 if (bitpos >= BITS_PER_UNIT)
2967 {
2968 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2969 bitpos %= BITS_PER_UNIT;
2970 }
2971 else if (bitpos < 0)
2972 {
2973 HOST_WIDE_INT units
2974 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2975 op0 = adjust_address_nv (op0, mode1, units);
2976 bitpos += units * BITS_PER_UNIT;
2977 }
2978 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2979 op0 = adjust_address_nv (op0, mode, 0);
2980 else if (GET_MODE (op0) != mode1)
2981 op0 = adjust_address_nv (op0, mode1, 0);
2982 else
2983 op0 = copy_rtx (op0);
2984 if (op0 == orig_op0)
2985 op0 = shallow_copy_rtx (op0);
2986 set_mem_attributes (op0, exp, 0);
2987 }
2988
2989 if (bitpos == 0 && mode == GET_MODE (op0))
2990 return op0;
2991
2992 if (bitpos < 0)
2993 return NULL;
2994
2995 if (GET_MODE (op0) == BLKmode)
2996 return NULL;
2997
2998 if ((bitpos % BITS_PER_UNIT) == 0
2999 && bitsize == GET_MODE_BITSIZE (mode1))
3000 {
3001 enum machine_mode opmode = GET_MODE (op0);
3002
3003 if (opmode == VOIDmode)
3004 opmode = TYPE_MODE (TREE_TYPE (tem));
3005
3006 /* This condition may hold if we're expanding the address
3007 right past the end of an array that turned out not to
3008 be addressable (i.e., the address was only computed in
3009 debug stmts). The gen_subreg below would rightfully
3010 crash, and the address doesn't really exist, so just
3011 drop it. */
3012 if (bitpos >= GET_MODE_BITSIZE (opmode))
3013 return NULL;
3014
3015 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3016 return simplify_gen_subreg (mode, op0, opmode,
3017 bitpos / BITS_PER_UNIT);
3018 }
3019
3020 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3021 && TYPE_UNSIGNED (TREE_TYPE (exp))
3022 ? SIGN_EXTRACT
3023 : ZERO_EXTRACT, mode,
3024 GET_MODE (op0) != VOIDmode
3025 ? GET_MODE (op0)
3026 : TYPE_MODE (TREE_TYPE (tem)),
3027 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3028 }
3029
3030 case ABS_EXPR:
3031 return simplify_gen_unary (ABS, mode, op0, mode);
3032
3033 case NEGATE_EXPR:
3034 return simplify_gen_unary (NEG, mode, op0, mode);
3035
3036 case BIT_NOT_EXPR:
3037 return simplify_gen_unary (NOT, mode, op0, mode);
3038
3039 case FLOAT_EXPR:
3040 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3041 0)))
3042 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3043 inner_mode);
3044
3045 case FIX_TRUNC_EXPR:
3046 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3047 inner_mode);
3048
3049 case POINTER_PLUS_EXPR:
3050 /* For the rare target where pointers are not the same size as
3051 size_t, we need to check for mis-matched modes and correct
3052 the addend. */
3053 if (op0 && op1
3054 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3055 && GET_MODE (op0) != GET_MODE (op1))
3056 {
3057 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3058 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3059 GET_MODE (op1));
3060 else
3061 /* We always sign-extend, regardless of the signedness of
3062 the operand, because the operand is always unsigned
3063 here even if the original C expression is signed. */
3064 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3065 GET_MODE (op1));
3066 }
3067 /* Fall through. */
3068 case PLUS_EXPR:
3069 return simplify_gen_binary (PLUS, mode, op0, op1);
3070
3071 case MINUS_EXPR:
3072 return simplify_gen_binary (MINUS, mode, op0, op1);
3073
3074 case MULT_EXPR:
3075 return simplify_gen_binary (MULT, mode, op0, op1);
3076
3077 case RDIV_EXPR:
3078 case TRUNC_DIV_EXPR:
3079 case EXACT_DIV_EXPR:
3080 if (unsignedp)
3081 return simplify_gen_binary (UDIV, mode, op0, op1);
3082 else
3083 return simplify_gen_binary (DIV, mode, op0, op1);
3084
3085 case TRUNC_MOD_EXPR:
3086 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3087
3088 case FLOOR_DIV_EXPR:
3089 if (unsignedp)
3090 return simplify_gen_binary (UDIV, mode, op0, op1);
3091 else
3092 {
3093 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3094 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3095 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3096 return simplify_gen_binary (PLUS, mode, div, adj);
3097 }
3098
3099 case FLOOR_MOD_EXPR:
3100 if (unsignedp)
3101 return simplify_gen_binary (UMOD, mode, op0, op1);
3102 else
3103 {
3104 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3105 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3106 adj = simplify_gen_unary (NEG, mode,
3107 simplify_gen_binary (MULT, mode, adj, op1),
3108 mode);
3109 return simplify_gen_binary (PLUS, mode, mod, adj);
3110 }
3111
3112 case CEIL_DIV_EXPR:
3113 if (unsignedp)
3114 {
3115 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3116 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3117 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3118 return simplify_gen_binary (PLUS, mode, div, adj);
3119 }
3120 else
3121 {
3122 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3123 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3124 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3125 return simplify_gen_binary (PLUS, mode, div, adj);
3126 }
3127
3128 case CEIL_MOD_EXPR:
3129 if (unsignedp)
3130 {
3131 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3132 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3133 adj = simplify_gen_unary (NEG, mode,
3134 simplify_gen_binary (MULT, mode, adj, op1),
3135 mode);
3136 return simplify_gen_binary (PLUS, mode, mod, adj);
3137 }
3138 else
3139 {
3140 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3141 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3142 adj = simplify_gen_unary (NEG, mode,
3143 simplify_gen_binary (MULT, mode, adj, op1),
3144 mode);
3145 return simplify_gen_binary (PLUS, mode, mod, adj);
3146 }
3147
3148 case ROUND_DIV_EXPR:
3149 if (unsignedp)
3150 {
3151 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3152 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3153 rtx adj = round_udiv_adjust (mode, mod, op1);
3154 return simplify_gen_binary (PLUS, mode, div, adj);
3155 }
3156 else
3157 {
3158 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3159 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3160 rtx adj = round_sdiv_adjust (mode, mod, op1);
3161 return simplify_gen_binary (PLUS, mode, div, adj);
3162 }
3163
3164 case ROUND_MOD_EXPR:
3165 if (unsignedp)
3166 {
3167 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3168 rtx adj = round_udiv_adjust (mode, mod, op1);
3169 adj = simplify_gen_unary (NEG, mode,
3170 simplify_gen_binary (MULT, mode, adj, op1),
3171 mode);
3172 return simplify_gen_binary (PLUS, mode, mod, adj);
3173 }
3174 else
3175 {
3176 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3177 rtx adj = round_sdiv_adjust (mode, mod, op1);
3178 adj = simplify_gen_unary (NEG, mode,
3179 simplify_gen_binary (MULT, mode, adj, op1),
3180 mode);
3181 return simplify_gen_binary (PLUS, mode, mod, adj);
3182 }
3183
3184 case LSHIFT_EXPR:
3185 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3186
3187 case RSHIFT_EXPR:
3188 if (unsignedp)
3189 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3190 else
3191 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3192
3193 case LROTATE_EXPR:
3194 return simplify_gen_binary (ROTATE, mode, op0, op1);
3195
3196 case RROTATE_EXPR:
3197 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3198
3199 case MIN_EXPR:
3200 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3201
3202 case MAX_EXPR:
3203 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3204
3205 case BIT_AND_EXPR:
3206 case TRUTH_AND_EXPR:
3207 return simplify_gen_binary (AND, mode, op0, op1);
3208
3209 case BIT_IOR_EXPR:
3210 case TRUTH_OR_EXPR:
3211 return simplify_gen_binary (IOR, mode, op0, op1);
3212
3213 case BIT_XOR_EXPR:
3214 case TRUTH_XOR_EXPR:
3215 return simplify_gen_binary (XOR, mode, op0, op1);
3216
3217 case TRUTH_ANDIF_EXPR:
3218 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3219
3220 case TRUTH_ORIF_EXPR:
3221 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3222
3223 case TRUTH_NOT_EXPR:
3224 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3225
3226 case LT_EXPR:
3227 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3228 op0, op1);
3229
3230 case LE_EXPR:
3231 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3232 op0, op1);
3233
3234 case GT_EXPR:
3235 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3236 op0, op1);
3237
3238 case GE_EXPR:
3239 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3240 op0, op1);
3241
3242 case EQ_EXPR:
3243 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3244
3245 case NE_EXPR:
3246 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3247
3248 case UNORDERED_EXPR:
3249 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3250
3251 case ORDERED_EXPR:
3252 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3253
3254 case UNLT_EXPR:
3255 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3256
3257 case UNLE_EXPR:
3258 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3259
3260 case UNGT_EXPR:
3261 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3262
3263 case UNGE_EXPR:
3264 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3265
3266 case UNEQ_EXPR:
3267 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3268
3269 case LTGT_EXPR:
3270 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3271
3272 case COND_EXPR:
3273 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3274
3275 case COMPLEX_EXPR:
3276 gcc_assert (COMPLEX_MODE_P (mode));
3277 if (GET_MODE (op0) == VOIDmode)
3278 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3279 if (GET_MODE (op1) == VOIDmode)
3280 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3281 return gen_rtx_CONCAT (mode, op0, op1);
3282
3283 case CONJ_EXPR:
3284 if (GET_CODE (op0) == CONCAT)
3285 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3286 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3287 XEXP (op0, 1),
3288 GET_MODE_INNER (mode)));
3289 else
3290 {
3291 enum machine_mode imode = GET_MODE_INNER (mode);
3292 rtx re, im;
3293
3294 if (MEM_P (op0))
3295 {
3296 re = adjust_address_nv (op0, imode, 0);
3297 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3298 }
3299 else
3300 {
3301 enum machine_mode ifmode = int_mode_for_mode (mode);
3302 enum machine_mode ihmode = int_mode_for_mode (imode);
3303 rtx halfsize;
3304 if (ifmode == BLKmode || ihmode == BLKmode)
3305 return NULL;
3306 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3307 re = op0;
3308 if (mode != ifmode)
3309 re = gen_rtx_SUBREG (ifmode, re, 0);
3310 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3311 if (imode != ihmode)
3312 re = gen_rtx_SUBREG (imode, re, 0);
3313 im = copy_rtx (op0);
3314 if (mode != ifmode)
3315 im = gen_rtx_SUBREG (ifmode, im, 0);
3316 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3317 if (imode != ihmode)
3318 im = gen_rtx_SUBREG (imode, im, 0);
3319 }
3320 im = gen_rtx_NEG (imode, im);
3321 return gen_rtx_CONCAT (mode, re, im);
3322 }
3323
3324 case ADDR_EXPR:
3325 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3326 if (!op0 || !MEM_P (op0))
3327 {
3328 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3329 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3330 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3331 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3332 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3333 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3334
3335 if (handled_component_p (TREE_OPERAND (exp, 0)))
3336 {
3337 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3338 tree decl
3339 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3340 &bitoffset, &bitsize, &maxsize);
3341 if ((TREE_CODE (decl) == VAR_DECL
3342 || TREE_CODE (decl) == PARM_DECL
3343 || TREE_CODE (decl) == RESULT_DECL)
3344 && (!TREE_ADDRESSABLE (decl)
3345 || target_for_debug_bind (decl))
3346 && (bitoffset % BITS_PER_UNIT) == 0
3347 && bitsize > 0
3348 && bitsize == maxsize)
3349 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3350 bitoffset / BITS_PER_UNIT);
3351 }
3352
3353 return NULL;
3354 }
3355
3356 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3357 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3358
3359 return op0;
3360
3361 case VECTOR_CST:
3362 {
3363 unsigned i;
3364
3365 op0 = gen_rtx_CONCATN
3366 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3367
3368 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3369 {
3370 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3371 if (!op1)
3372 return NULL;
3373 XVECEXP (op0, 0, i) = op1;
3374 }
3375
3376 return op0;
3377 }
3378
3379 case CONSTRUCTOR:
3380 if (TREE_CLOBBER_P (exp))
3381 return NULL;
3382 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3383 {
3384 unsigned i;
3385 tree val;
3386
3387 op0 = gen_rtx_CONCATN
3388 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3389
3390 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3391 {
3392 op1 = expand_debug_expr (val);
3393 if (!op1)
3394 return NULL;
3395 XVECEXP (op0, 0, i) = op1;
3396 }
3397
3398 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3399 {
3400 op1 = expand_debug_expr
3401 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3402
3403 if (!op1)
3404 return NULL;
3405
3406 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3407 XVECEXP (op0, 0, i) = op1;
3408 }
3409
3410 return op0;
3411 }
3412 else
3413 goto flag_unsupported;
3414
3415 case CALL_EXPR:
3416 /* ??? Maybe handle some builtins? */
3417 return NULL;
3418
3419 case SSA_NAME:
3420 {
3421 gimple g = get_gimple_for_ssa_name (exp);
3422 if (g)
3423 {
3424 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3425 if (!op0)
3426 return NULL;
3427 }
3428 else
3429 {
3430 int part = var_to_partition (SA.map, exp);
3431
3432 if (part == NO_PARTITION)
3433 {
3434 /* If this is a reference to the incoming value of a
3435 parameter that is otherwise never used in the
3436 (non-debug) code, use the PARM_DECL's
3437 DECL_RTL if set. */
3438 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3439 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3440 {
3441 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3442 if (op0)
3443 goto adjust_mode;
3444 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3445 if (op0)
3446 goto adjust_mode;
3447 }
3448 return NULL;
3449 }
3450
3451 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3452
3453 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3454 }
3455 goto adjust_mode;
3456 }
3457
3458 case ERROR_MARK:
3459 return NULL;
3460
3461 /* Vector stuff. For most of these tree codes there are no RTL codes. */
3462 case REALIGN_LOAD_EXPR:
3463 case REDUC_MAX_EXPR:
3464 case REDUC_MIN_EXPR:
3465 case REDUC_PLUS_EXPR:
3466 case VEC_COND_EXPR:
3467 case VEC_LSHIFT_EXPR:
3468 case VEC_PACK_FIX_TRUNC_EXPR:
3469 case VEC_PACK_SAT_EXPR:
3470 case VEC_PACK_TRUNC_EXPR:
3471 case VEC_RSHIFT_EXPR:
3472 case VEC_UNPACK_FLOAT_HI_EXPR:
3473 case VEC_UNPACK_FLOAT_LO_EXPR:
3474 case VEC_UNPACK_HI_EXPR:
3475 case VEC_UNPACK_LO_EXPR:
3476 case VEC_WIDEN_MULT_HI_EXPR:
3477 case VEC_WIDEN_MULT_LO_EXPR:
3478 case VEC_WIDEN_LSHIFT_HI_EXPR:
3479 case VEC_WIDEN_LSHIFT_LO_EXPR:
3480 case VEC_PERM_EXPR:
3481 return NULL;
3482
3483 /* Misc codes. */
3484 case ADDR_SPACE_CONVERT_EXPR:
3485 case FIXED_CONVERT_EXPR:
3486 case OBJ_TYPE_REF:
3487 case WITH_SIZE_EXPR:
3488 return NULL;
3489
3490 case DOT_PROD_EXPR:
3491 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3492 && SCALAR_INT_MODE_P (mode))
3493 {
3494 op0
3495 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3496 0)))
3497 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3498 inner_mode);
3499 op1
3500 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3501 1)))
3502 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3503 inner_mode);
3504 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3505 return simplify_gen_binary (PLUS, mode, op0, op2);
3506 }
3507 return NULL;
3508
3509 case WIDEN_MULT_EXPR:
3510 case WIDEN_MULT_PLUS_EXPR:
3511 case WIDEN_MULT_MINUS_EXPR:
3512 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3513 && SCALAR_INT_MODE_P (mode))
3514 {
3515 inner_mode = GET_MODE (op0);
3516 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3517 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3518 else
3519 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3520 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3521 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3522 else
3523 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3524 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3525 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3526 return op0;
3527 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3528 return simplify_gen_binary (PLUS, mode, op0, op2);
3529 else
3530 return simplify_gen_binary (MINUS, mode, op2, op0);
3531 }
3532 return NULL;
3533
3534 case WIDEN_SUM_EXPR:
3535 case WIDEN_LSHIFT_EXPR:
3536 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3537 && SCALAR_INT_MODE_P (mode))
3538 {
3539 op0
3540 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3541 0)))
3542 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3543 inner_mode);
3544 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3545 ? ASHIFT : PLUS, mode, op0, op1);
3546 }
3547 return NULL;
3548
3549 case FMA_EXPR:
3550 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3551
3552 default:
3553 flag_unsupported:
3554 #ifdef ENABLE_CHECKING
3555 debug_tree (exp);
3556 gcc_unreachable ();
3557 #else
3558 return NULL;
3559 #endif
3560 }
3561 }
3562
3563 /* Return an RTX equivalent to the source bind value of the tree expression
3564 EXP. */
3565
3566 static rtx
3567 expand_debug_source_expr (tree exp)
3568 {
3569 rtx op0 = NULL_RTX;
3570 enum machine_mode mode = VOIDmode, inner_mode;
3571
3572 switch (TREE_CODE (exp))
3573 {
3574 case PARM_DECL:
3575 {
3576 mode = DECL_MODE (exp);
3577 op0 = expand_debug_parm_decl (exp);
3578 if (op0)
3579 break;
3580 /* See if this isn't an argument that has been completely
3581 optimized out. */
3582 if (!DECL_RTL_SET_P (exp)
3583 && !DECL_INCOMING_RTL (exp)
3584 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3585 {
3586 tree aexp = exp;
3587 if (DECL_ABSTRACT_ORIGIN (exp))
3588 aexp = DECL_ABSTRACT_ORIGIN (exp);
3589 if (DECL_CONTEXT (aexp)
3590 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3591 {
3592 VEC(tree, gc) **debug_args;
3593 unsigned int ix;
3594 tree ddecl;
3595 #ifdef ENABLE_CHECKING
3596 tree parm;
3597 for (parm = DECL_ARGUMENTS (current_function_decl);
3598 parm; parm = DECL_CHAIN (parm))
3599 gcc_assert (parm != exp
3600 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3601 #endif
3602 debug_args = decl_debug_args_lookup (current_function_decl);
3603 if (debug_args != NULL)
3604 {
3605 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3606 ix += 2)
3607 if (ddecl == aexp)
3608 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3609 }
3610 }
3611 }
3612 break;
3613 }
3614 default:
3615 break;
3616 }
3617
3618 if (op0 == NULL_RTX)
3619 return NULL_RTX;
3620
3621 inner_mode = GET_MODE (op0);
3622 if (mode == inner_mode)
3623 return op0;
3624
3625 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3626 {
3627 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3628 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3629 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3630 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3631 else
3632 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3633 }
3634 else if (FLOAT_MODE_P (mode))
3635 gcc_unreachable ();
3636 else if (FLOAT_MODE_P (inner_mode))
3637 {
3638 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3639 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3640 else
3641 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3642 }
3643 else if (CONSTANT_P (op0)
3644 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3645 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3646 subreg_lowpart_offset (mode, inner_mode));
3647 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3648 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3649 else
3650 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3651
3652 return op0;
3653 }
3654
3655 /* Expand the _LOCs in debug insns. We run this after expanding all
3656 regular insns, so that any variables referenced in the function
3657 will have their DECL_RTLs set. */
3658
3659 static void
3660 expand_debug_locations (void)
3661 {
3662 rtx insn;
3663 rtx last = get_last_insn ();
3664 int save_strict_alias = flag_strict_aliasing;
3665
3666 /* New alias sets while setting up memory attributes cause
3667 -fcompare-debug failures, even though they don't bring about any
3668 codegen changes. */
3669 flag_strict_aliasing = 0;
3670
3671 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3672 if (DEBUG_INSN_P (insn))
3673 {
3674 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3675 rtx val;
3676 enum machine_mode mode;
3677
3678 if (value == NULL_TREE)
3679 val = NULL_RTX;
3680 else
3681 {
3682 if (INSN_VAR_LOCATION_STATUS (insn)
3683 == VAR_INIT_STATUS_UNINITIALIZED)
3684 val = expand_debug_source_expr (value);
3685 else
3686 val = expand_debug_expr (value);
3687 gcc_assert (last == get_last_insn ());
3688 }
3689
3690 if (!val)
3691 val = gen_rtx_UNKNOWN_VAR_LOC ();
3692 else
3693 {
3694 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3695
3696 gcc_assert (mode == GET_MODE (val)
3697 || (GET_MODE (val) == VOIDmode
3698 && (CONST_INT_P (val)
3699 || GET_CODE (val) == CONST_FIXED
3700 || GET_CODE (val) == CONST_DOUBLE
3701 || GET_CODE (val) == LABEL_REF)));
3702 }
3703
3704 INSN_VAR_LOCATION_LOC (insn) = val;
3705 }
3706
3707 flag_strict_aliasing = save_strict_alias;
3708 }
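/* As a sketch of what the loop above produces (illustrative, not literal
   dump output): a debug bind whose value tree is "a_2 + 1" ends up with a
   location such as (plus:SI (reg:SI 58) (const_int 1)), while a value that
   cannot be expanded gets gen_rtx_UNKNOWN_VAR_LOC () as its location, so
   var-tracking knows the value is unavailable there.  */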
3709
3710 /* Expand basic block BB from GIMPLE trees to RTL. */
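/* Note that the block returned below may differ from BB:
   expand_gimple_cond and expand_gimple_tailcall can split the block, in
   which case the caller's FOR_BB_BETWEEN walk continues from the block
   they return.  */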
3711
3712 static basic_block
3713 expand_gimple_basic_block (basic_block bb)
3714 {
3715 gimple_stmt_iterator gsi;
3716 gimple_seq stmts;
3717 gimple stmt = NULL;
3718 rtx note, last;
3719 edge e;
3720 edge_iterator ei;
3721 void **elt;
3722
3723 if (dump_file)
3724 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3725 bb->index);
3726
3727 /* Note that since we are now transitioning from GIMPLE to RTL, we
3728 cannot use the gsi_*_bb() routines because they expect the basic
3729 block to be in GIMPLE, instead of RTL. Therefore, we need to
3730 access the BB sequence directly. */
3731 stmts = bb_seq (bb);
3732 bb->il.gimple = NULL;
3733 rtl_profile_for_bb (bb);
3734 init_rtl_bb_info (bb);
3735 bb->flags |= BB_RTL;
3736
3737 /* Remove the RETURN_EXPR if we may fall through to the exit
3738 instead. */
3739 gsi = gsi_last (stmts);
3740 if (!gsi_end_p (gsi)
3741 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3742 {
3743 gimple ret_stmt = gsi_stmt (gsi);
3744
3745 gcc_assert (single_succ_p (bb));
3746 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3747
3748 if (bb->next_bb == EXIT_BLOCK_PTR
3749 && !gimple_return_retval (ret_stmt))
3750 {
3751 gsi_remove (&gsi, false);
3752 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3753 }
3754 }
3755
3756 gsi = gsi_start (stmts);
3757 if (!gsi_end_p (gsi))
3758 {
3759 stmt = gsi_stmt (gsi);
3760 if (gimple_code (stmt) != GIMPLE_LABEL)
3761 stmt = NULL;
3762 }
3763
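/* label_rtx_for_bb records an RTL label for a block the first time some
   other block needs to jump to it; if such a label already exists for BB
   it is emitted below alongside any leading GIMPLE_LABEL.  */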
3764 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3765
3766 if (stmt || elt)
3767 {
3768 last = get_last_insn ();
3769
3770 if (stmt)
3771 {
3772 expand_gimple_stmt (stmt);
3773 gsi_next (&gsi);
3774 }
3775
3776 if (elt)
3777 emit_label ((rtx) *elt);
3778
3779 /* Java emits line number notes at the top of labels.
3780 ??? Make this go away once line number notes are obsolete. */
3781 BB_HEAD (bb) = NEXT_INSN (last);
3782 if (NOTE_P (BB_HEAD (bb)))
3783 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3784 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3785
3786 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3787 }
3788 else
3789 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3790
3791 NOTE_BASIC_BLOCK (note) = bb;
3792
3793 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3794 {
3795 basic_block new_bb;
3796
3797 stmt = gsi_stmt (gsi);
3798
3799 /* If this statement is a non-debug one, and we generate debug
3800 insns, then this one might be the last real use of a TERed
3801 SSA_NAME, while there are still debug uses of it further
3802 down. Expanding the SSA name in those later debug uses by
3803 its RHS might lead to wrong debug info, as coalescing
3804 might place the operands of that RHS into the same
3805 pseudo as something else. Like so:
3806 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3807 use(a_1);
3808 a_2 = ...
3809 #DEBUG ... => a_1
3810 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3811 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3812 the write to a_2 would actually have clobbered the place which
3813 formerly held a_0.
3814
3815 So, instead of that, we recognize the situation, and generate
3816 debug temporaries at the last real use of TERed SSA names:
3817 a_1 = a_0 + 1;
3818 #DEBUG #D1 => a_1
3819 use(a_1);
3820 a_2 = ...
3821 #DEBUG ... => #D1
3822 */
3823 if (MAY_HAVE_DEBUG_INSNS
3824 && SA.values
3825 && !is_gimple_debug (stmt))
3826 {
3827 ssa_op_iter iter;
3828 tree op;
3829 gimple def;
3830
3831 location_t sloc = get_curr_insn_source_location ();
3832 tree sblock = get_curr_insn_block ();
3833
3834 /* Look for SSA names that have their last use here (TERed
3835 names always have only one real use). */
3836 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3837 if ((def = get_gimple_for_ssa_name (op)))
3838 {
3839 imm_use_iterator imm_iter;
3840 use_operand_p use_p;
3841 bool have_debug_uses = false;
3842
3843 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3844 {
3845 if (gimple_debug_bind_p (USE_STMT (use_p)))
3846 {
3847 have_debug_uses = true;
3848 break;
3849 }
3850 }
3851
3852 if (have_debug_uses)
3853 {
3854 /* OP is a TERed SSA name, DEF is its defining
3855 statement, and OP is used in further debug
3856 instructions. Generate a debug temporary, and
3857 replace all uses of OP in debug insns with that
3858 temporary. */
3859 gimple debugstmt;
3860 tree value = gimple_assign_rhs_to_tree (def);
3861 tree vexpr = make_node (DEBUG_EXPR_DECL);
3862 rtx val;
3863 enum machine_mode mode;
3864
3865 set_curr_insn_source_location (gimple_location (def));
3866 set_curr_insn_block (gimple_block (def));
3867
3868 DECL_ARTIFICIAL (vexpr) = 1;
3869 TREE_TYPE (vexpr) = TREE_TYPE (value);
3870 if (DECL_P (value))
3871 mode = DECL_MODE (value);
3872 else
3873 mode = TYPE_MODE (TREE_TYPE (value));
3874 DECL_MODE (vexpr) = mode;
3875
3876 val = gen_rtx_VAR_LOCATION
3877 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3878
3879 emit_debug_insn (val);
3880
3881 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3882 {
3883 if (!gimple_debug_bind_p (debugstmt))
3884 continue;
3885
3886 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3887 SET_USE (use_p, vexpr);
3888
3889 update_stmt (debugstmt);
3890 }
3891 }
3892 }
3893 set_curr_insn_source_location (sloc);
3894 set_curr_insn_block (sblock);
3895 }
3896
3897 currently_expanding_gimple_stmt = stmt;
3898
3899 /* Expand this statement, then evaluate the resulting RTL and
3900 fixup the CFG accordingly. */
3901 if (gimple_code (stmt) == GIMPLE_COND)
3902 {
3903 new_bb = expand_gimple_cond (bb, stmt);
3904 if (new_bb)
3905 return new_bb;
3906 }
3907 else if (gimple_debug_bind_p (stmt))
3908 {
3909 location_t sloc = get_curr_insn_source_location ();
3910 tree sblock = get_curr_insn_block ();
3911 gimple_stmt_iterator nsi = gsi;
3912
3913 for (;;)
3914 {
3915 tree var = gimple_debug_bind_get_var (stmt);
3916 tree value;
3917 rtx val;
3918 enum machine_mode mode;
3919
3920 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3921 && TREE_CODE (var) != LABEL_DECL
3922 && !target_for_debug_bind (var))
3923 goto delink_debug_stmt;
3924
3925 if (gimple_debug_bind_has_value_p (stmt))
3926 value = gimple_debug_bind_get_value (stmt);
3927 else
3928 value = NULL_TREE;
3929
3930 last = get_last_insn ();
3931
3932 set_curr_insn_source_location (gimple_location (stmt));
3933 set_curr_insn_block (gimple_block (stmt));
3934
3935 if (DECL_P (var))
3936 mode = DECL_MODE (var);
3937 else
3938 mode = TYPE_MODE (TREE_TYPE (var));
3939
3940 val = gen_rtx_VAR_LOCATION
3941 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3942
3943 emit_debug_insn (val);
3944
3945 if (dump_file && (dump_flags & TDF_DETAILS))
3946 {
3947 /* We can't dump the insn with a TREE where an RTX
3948 is expected. */
3949 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3950 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3951 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3952 }
3953
3954 delink_debug_stmt:
3955 /* In order not to generate too many debug temporaries,
3956 we delink all uses of debug statements we already expanded.
3957 Therefore debug statements between definition and real
3958 use of TERed SSA names will continue to use the SSA name,
3959 and not be replaced with debug temps. */
3960 delink_stmt_imm_use (stmt);
3961
3962 gsi = nsi;
3963 gsi_next (&nsi);
3964 if (gsi_end_p (nsi))
3965 break;
3966 stmt = gsi_stmt (nsi);
3967 if (!gimple_debug_bind_p (stmt))
3968 break;
3969 }
3970
3971 set_curr_insn_source_location (sloc);
3972 set_curr_insn_block (sblock);
3973 }
3974 else if (gimple_debug_source_bind_p (stmt))
3975 {
3976 location_t sloc = get_curr_insn_source_location ();
3977 tree sblock = get_curr_insn_block ();
3978 tree var = gimple_debug_source_bind_get_var (stmt);
3979 tree value = gimple_debug_source_bind_get_value (stmt);
3980 rtx val;
3981 enum machine_mode mode;
3982
3983 last = get_last_insn ();
3984
3985 set_curr_insn_source_location (gimple_location (stmt));
3986 set_curr_insn_block (gimple_block (stmt));
3987
3988 mode = DECL_MODE (var);
3989
3990 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3991 VAR_INIT_STATUS_UNINITIALIZED);
3992
3993 emit_debug_insn (val);
3994
3995 if (dump_file && (dump_flags & TDF_DETAILS))
3996 {
3997 /* We can't dump the insn with a TREE where an RTX
3998 is expected. */
3999 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4000 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4001 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4002 }
4003
4004 set_curr_insn_source_location (sloc);
4005 set_curr_insn_block (sblock);
4006 }
4007 else
4008 {
4009 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
4010 {
4011 bool can_fallthru;
4012 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4013 if (new_bb)
4014 {
4015 if (can_fallthru)
4016 bb = new_bb;
4017 else
4018 return new_bb;
4019 }
4020 }
4021 else
4022 {
4023 def_operand_p def_p;
4024 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4025
4026 if (def_p != NULL)
4027 {
4028 /* Ignore this stmt if it is in the list of
4029 replaceable expressions. */
4030 if (SA.values
4031 && bitmap_bit_p (SA.values,
4032 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4033 continue;
4034 }
4035 last = expand_gimple_stmt (stmt);
4036 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4037 }
4038 }
4039 }
4040
4041 currently_expanding_gimple_stmt = NULL;
4042
4043 /* Expand implicit goto and convert goto_locus. */
4044 FOR_EACH_EDGE (e, ei, bb->succs)
4045 {
4046 if (e->goto_locus && e->goto_block)
4047 {
4048 set_curr_insn_source_location (e->goto_locus);
4049 set_curr_insn_block (e->goto_block);
4050 e->goto_locus = curr_insn_locator ();
4051 }
4052 e->goto_block = NULL;
4053 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4054 {
4055 emit_jump (label_rtx_for_bb (e->dest));
4056 e->flags &= ~EDGE_FALLTHRU;
4057 }
4058 }
4059
4060 /* Expanded RTL can create a jump as the last instruction of the block.
4061 This jump might later be assumed to be a jump to the successor and break edge insertion.
4062 We need to insert a dummy move to prevent this. PR41440. */
4063 if (single_succ_p (bb)
4064 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4065 && (last = get_last_insn ())
4066 && JUMP_P (last))
4067 {
4068 rtx dummy = gen_reg_rtx (SImode);
4069 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4070 }
4071
4072 do_pending_stack_adjust ();
4073
4074 /* Find the block tail. The last insn in the block is the insn
4075 before a barrier and/or table jump insn. */
4076 last = get_last_insn ();
4077 if (BARRIER_P (last))
4078 last = PREV_INSN (last);
4079 if (JUMP_TABLE_DATA_P (last))
4080 last = PREV_INSN (PREV_INSN (last));
4081 BB_END (bb) = last;
4082
4083 update_bb_for_insn (bb);
4084
4085 return bb;
4086 }
4087
4088
4089 /* Create a basic block for initialization code. */
4090
4091 static basic_block
4092 construct_init_block (void)
4093 {
4094 basic_block init_block, first_block;
4095 edge e = NULL;
4096 int flags;
4097
4098 /* Multiple entry points not supported yet. */
4099 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4100 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4101 init_rtl_bb_info (EXIT_BLOCK_PTR);
4102 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4103 EXIT_BLOCK_PTR->flags |= BB_RTL;
4104
4105 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4106
4107 /* When the entry edge points to the first basic block, we don't need a jump;
4108 otherwise we have to jump to the proper target. */
4109 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4110 {
4111 tree label = gimple_block_label (e->dest);
4112
4113 emit_jump (label_rtx (label));
4114 flags = 0;
4115 }
4116 else
4117 flags = EDGE_FALLTHRU;
4118
4119 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4120 get_last_insn (),
4121 ENTRY_BLOCK_PTR);
4122 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4123 init_block->count = ENTRY_BLOCK_PTR->count;
4124 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4125 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
4126 if (e)
4127 {
4128 first_block = e->dest;
4129 redirect_edge_succ (e, init_block);
4130 e = make_edge (init_block, first_block, flags);
4131 }
4132 else
4133 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4134 e->probability = REG_BR_PROB_BASE;
4135 e->count = ENTRY_BLOCK_PTR->count;
4136
4137 update_bb_for_insn (init_block);
4138 return init_block;
4139 }
4140
4141 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4142 found in the block tree. */
4143
4144 static void
4145 set_block_levels (tree block, int level)
4146 {
4147 while (block)
4148 {
4149 BLOCK_NUMBER (block) = level;
4150 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4151 block = BLOCK_CHAIN (block);
4152 }
4153 }
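/* For example, DECL_INITIAL of the function gets level 0, its immediate
   subblocks level 1, and so on; change_scope later uses these depths to
   find the common ancestor of two lexical scopes quickly.  */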
4154
4155 /* Create a block containing landing pads and similar stuff. */
4156
4157 static void
4158 construct_exit_block (void)
4159 {
4160 rtx head = get_last_insn ();
4161 rtx end;
4162 basic_block exit_block;
4163 edge e, e2;
4164 unsigned ix;
4165 edge_iterator ei;
4166 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4167
4168 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4169
4170 /* Make sure the locus is set to the end of the function, so that
4171 epilogue line numbers and warnings are set properly. */
4172 if (cfun->function_end_locus != UNKNOWN_LOCATION)
4173 input_location = cfun->function_end_locus;
4174
4175 /* The following insns belong to the top scope. */
4176 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4177
4178 /* Generate rtl for function exit. */
4179 expand_function_end ();
4180
4181 end = get_last_insn ();
4182 if (head == end)
4183 return;
4184 /* While emitting the function end we could have moved the end of the
4185 last basic block; restore it. */
4186 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4187 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4188 head = NEXT_INSN (head);
4189 exit_block = create_basic_block (NEXT_INSN (head), end,
4190 EXIT_BLOCK_PTR->prev_bb);
4191 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4192 exit_block->count = EXIT_BLOCK_PTR->count;
4193 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4194 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
4195
4196 ix = 0;
4197 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4198 {
4199 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4200 if (!(e->flags & EDGE_ABNORMAL))
4201 redirect_edge_succ (e, exit_block);
4202 else
4203 ix++;
4204 }
4205
4206 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4207 e->probability = REG_BR_PROB_BASE;
4208 e->count = EXIT_BLOCK_PTR->count;
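/* The abnormal predecessor edges were left pointing at EXIT_BLOCK_PTR by
   the loop above, so their counts and frequencies are subtracted below:
   the new exit_block and its fallthru edge should only account for the
   edges that were actually redirected to it.  */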
4209 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4210 if (e2 != e)
4211 {
4212 e->count -= e2->count;
4213 exit_block->count -= e2->count;
4214 exit_block->frequency -= EDGE_FREQUENCY (e2);
4215 }
4216 if (e->count < 0)
4217 e->count = 0;
4218 if (exit_block->count < 0)
4219 exit_block->count = 0;
4220 if (exit_block->frequency < 0)
4221 exit_block->frequency = 0;
4222 update_bb_for_insn (exit_block);
4223 }
4224
4225 /* Helper function for discover_nonconstant_array_refs.
4226 Look for ARRAY_REF nodes with non-constant indexes and mark them
4227 addressable. */
4228
4229 static tree
4230 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4231 void *data ATTRIBUTE_UNUSED)
4232 {
4233 tree t = *tp;
4234
4235 if (IS_TYPE_OR_DECL_P (t))
4236 *walk_subtrees = 0;
4237 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4238 {
4239 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4240 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4241 && (!TREE_OPERAND (t, 2)
4242 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4243 || (TREE_CODE (t) == COMPONENT_REF
4244 && (!TREE_OPERAND (t,2)
4245 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4246 || TREE_CODE (t) == BIT_FIELD_REF
4247 || TREE_CODE (t) == REALPART_EXPR
4248 || TREE_CODE (t) == IMAGPART_EXPR
4249 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4250 || CONVERT_EXPR_P (t))
4251 t = TREE_OPERAND (t, 0);
4252
4253 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4254 {
4255 t = get_base_address (t);
4256 if (t && DECL_P (t)
4257 && DECL_MODE (t) != BLKmode)
4258 TREE_ADDRESSABLE (t) = 1;
4259 }
4260
4261 *walk_subtrees = 0;
4262 }
4263
4264 return NULL_TREE;
4265 }
4266
4267 /* RTL expansion is not able to compile array references with variable
4268 offsets for arrays stored in a single register. Discover such
4269 expressions and mark the variables as addressable to avoid this
4270 scenario. */
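/* A hedged illustration with invented identifiers:

     struct vec { int v[2]; } x;
     ... = x.v[i];   where I is not a compile-time constant

   Without being marked addressable, X could be assigned a single
   non-BLKmode register, and RTL expansion would then have no way to
   index into it; the walk below forces such variables into memory.  */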
4271
4272 static void
4273 discover_nonconstant_array_refs (void)
4274 {
4275 basic_block bb;
4276 gimple_stmt_iterator gsi;
4277
4278 FOR_EACH_BB (bb)
4279 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4280 {
4281 gimple stmt = gsi_stmt (gsi);
4282 if (!is_gimple_debug (stmt))
4283 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4284 }
4285 }
4286
4287 /* This function sets crtl->args.internal_arg_pointer to a virtual
4288 register if DRAP is needed. The local register allocator will replace
4289 virtual_incoming_args_rtx with the virtual register. */
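/* DRAP here is the dynamic realign argument pointer: a register the
   target provides so that incoming arguments can still be reached after
   the stack pointer has been realigned in the prologue.  */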
4290
4291 static void
4292 expand_stack_alignment (void)
4293 {
4294 rtx drap_rtx;
4295 unsigned int preferred_stack_boundary;
4296
4297 if (! SUPPORTS_STACK_ALIGNMENT)
4298 return;
4299
4300 if (cfun->calls_alloca
4301 || cfun->has_nonlocal_label
4302 || crtl->has_nonlocal_goto)
4303 crtl->need_drap = true;
4304
4305 /* Call update_stack_boundary here again to update incoming stack
4306 boundary. It may set incoming stack alignment to a different
4307 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4308 use the minimum incoming stack alignment to check if it is OK
4309 to perform sibcall optimization since sibcall optimization will
4310 only align the outgoing stack to incoming stack boundary. */
4311 if (targetm.calls.update_stack_boundary)
4312 targetm.calls.update_stack_boundary ();
4313
4314 /* The incoming stack frame has to be aligned at least at
4315 parm_stack_boundary. */
4316 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4317
4318 /* Update crtl->stack_alignment_estimated and use it later to align the
4319 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4320 exceptions since callgraph doesn't collect incoming stack alignment
4321 in this case. */
4322 if (cfun->can_throw_non_call_exceptions
4323 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4324 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4325 else
4326 preferred_stack_boundary = crtl->preferred_stack_boundary;
4327 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4328 crtl->stack_alignment_estimated = preferred_stack_boundary;
4329 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4330 crtl->stack_alignment_needed = preferred_stack_boundary;
4331
4332 gcc_assert (crtl->stack_alignment_needed
4333 <= crtl->stack_alignment_estimated);
4334
4335 crtl->stack_realign_needed
4336 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4337 crtl->stack_realign_tried = crtl->stack_realign_needed;
4338
4339 crtl->stack_realign_processed = true;
4340
4341 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4342 alignment. */
4343 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4344 drap_rtx = targetm.calls.get_drap_rtx ();
4345
4346 /* stack_realign_drap and drap_rtx must match. */
4347 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4348
4349 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4350 if (NULL != drap_rtx)
4351 {
4352 crtl->args.internal_arg_pointer = drap_rtx;
4353
4354 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4355 needed. */
4356 fixup_tail_calls ();
4357 }
4358 }
4359
4360 /* Translate the intermediate representation contained in the CFG
4361 from GIMPLE trees to RTL.
4362
4363 We do conversion per basic block and preserve/update the tree CFG.
4364 This implies we have to do some magic as the CFG can simultaneously
4365 consist of basic blocks containing RTL and GIMPLE trees. This can
4366 confuse the CFG hooks, so be careful to not manipulate CFG during
4367 the expansion. */
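/* In outline, the function below: rewrites the IL out of SSA form,
   expands the variables recorded during gimple lowering, expands every
   basic block via expand_gimple_basic_block, expands debug locations,
   builds the init and exit blocks, rediscovers sub-basic-blocks created
   by expansion, and finally performs the post-expansion cleanups.  */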
4368
4369 static unsigned int
4370 gimple_expand_cfg (void)
4371 {
4372 basic_block bb, init_block;
4373 sbitmap blocks;
4374 edge_iterator ei;
4375 edge e;
4376 rtx var_seq;
4377 unsigned i;
4378
4379 timevar_push (TV_OUT_OF_SSA);
4380 rewrite_out_of_ssa (&SA);
4381 timevar_pop (TV_OUT_OF_SSA);
4382 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4383 sizeof (rtx));
4384
4385 /* Make sure all values used by the optimization passes have sane
4386 defaults. */
4387 reg_renumber = 0;
4388
4389 /* Some backends want to know that we are expanding to RTL. */
4390 currently_expanding_to_rtl = 1;
4391 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4392 free_dominance_info (CDI_DOMINATORS);
4393
4394 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4395
4396 insn_locators_alloc ();
4397 if (!DECL_IS_BUILTIN (current_function_decl))
4398 {
4399 /* Eventually, all FEs should explicitly set function_start_locus. */
4400 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4401 set_curr_insn_source_location
4402 (DECL_SOURCE_LOCATION (current_function_decl));
4403 else
4404 set_curr_insn_source_location (cfun->function_start_locus);
4405 }
4406 else
4407 set_curr_insn_source_location (UNKNOWN_LOCATION);
4408 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4409 prologue_locator = curr_insn_locator ();
4410
4411 #ifdef INSN_SCHEDULING
4412 init_sched_attrs ();
4413 #endif
4414
4415 /* Make sure the first insn is a note even if we don't want linenums.
4416 This makes sure the first insn will never be deleted.
4417 Also, final expects a note to appear there. */
4418 emit_note (NOTE_INSN_DELETED);
4419
4420 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4421 discover_nonconstant_array_refs ();
4422
4423 targetm.expand_to_rtl_hook ();
4424 crtl->stack_alignment_needed = STACK_BOUNDARY;
4425 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4426 crtl->stack_alignment_estimated = 0;
4427 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4428 cfun->cfg->max_jumptable_ents = 0;
4429
4430 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4431 of the function section at expansion time to predict the distance of calls. */
4432 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4433
4434 /* Expand the variables recorded during gimple lowering. */
4435 timevar_push (TV_VAR_EXPAND);
4436 start_sequence ();
4437
4438 expand_used_vars ();
4439
4440 var_seq = get_insns ();
4441 end_sequence ();
4442 timevar_pop (TV_VAR_EXPAND);
4443
4444 /* Honor stack protection warnings. */
4445 if (warn_stack_protect)
4446 {
4447 if (cfun->calls_alloca)
4448 warning (OPT_Wstack_protector,
4449 "stack protector not protecting local variables: "
4450 "variable length buffer");
4451 if (has_short_buffer && !crtl->stack_protect_guard)
4452 warning (OPT_Wstack_protector,
4453 "stack protector not protecting function: "
4454 "all local arrays are less than %d bytes long",
4455 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4456 }
4457
4458 /* Set up parameters and prepare for return, for the function. */
4459 expand_function_start (current_function_decl);
4460
4461 /* If we emitted any instructions for setting up the variables,
4462 emit them before the FUNCTION_START note. */
4463 if (var_seq)
4464 {
4465 emit_insn_before (var_seq, parm_birth_insn);
4466
4467 /* In expand_function_end we'll insert the alloca save/restore
4468 before parm_birth_insn. We've just inserted an alloca call.
4469 Adjust the pointer to match. */
4470 parm_birth_insn = var_seq;
4471 }
4472
4473 /* Now that we also have the parameter RTXs, copy them over to our
4474 partitions. */
4475 for (i = 0; i < SA.map->num_partitions; i++)
4476 {
4477 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4478
4479 if (TREE_CODE (var) != VAR_DECL
4480 && !SA.partition_to_pseudo[i])
4481 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4482 gcc_assert (SA.partition_to_pseudo[i]);
4483
4484 /* If this decl was marked as living in multiple places, reset
4485 this now to NULL. */
4486 if (DECL_RTL_IF_SET (var) == pc_rtx)
4487 SET_DECL_RTL (var, NULL);
4488
4489 /* Some RTL parts really want to look at DECL_RTL(x) when x
4490 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4491 SET_DECL_RTL here to make this available, but that would mean
4492 selecting one of the potentially many RTLs for one DECL. Instead
4493 of doing that we simply reset the MEM_EXPR of the RTL in question,
4494 so that nobody can get at it and hence nobody can call DECL_RTL on it. */
4495 if (!DECL_RTL_SET_P (var))
4496 {
4497 if (MEM_P (SA.partition_to_pseudo[i]))
4498 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4499 }
4500 }
4501
4502 /* If we have a class containing differently aligned pointers,
4503 we need to merge those into the corresponding RTL pointer
4504 alignment. */
4505 for (i = 1; i < num_ssa_names; i++)
4506 {
4507 tree name = ssa_name (i);
4508 int part;
4509 rtx r;
4510
4511 if (!name
4512 || !POINTER_TYPE_P (TREE_TYPE (name))
4513 /* We might have generated new SSA names in
4514 update_alias_info_with_stack_vars. They will have a NULL
4515 defining statement, and won't be part of the partitioning,
4516 so ignore those. */
4517 || !SSA_NAME_DEF_STMT (name))
4518 continue;
4519 part = var_to_partition (SA.map, name);
4520 if (part == NO_PARTITION)
4521 continue;
4522 r = SA.partition_to_pseudo[part];
4523 if (REG_P (r))
4524 mark_reg_pointer (r, get_pointer_alignment (name));
4525 }
4526
4527 /* If this function is `main', emit a call to `__main'
4528 to run global initializers, etc. */
4529 if (DECL_NAME (current_function_decl)
4530 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4531 && DECL_FILE_SCOPE_P (current_function_decl))
4532 expand_main_function ();
4533
4534 /* Initialize the stack_protect_guard field. This must happen after the
4535 call to __main (if any) so that the external decl is initialized. */
4536 if (crtl->stack_protect_guard)
4537 stack_protect_prologue ();
4538
4539 expand_phi_nodes (&SA);
4540
4541 /* Register rtl specific functions for cfg. */
4542 rtl_register_cfg_hooks ();
4543
4544 init_block = construct_init_block ();
4545
4546 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
4547 remaining edges later. */
4548 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4549 e->flags &= ~EDGE_EXECUTABLE;
4550
4551 lab_rtx_for_bb = pointer_map_create ();
4552 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4553 bb = expand_gimple_basic_block (bb);
4554
4555 if (MAY_HAVE_DEBUG_INSNS)
4556 expand_debug_locations ();
4557
4558 /* Free stuff we no longer need after GIMPLE optimizations. */
4559 free_dominance_info (CDI_DOMINATORS);
4560 free_dominance_info (CDI_POST_DOMINATORS);
4561 delete_tree_cfg_annotations ();
4562
4563 timevar_push (TV_OUT_OF_SSA);
4564 finish_out_of_ssa (&SA);
4565 timevar_pop (TV_OUT_OF_SSA);
4566
4567 timevar_push (TV_POST_EXPAND);
4568 /* We are no longer in SSA form. */
4569 cfun->gimple_df->in_ssa_p = false;
4570 if (current_loops)
4571 loops_state_clear (LOOP_CLOSED_SSA);
4572
4573 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4574 conservatively to true until they are all profile aware. */
4575 pointer_map_destroy (lab_rtx_for_bb);
4576 free_histograms ();
4577
4578 construct_exit_block ();
4579 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4580 insn_locators_finalize ();
4581
4582 /* Zap the tree EH table. */
4583 set_eh_throw_stmt_table (cfun, NULL);
4584
4585 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4586 to split edges, which edge insertions might do. */
4587 rebuild_jump_labels (get_insns ());
4588
4589 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4590 {
4591 edge e;
4592 edge_iterator ei;
4593 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4594 {
4595 if (e->insns.r)
4596 {
4597 rebuild_jump_labels_chain (e->insns.r);
4598 /* Avoid putting insns before parm_birth_insn. */
4599 if (e->src == ENTRY_BLOCK_PTR
4600 && single_succ_p (ENTRY_BLOCK_PTR)
4601 && parm_birth_insn)
4602 {
4603 rtx insns = e->insns.r;
4604 e->insns.r = NULL_RTX;
4605 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4606 }
4607 else
4608 commit_one_edge_insertion (e);
4609 }
4610 else
4611 ei_next (&ei);
4612 }
4613 }
4614
4615 /* We're done expanding trees to RTL. */
4616 currently_expanding_to_rtl = 0;
4617
4618 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4619 {
4620 edge e;
4621 edge_iterator ei;
4622 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4623 {
4624 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4625 e->flags &= ~EDGE_EXECUTABLE;
4626
4627 /* At the moment not all abnormal edges match the RTL
4628 representation. It is safe to remove them here as
4629 find_many_sub_basic_blocks will rediscover them.
4630 In the future we should get this fixed properly. */
4631 if ((e->flags & EDGE_ABNORMAL)
4632 && !(e->flags & EDGE_SIBCALL))
4633 remove_edge (e);
4634 else
4635 ei_next (&ei);
4636 }
4637 }
4638
4639 blocks = sbitmap_alloc (last_basic_block);
4640 sbitmap_ones (blocks);
4641 find_many_sub_basic_blocks (blocks);
4642 sbitmap_free (blocks);
4643 purge_all_dead_edges ();
4644
4645 expand_stack_alignment ();
4646
4647 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4648 function. */
4649 if (crtl->tail_call_emit)
4650 fixup_tail_calls ();
4651
4652 /* After initial rtl generation, call back to finish generating
4653 exception support code. We need to do this before cleaning up
4654 the CFG as the code does not expect dead landing pads. */
4655 if (cfun->eh->region_tree != NULL)
4656 finish_eh_generation ();
4657
4658 /* Remove unreachable blocks, otherwise we cannot compute dominators,
4659 which are needed for loop state verification. As a side-effect
4660 this also compacts blocks.
4661 ??? We cannot remove trivially dead insns here as for example
4662 the DRAP reg on i?86 is not magically live at this point.
4663 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4664 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4665
4666 #ifdef ENABLE_CHECKING
4667 verify_flow_info ();
4668 #endif
4669
4670 /* Initialize pseudos allocated for hard registers. */
4671 emit_initial_value_sets ();
4672
4673 /* And finally unshare all RTL. */
4674 unshare_all_rtl ();
4675
4676 /* There's no need to defer outputting this function any more; we
4677 know we want to output it. */
4678 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4679
4680 /* Now that we're done expanding trees to RTL, we shouldn't have any
4681 more CONCATs anywhere. */
4682 generating_concat_p = 0;
4683
4684 if (dump_file)
4685 {
4686 fprintf (dump_file,
4687 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4688 /* And the pass manager will dump RTL for us. */
4689 }
4690
4691 /* If we're emitting a nested function, make sure its parent gets
4692 emitted as well. Doing otherwise confuses debug info. */
4693 {
4694 tree parent;
4695 for (parent = DECL_CONTEXT (current_function_decl);
4696 parent != NULL_TREE;
4697 parent = get_containing_scope (parent))
4698 if (TREE_CODE (parent) == FUNCTION_DECL)
4699 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4700 }
4701
4702 /* We are now committed to emitting code for this function. Do any
4703 preparation, such as emitting abstract debug info for the inline
4704 function before it gets mangled by optimization. */
4705 if (cgraph_function_possibly_inlined_p (current_function_decl))
4706 (*debug_hooks->outlining_inline_function) (current_function_decl);
4707
4708 TREE_ASM_WRITTEN (current_function_decl) = 1;
4709
4710 /* After expanding, the return labels are no longer needed. */
4711 return_label = NULL;
4712 naked_return_label = NULL;
4713
4714 /* After expanding, the tm_restart map is no longer needed. */
4715 if (cfun->gimple_df->tm_restart)
4716 {
4717 htab_delete (cfun->gimple_df->tm_restart);
4718 cfun->gimple_df->tm_restart = NULL;
4719 }
4720
4721 /* Tag the blocks with a depth number so that change_scope can find
4722 the common parent easily. */
4723 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4724 default_rtl_profile ();
4725
4726 timevar_pop (TV_POST_EXPAND);
4727
4728 return 0;
4729 }
4730
4731 struct rtl_opt_pass pass_expand =
4732 {
4733 {
4734 RTL_PASS,
4735 "expand", /* name */
4736 NULL, /* gate */
4737 gimple_expand_cfg, /* execute */
4738 NULL, /* sub */
4739 NULL, /* next */
4740 0, /* static_pass_number */
4741 TV_EXPAND, /* tv_id */
4742 PROP_ssa | PROP_gimple_leh | PROP_cfg
4743 | PROP_gimple_lcx, /* properties_required */
4744 PROP_rtl, /* properties_provided */
4745 PROP_ssa | PROP_trees, /* properties_destroyed */
4746 TODO_verify_ssa | TODO_verify_flow
4747 | TODO_verify_stmts, /* todo_flags_start */
4748 TODO_ggc_collect /* todo_flags_finish */
4749 }
4750 };
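/* pass_expand is hooked into the pass manager from passes.c; it consumes
   the GIMPLE/SSA properties listed above and provides PROP_rtl, so every
   pass that runs after it operates on RTL.  */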