re PR debug/42800 (VLA DW_AT_upper_bound is no longer emitted)
[gcc.git] / gcc / cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "toplev.h"
40 #include "debug.h"
41 #include "params.h"
42 #include "tree-inline.h"
43 #include "value-prof.h"
44 #include "target.h"
45 #include "ssaexpand.h"
46
47
48 /* This variable holds information helping the rewriting of SSA trees
49 into RTL. */
50 struct ssaexpand SA;
51
52 /* This variable holds the currently expanded gimple statement for purposes
53 of communicating the profile info to the builtin expanders. */
54 gimple currently_expanding_gimple_stmt;
55
56 /* Return an expression tree corresponding to the RHS of GIMPLE
57 statement STMT. */
58
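/* For instance (illustrative only): for a GIMPLE_BINARY_RHS statement such
   as x_1 = y_2 * z_3, this builds build2 (MULT_EXPR, TREE_TYPE (x_1), y_2, z_3),
   i.e. a MULT_EXPR of the LHS type with the two RHS operands.  */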
59 tree
60 gimple_assign_rhs_to_tree (gimple stmt)
61 {
62 tree t;
63 enum gimple_rhs_class grhs_class;
64
65 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
66
67 if (grhs_class == GIMPLE_BINARY_RHS)
68 t = build2 (gimple_assign_rhs_code (stmt),
69 TREE_TYPE (gimple_assign_lhs (stmt)),
70 gimple_assign_rhs1 (stmt),
71 gimple_assign_rhs2 (stmt));
72 else if (grhs_class == GIMPLE_UNARY_RHS)
73 t = build1 (gimple_assign_rhs_code (stmt),
74 TREE_TYPE (gimple_assign_lhs (stmt)),
75 gimple_assign_rhs1 (stmt));
76 else if (grhs_class == GIMPLE_SINGLE_RHS)
77 {
78 t = gimple_assign_rhs1 (stmt);
79 /* Avoid modifying this tree in place below. */
80 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
81 && gimple_location (stmt) != EXPR_LOCATION (t))
82 t = copy_node (t);
83 }
84 else
85 gcc_unreachable ();
86
87 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
88 SET_EXPR_LOCATION (t, gimple_location (stmt));
89
90 return t;
91 }
92
93
94 #ifndef STACK_ALIGNMENT_NEEDED
95 #define STACK_ALIGNMENT_NEEDED 1
96 #endif
97
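/* For an SSA_NAME, the underlying variable it is a version of; for any
   other tree, the decl itself.  */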
98 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
99
100 /* Associate declaration T with storage space X. If T is not an
101 SSA name this is exactly SET_DECL_RTL, otherwise make the
102 partition of T associated with X. */
103 static inline void
104 set_rtl (tree t, rtx x)
105 {
106 if (TREE_CODE (t) == SSA_NAME)
107 {
108 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
109 if (x && !MEM_P (x))
110 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
111 /* For the benefit of debug information at -O0 (where vartracking
112 doesn't run) record the place also in the base DECL if it's
113 a normal variable (not a parameter). */
114 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
115 {
116 tree var = SSA_NAME_VAR (t);
117 /* If we don't yet have something recorded, just record it now. */
118 if (!DECL_RTL_SET_P (var))
119 SET_DECL_RTL (var, x);
120 /* If we have it set already to "multiple places" don't
121 change this. */
122 else if (DECL_RTL (var) == pc_rtx)
123 ;
124 /* If we have something recorded and it's not the same place
125 as we want to record now, we have multiple partitions for the
126 same base variable, with different places. We can't just
127 randomly choose one, hence we have to say that we don't know.
128 This only happens with optimization, and there var-tracking
129 will figure out the right thing. */
130 else if (DECL_RTL (var) != x)
131 SET_DECL_RTL (var, pc_rtx);
132 }
133 }
134 else
135 SET_DECL_RTL (t, x);
136 }
137
138 /* This structure holds data relevant to one variable that will be
139 placed in a stack slot. */
140 struct stack_var
141 {
142 /* The variable. */
143 tree decl;
144
145 /* The offset of the variable. During partitioning, this is the
146 offset relative to the partition. After partitioning, this
147 is relative to the stack frame. */
148 HOST_WIDE_INT offset;
149
150 /* Initially, the size of the variable. Later, the size of the partition,
151 if this variable becomes its partition's representative. */
152 HOST_WIDE_INT size;
153
154 /* The *byte* alignment required for this variable. Or, as with the
155 size, the alignment for this partition. */
156 unsigned int alignb;
157
158 /* The partition representative. */
159 size_t representative;
160
161 /* The next stack variable in the partition, or EOC. */
162 size_t next;
163
164 /* The numbers of conflicting stack variables. */
165 bitmap conflicts;
166 };
167
168 #define EOC ((size_t)-1)
169
170 /* We have an array of such objects while deciding allocation. */
171 static struct stack_var *stack_vars;
172 static size_t stack_vars_alloc;
173 static size_t stack_vars_num;
174
175 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
176 is non-decreasing. */
177 static size_t *stack_vars_sorted;
178
179 /* The phase of the stack frame. This is the known misalignment of
180 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
181 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
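/* For example (hypothetical target values): with a PREFERRED_STACK_BOUNDARY
   of 128 bits (16 bytes) and a STARTING_FRAME_OFFSET of 8, expand_used_vars
   computes frame_phase = 16 - 8 = 8, so frame offsets are chosen such that
   offset + 8 is a multiple of 16.  */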
182 static int frame_phase;
183
184 /* Used during expand_used_vars to remember if we saw any decls for
185 which we'd like to enable stack smashing protection. */
186 static bool has_protected_decls;
187
188 /* Used during expand_used_vars. Remember if we saw a character buffer
189 smaller than our cutoff threshold. Used for -Wstack-protector. */
190 static bool has_short_buffer;
191
192 /* Discover the byte alignment to use for DECL. Ignore the alignment
193 we can't satisfy with the expected alignment of the stack boundary. */
194
195 static unsigned int
196 get_decl_align_unit (tree decl)
197 {
198 unsigned int align;
199
200 align = LOCAL_DECL_ALIGNMENT (decl);
201
202 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
203 align = MAX_SUPPORTED_STACK_ALIGNMENT;
204
205 if (SUPPORTS_STACK_ALIGNMENT)
206 {
207 if (crtl->stack_alignment_estimated < align)
208 {
209 gcc_assert(!crtl->stack_realign_processed);
210 crtl->stack_alignment_estimated = align;
211 }
212 }
213
214 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
215 So here we only make sure stack_alignment_needed >= align. */
216 if (crtl->stack_alignment_needed < align)
217 crtl->stack_alignment_needed = align;
218 if (crtl->max_used_stack_slot_alignment < align)
219 crtl->max_used_stack_slot_alignment = align;
220
221 return align / BITS_PER_UNIT;
222 }
223
224 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
225 Return the frame offset. */
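/* Illustrative example (made-up numbers): on a downward-growing frame with
   frame_phase == 0, frame_offset == -4, SIZE == 8 and ALIGN == 8, we get
   new_frame_offset = -4 - 8 = -12, rounded down to -16 by the &= -align step,
   so the returned offset is -16 and the slot occupies bytes [-16, -8)
   relative to virtual_stack_vars_rtx.  */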
226
227 static HOST_WIDE_INT
228 alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
229 {
230 HOST_WIDE_INT offset, new_frame_offset;
231
232 new_frame_offset = frame_offset;
233 if (FRAME_GROWS_DOWNWARD)
234 {
235 new_frame_offset -= size + frame_phase;
236 new_frame_offset &= -align;
237 new_frame_offset += frame_phase;
238 offset = new_frame_offset;
239 }
240 else
241 {
242 new_frame_offset -= frame_phase;
243 new_frame_offset += align - 1;
244 new_frame_offset &= -align;
245 new_frame_offset += frame_phase;
246 offset = new_frame_offset;
247 new_frame_offset += size;
248 }
249 frame_offset = new_frame_offset;
250
251 if (frame_offset_overflow (frame_offset, cfun->decl))
252 frame_offset = offset = 0;
253
254 return offset;
255 }
256
257 /* Accumulate DECL into STACK_VARS. */
258
259 static void
260 add_stack_var (tree decl)
261 {
262 if (stack_vars_num >= stack_vars_alloc)
263 {
264 if (stack_vars_alloc)
265 stack_vars_alloc = stack_vars_alloc * 3 / 2;
266 else
267 stack_vars_alloc = 32;
268 stack_vars
269 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
270 }
271 stack_vars[stack_vars_num].decl = decl;
272 stack_vars[stack_vars_num].offset = 0;
273 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
274 stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));
275
276 /* All variables are initially in their own partition. */
277 stack_vars[stack_vars_num].representative = stack_vars_num;
278 stack_vars[stack_vars_num].next = EOC;
279
280 /* All variables initially conflict with no other. */
281 stack_vars[stack_vars_num].conflicts = NULL;
282
283 /* Ensure that this decl doesn't get put onto the list twice. */
284 set_rtl (decl, pc_rtx);
285
286 stack_vars_num++;
287 }
288
289 /* Make the decls associated with the luids X and Y conflict. */
290
291 static void
292 add_stack_var_conflict (size_t x, size_t y)
293 {
294 struct stack_var *a = &stack_vars[x];
295 struct stack_var *b = &stack_vars[y];
296 if (!a->conflicts)
297 a->conflicts = BITMAP_ALLOC (NULL);
298 if (!b->conflicts)
299 b->conflicts = BITMAP_ALLOC (NULL);
300 bitmap_set_bit (a->conflicts, y);
301 bitmap_set_bit (b->conflicts, x);
302 }
303
304 /* Check whether the decls associated with the luids X and Y conflict. */
305
306 static bool
307 stack_var_conflict_p (size_t x, size_t y)
308 {
309 struct stack_var *a = &stack_vars[x];
310 struct stack_var *b = &stack_vars[y];
311 if (!a->conflicts || !b->conflicts)
312 return false;
313 return bitmap_bit_p (a->conflicts, y);
314 }
315
316 /* Returns true if TYPE is or contains a union type. */
317
318 static bool
319 aggregate_contains_union_type (tree type)
320 {
321 tree field;
322
323 if (TREE_CODE (type) == UNION_TYPE
324 || TREE_CODE (type) == QUAL_UNION_TYPE)
325 return true;
326 if (TREE_CODE (type) == ARRAY_TYPE)
327 return aggregate_contains_union_type (TREE_TYPE (type));
328 if (TREE_CODE (type) != RECORD_TYPE)
329 return false;
330
331 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
332 if (TREE_CODE (field) == FIELD_DECL)
333 if (aggregate_contains_union_type (TREE_TYPE (field)))
334 return true;
335
336 return false;
337 }
338
339 /* A subroutine of expand_used_vars. If two variables X and Y have alias
340 sets that do not conflict, then do add a conflict for these variables
341 in the interference graph. We also need to make sure to add conflicts
342 for union-containing structures. Otherwise RTL alias analysis comes along
343 and, due to type-based aliasing rules, decides that for two overlapping
344 union temporaries { short s; int i; } accesses to the same memory through
345 different types may not alias, and happily reorders stores across
346 life-time boundaries of the temporaries (see PR25654).
347 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
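/* Illustrative sketch (hypothetical temporaries, not from this file): suppose
   two such temporaries with disjoint lifetimes were given the same slot,

       union { short s; int i; } tmp1;   used first
       union { short s; int i; } tmp2;   used afterwards, same stack slot

   A store to tmp1.s and a later store to tmp2.i write the same memory, but
   under type-based aliasing RTL may decide the accesses cannot alias and
   reorder them across the lifetime boundary, clobbering tmp2.  Recording a
   conflict keeps such variables in separate slots.  */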
348
349 static void
350 add_alias_set_conflicts (void)
351 {
352 size_t i, j, n = stack_vars_num;
353
354 for (i = 0; i < n; ++i)
355 {
356 tree type_i = TREE_TYPE (stack_vars[i].decl);
357 bool aggr_i = AGGREGATE_TYPE_P (type_i);
358 bool contains_union;
359
360 contains_union = aggregate_contains_union_type (type_i);
361 for (j = 0; j < i; ++j)
362 {
363 tree type_j = TREE_TYPE (stack_vars[j].decl);
364 bool aggr_j = AGGREGATE_TYPE_P (type_j);
365 if (aggr_i != aggr_j
366 /* Either the objects conflict by means of type based
367 aliasing rules, or we need to add a conflict. */
368 || !objects_must_conflict_p (type_i, type_j)
369 /* In case the types do not conflict ensure that access
370 to elements will conflict. In case of unions we have
371 to be careful as type based aliasing rules may say
372 access to the same memory does not conflict. So play
373 safe and add a conflict in this case. */
374 || contains_union)
375 add_stack_var_conflict (i, j);
376 }
377 }
378 }
379
380 /* A subroutine of partition_stack_vars. A comparison function for qsort,
381 sorting an array of indices by the size and type of the object. */
382
383 static int
384 stack_var_size_cmp (const void *a, const void *b)
385 {
386 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
387 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
388 tree decla, declb;
389 unsigned int uida, uidb;
390
391 if (sa < sb)
392 return -1;
393 if (sa > sb)
394 return 1;
395 decla = stack_vars[*(const size_t *)a].decl;
396 declb = stack_vars[*(const size_t *)b].decl;
397 /* For stack variables of the same size use an id of the decls
398 to make the sort stable. Two SSA names are compared by their
399 version, SSA names come before non-SSA names, and two normal
400 decls are compared by their DECL_UID. */
401 if (TREE_CODE (decla) == SSA_NAME)
402 {
403 if (TREE_CODE (declb) == SSA_NAME)
404 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
405 else
406 return -1;
407 }
408 else if (TREE_CODE (declb) == SSA_NAME)
409 return 1;
410 else
411 uida = DECL_UID (decla), uidb = DECL_UID (declb);
412 if (uida < uidb)
413 return -1;
414 if (uida > uidb)
415 return 1;
416 return 0;
417 }
418
419
420 /* If the points-to solution *PT points to variables that are in a partition
421 together with other variables, add all partition members to the pointed-to
422 variables bitmap. */
423
424 static void
425 add_partitioned_vars_to_ptset (struct pt_solution *pt,
426 struct pointer_map_t *decls_to_partitions,
427 struct pointer_set_t *visited, bitmap temp)
428 {
429 bitmap_iterator bi;
430 unsigned i;
431 bitmap *part;
432
433 if (pt->anything
434 || pt->vars == NULL
435 /* The pointed-to vars bitmap is shared, it is enough to
436 visit it once. */
437 || pointer_set_insert(visited, pt->vars))
438 return;
439
440 bitmap_clear (temp);
441
442 /* By using a temporary bitmap to store all members of the partitions
443 we have to add, we make sure to visit each of the partitions only
444 once. */
445 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
446 if ((!temp
447 || !bitmap_bit_p (temp, i))
448 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
449 (void *)(size_t) i)))
450 bitmap_ior_into (temp, *part);
451 if (!bitmap_empty_p (temp))
452 bitmap_ior_into (pt->vars, temp);
453 }
454
455 /* Update points-to sets based on partition info, so we can use them on RTL.
456 The bitmaps representing stack partitions will be saved until expand,
457 where partitioned decls used as bases in memory expressions will be
458 rewritten. */
459
460 static void
461 update_alias_info_with_stack_vars (void)
462 {
463 struct pointer_map_t *decls_to_partitions = NULL;
464 size_t i, j;
465 tree var = NULL_TREE;
466
467 for (i = 0; i < stack_vars_num; i++)
468 {
469 bitmap part = NULL;
470 tree name;
471 struct ptr_info_def *pi;
472
473 /* Not interested in partitions with a single variable. */
474 if (stack_vars[i].representative != i
475 || stack_vars[i].next == EOC)
476 continue;
477
478 if (!decls_to_partitions)
479 {
480 decls_to_partitions = pointer_map_create ();
481 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
482 }
483
484 /* Create an SSA_NAME that points to the partition for use
485 as base during alias-oracle queries on RTL for bases that
486 have been partitioned. */
487 if (var == NULL_TREE)
488 var = create_tmp_var (ptr_type_node, NULL);
489 name = make_ssa_name (var, NULL);
490
491 /* Create bitmaps representing partitions. They will be used for
492 points-to sets later, so use GGC alloc. */
493 part = BITMAP_GGC_ALLOC ();
494 for (j = i; j != EOC; j = stack_vars[j].next)
495 {
496 tree decl = stack_vars[j].decl;
497 unsigned int uid = DECL_UID (decl);
498 /* We should never end up partitioning SSA names (though they
499 may end up on the stack). Neither should we allocate stack
500 space to something that is unused and thus unreferenced. */
501 gcc_assert (DECL_P (decl)
502 && referenced_var_lookup (uid));
503 bitmap_set_bit (part, uid);
504 *((bitmap *) pointer_map_insert (decls_to_partitions,
505 (void *)(size_t) uid)) = part;
506 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
507 decl)) = name;
508 }
509
510 /* Make the SSA name point to all partition members. */
511 pi = get_ptr_info (name);
512 pt_solution_set (&pi->pt, part);
513 }
514
515 /* Make all points-to sets that contain one member of a partition
516 contain all members of the partition. */
517 if (decls_to_partitions)
518 {
519 unsigned i;
520 struct pointer_set_t *visited = pointer_set_create ();
521 bitmap temp = BITMAP_ALLOC (NULL);
522
523 for (i = 1; i < num_ssa_names; i++)
524 {
525 tree name = ssa_name (i);
526 struct ptr_info_def *pi;
527
528 if (name
529 && POINTER_TYPE_P (TREE_TYPE (name))
530 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
531 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
532 visited, temp);
533 }
534
535 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
536 decls_to_partitions, visited, temp);
537 add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
538 decls_to_partitions, visited, temp);
539
540 pointer_set_destroy (visited);
541 pointer_map_destroy (decls_to_partitions);
542 BITMAP_FREE (temp);
543 }
544 }
545
546 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
547 partitioning algorithm. Partitions A and B are known to be non-conflicting.
548 Merge them into a single partition A.
549
550 At the same time, add OFFSET to all variables in partition B. At the end
551 of the partitioning process we'll have a nice block easy to lay out within
552 the stack frame. */
553
554 static void
555 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
556 {
557 size_t i, last;
558 struct stack_var *vb = &stack_vars[b];
559 bitmap_iterator bi;
560 unsigned u;
561
562 /* Update each element of partition B with the given offset,
563 and merge them into partition A. */
564 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
565 {
566 stack_vars[i].offset += offset;
567 stack_vars[i].representative = a;
568 }
569 stack_vars[last].next = stack_vars[a].next;
570 stack_vars[a].next = b;
571
572 /* Update the required alignment of partition A to account for B. */
573 if (stack_vars[a].alignb < stack_vars[b].alignb)
574 stack_vars[a].alignb = stack_vars[b].alignb;
575
576 /* Update the interference graph and merge the conflicts. */
577 if (vb->conflicts)
578 {
579 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
580 add_stack_var_conflict (a, stack_vars[u].representative);
581 BITMAP_FREE (vb->conflicts);
582 }
583 }
584
585 /* A subroutine of expand_used_vars. Binpack the variables into
586 partitions constrained by the interference graph. The overall
587 algorithm used is as follows:
588
589 Sort the objects by size.
590 For each object A {
591 S = size(A)
592 O = 0
593 loop {
594 Look for the largest non-conflicting object B with size <= S.
595 UNION (A, B)
596 offset(B) = O
597 O += size(B)
598 S -= size(B)
599 }
600 }
601 */
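/* Illustrative example (made-up sizes, assuming none of the variables
   conflict): given stack variables of sizes 8, 8, 16 and 32, successive
   UNION operations merge them into a single partition represented by the
   32-byte object, so the frame needs 32 bytes for all four variables
   instead of 64.  */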
602
603 static void
604 partition_stack_vars (void)
605 {
606 size_t si, sj, n = stack_vars_num;
607
608 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
609 for (si = 0; si < n; ++si)
610 stack_vars_sorted[si] = si;
611
612 if (n == 1)
613 return;
614
615 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
616
617 for (si = 0; si < n; ++si)
618 {
619 size_t i = stack_vars_sorted[si];
620 HOST_WIDE_INT isize = stack_vars[i].size;
621 HOST_WIDE_INT offset = 0;
622
623 for (sj = si; sj-- > 0; )
624 {
625 size_t j = stack_vars_sorted[sj];
626 HOST_WIDE_INT jsize = stack_vars[j].size;
627 unsigned int jalign = stack_vars[j].alignb;
628
629 /* Ignore objects that aren't partition representatives. */
630 if (stack_vars[j].representative != j)
631 continue;
632
633 /* Ignore objects too large for the remaining space. */
634 if (isize < jsize)
635 continue;
636
637 /* Ignore conflicting objects. */
638 if (stack_var_conflict_p (i, j))
639 continue;
640
641 /* Refine the remaining space check to include alignment. */
642 if (offset & (jalign - 1))
643 {
644 HOST_WIDE_INT toff = offset;
645 toff += jalign - 1;
646 toff &= -(HOST_WIDE_INT)jalign;
647 if (isize - (toff - offset) < jsize)
648 continue;
649
650 isize -= toff - offset;
651 offset = toff;
652 }
653
654 /* UNION the objects, placing J at OFFSET. */
655 union_stack_vars (i, j, offset);
656
657 isize -= jsize;
658 if (isize == 0)
659 break;
660 }
661 }
662
663 if (optimize)
664 update_alias_info_with_stack_vars ();
665 }
666
667 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
668
669 static void
670 dump_stack_var_partition (void)
671 {
672 size_t si, i, j, n = stack_vars_num;
673
674 for (si = 0; si < n; ++si)
675 {
676 i = stack_vars_sorted[si];
677
678 /* Skip variables that aren't partition representatives, for now. */
679 if (stack_vars[i].representative != i)
680 continue;
681
682 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
683 " align %u\n", (unsigned long) i, stack_vars[i].size,
684 stack_vars[i].alignb);
685
686 for (j = i; j != EOC; j = stack_vars[j].next)
687 {
688 fputc ('\t', dump_file);
689 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
690 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
691 stack_vars[j].offset);
692 }
693 }
694 }
695
696 /* Assign rtl to DECL at frame offset OFFSET. */
697
698 static void
699 expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
700 {
701 /* Alignment is unsigned. */
702 unsigned HOST_WIDE_INT align;
703 rtx x;
704
705 /* If this fails, we've overflowed the stack frame. Error nicely? */
706 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
707
708 x = plus_constant (virtual_stack_vars_rtx, offset);
709 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
710
711 if (TREE_CODE (decl) != SSA_NAME)
712 {
713 /* Set the alignment we actually gave this decl if it isn't an SSA name.
714 If it is, we generate stack slots only accidentally, so it isn't as
715 important; we'll simply use the alignment that is already set. */
716 offset -= frame_phase;
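      /* OFFSET & -OFFSET isolates the lowest set bit of OFFSET, i.e. the
         largest power of two that divides it, which is the byte alignment
         this slot is actually guaranteed to have.  */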
717 align = offset & -offset;
718 align *= BITS_PER_UNIT;
719 if (align == 0)
720 align = STACK_BOUNDARY;
721 else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
722 align = MAX_SUPPORTED_STACK_ALIGNMENT;
723
724 DECL_ALIGN (decl) = align;
725 DECL_USER_ALIGN (decl) = 0;
726 }
727
728 set_mem_attributes (x, SSAVAR (decl), true);
729 set_rtl (decl, x);
730 }
731
732 /* A subroutine of expand_used_vars. Give each partition representative
733 a unique location within the stack frame. Update each partition member
734 with that location. */
735
736 static void
737 expand_stack_vars (bool (*pred) (tree))
738 {
739 size_t si, i, j, n = stack_vars_num;
740
741 for (si = 0; si < n; ++si)
742 {
743 HOST_WIDE_INT offset;
744
745 i = stack_vars_sorted[si];
746
747 /* Skip variables that aren't partition representatives, for now. */
748 if (stack_vars[i].representative != i)
749 continue;
750
751 /* Skip variables that have already had rtl assigned. See also
752 add_stack_var where we perpetrate this pc_rtx hack. */
753 if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
754 ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
755 : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
756 continue;
757
758 /* Check the predicate to see whether this variable should be
759 allocated in this pass. */
760 if (pred && !pred (stack_vars[i].decl))
761 continue;
762
763 offset = alloc_stack_frame_space (stack_vars[i].size,
764 stack_vars[i].alignb);
765
766 /* Create rtl for each variable based on their location within the
767 partition. */
768 for (j = i; j != EOC; j = stack_vars[j].next)
769 {
770 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
771 expand_one_stack_var_at (stack_vars[j].decl,
772 stack_vars[j].offset + offset);
773 }
774 }
775 }
776
777 /* Take into account all sizes of partitions and reset DECL_RTLs. */
778 static HOST_WIDE_INT
779 account_stack_vars (void)
780 {
781 size_t si, j, i, n = stack_vars_num;
782 HOST_WIDE_INT size = 0;
783
784 for (si = 0; si < n; ++si)
785 {
786 i = stack_vars_sorted[si];
787
788 /* Skip variables that aren't partition representatives, for now. */
789 if (stack_vars[i].representative != i)
790 continue;
791
792 size += stack_vars[i].size;
793 for (j = i; j != EOC; j = stack_vars[j].next)
794 set_rtl (stack_vars[j].decl, NULL);
795 }
796 return size;
797 }
798
799 /* A subroutine of expand_one_var. Called to immediately assign rtl
800 to a variable to be allocated in the stack frame. */
801
802 static void
803 expand_one_stack_var (tree var)
804 {
805 HOST_WIDE_INT size, offset, align;
806
807 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
808 align = get_decl_align_unit (SSAVAR (var));
809 offset = alloc_stack_frame_space (size, align);
810
811 expand_one_stack_var_at (var, offset);
812 }
813
814 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
815 that will reside in a hard register. */
816
817 static void
818 expand_one_hard_reg_var (tree var)
819 {
820 rest_of_decl_compilation (var, 0, 0);
821 }
822
823 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
824 that will reside in a pseudo register. */
825
826 static void
827 expand_one_register_var (tree var)
828 {
829 tree decl = SSAVAR (var);
830 tree type = TREE_TYPE (decl);
831 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
832 rtx x = gen_reg_rtx (reg_mode);
833
834 set_rtl (var, x);
835
836 /* Note if the object is a user variable. */
837 if (!DECL_ARTIFICIAL (decl))
838 mark_user_reg (x);
839
840 if (POINTER_TYPE_P (type))
841 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
842 }
843
844 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
845 has some associated error, e.g. its type is error-mark. We just need
846 to pick something that won't crash the rest of the compiler. */
847
848 static void
849 expand_one_error_var (tree var)
850 {
851 enum machine_mode mode = DECL_MODE (var);
852 rtx x;
853
854 if (mode == BLKmode)
855 x = gen_rtx_MEM (BLKmode, const0_rtx);
856 else if (mode == VOIDmode)
857 x = const0_rtx;
858 else
859 x = gen_reg_rtx (mode);
860
861 SET_DECL_RTL (var, x);
862 }
863
864 /* A subroutine of expand_one_var. VAR is a variable that will be
865 allocated to the local stack frame. Return true if we wish to
866 add VAR to STACK_VARS so that it will be coalesced with other
867 variables. Return false to allocate VAR immediately.
868
869 This function is used to reduce the number of variables considered
870 for coalescing, which reduces the size of the quadratic problem. */
871
872 static bool
873 defer_stack_allocation (tree var, bool toplevel)
874 {
875 /* If stack protection is enabled, *all* stack variables must be deferred,
876 so that we can re-order the strings to the top of the frame. */
877 if (flag_stack_protect)
878 return true;
879
880 /* Variables in the outermost scope automatically conflict with
881 every other variable. The only reason to want to defer them
882 at all is that, after sorting, we can more efficiently pack
883 small variables in the stack frame. Continue to defer at -O2. */
884 if (toplevel && optimize < 2)
885 return false;
886
887 /* Without optimization, *most* variables are allocated from the
888 stack, which makes the quadratic problem large exactly when we
889 want compilation to proceed as quickly as possible. On the
890 other hand, we don't want the function's stack frame size to
891 get completely out of hand. So we avoid adding scalars and
892 "small" aggregates to the list at all. */
893 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
894 return false;
895
896 return true;
897 }
898
899 /* A subroutine of expand_used_vars. Expand one variable according to
900 its flavor. Variables to be placed on the stack are not actually
901 expanded yet, merely recorded.
902 When REALLY_EXPAND is false, only record stack variables to be allocated.
903 Return the stack usage this variable is supposed to take.
904 */
905
906 static HOST_WIDE_INT
907 expand_one_var (tree var, bool toplevel, bool really_expand)
908 {
909 tree origvar = var;
910 var = SSAVAR (var);
911
912 if (SUPPORTS_STACK_ALIGNMENT
913 && TREE_TYPE (var) != error_mark_node
914 && TREE_CODE (var) == VAR_DECL)
915 {
916 unsigned int align;
917
918 /* Because we don't know if VAR will be in a register or on the stack,
919 we conservatively assume it will be on the stack even if VAR is
920 eventually put into a register after the RA pass. For non-automatic
921 variables, which won't be on the stack, we collect the alignment of
922 the type and ignore user specified alignment. */
923 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
924 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
925 TYPE_MODE (TREE_TYPE (var)),
926 TYPE_ALIGN (TREE_TYPE (var)));
927 else
928 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
929
930 if (crtl->stack_alignment_estimated < align)
931 {
932 /* stack_alignment_estimated shouldn't change after the stack
933 realign decision has been made. */
934 gcc_assert(!crtl->stack_realign_processed);
935 crtl->stack_alignment_estimated = align;
936 }
937 }
938
939 if (TREE_CODE (origvar) == SSA_NAME)
940 {
941 gcc_assert (TREE_CODE (var) != VAR_DECL
942 || (!DECL_EXTERNAL (var)
943 && !DECL_HAS_VALUE_EXPR_P (var)
944 && !TREE_STATIC (var)
945 && TREE_TYPE (var) != error_mark_node
946 && !DECL_HARD_REGISTER (var)
947 && really_expand));
948 }
949 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
950 ;
951 else if (DECL_EXTERNAL (var))
952 ;
953 else if (DECL_HAS_VALUE_EXPR_P (var))
954 ;
955 else if (TREE_STATIC (var))
956 ;
957 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
958 ;
959 else if (TREE_TYPE (var) == error_mark_node)
960 {
961 if (really_expand)
962 expand_one_error_var (var);
963 }
964 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
965 {
966 if (really_expand)
967 expand_one_hard_reg_var (var);
968 }
969 else if (use_register_for_decl (var))
970 {
971 if (really_expand)
972 expand_one_register_var (origvar);
973 }
974 else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
975 {
976 if (really_expand)
977 {
978 error ("size of variable %q+D is too large", var);
979 expand_one_error_var (var);
980 }
981 }
982 else if (defer_stack_allocation (var, toplevel))
983 add_stack_var (origvar);
984 else
985 {
986 if (really_expand)
987 expand_one_stack_var (origvar);
988 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
989 }
990 return 0;
991 }
992
993 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
994 expanding variables. Those variables that can be put into registers
995 are allocated pseudos; those that can't are put on the stack.
996
997 TOPLEVEL is true if this is the outermost BLOCK. */
998
999 static void
1000 expand_used_vars_for_block (tree block, bool toplevel)
1001 {
1002 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1003 tree t;
1004
1005 old_sv_num = toplevel ? 0 : stack_vars_num;
1006
1007 /* Expand all variables at this level. */
1008 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1009 if (TREE_USED (t))
1010 expand_one_var (t, toplevel, true);
1011
1012 this_sv_num = stack_vars_num;
1013
1014 /* Expand all variables in the contained sub-blocks. */
1015 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1016 expand_used_vars_for_block (t, false);
1017
1018 /* Since we do not track exact variable lifetimes (which is not even
1019 possible for variables whose address escapes), we mirror the block
1020 tree in the interference graph. Here we cause all variables at this
1021 level, and all sublevels, to conflict. */
1022 if (old_sv_num < this_sv_num)
1023 {
1024 new_sv_num = stack_vars_num;
1025
1026 for (i = old_sv_num; i < new_sv_num; ++i)
1027 for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
1028 add_stack_var_conflict (i, j);
1029 }
1030 }
1031
1032 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1033 and clear TREE_USED on all local variables. */
1034
1035 static void
1036 clear_tree_used (tree block)
1037 {
1038 tree t;
1039
1040 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1041 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1042 TREE_USED (t) = 0;
1043
1044 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1045 clear_tree_used (t);
1046 }
1047
1048 /* Examine TYPE and determine a bit mask of the following features. */
1049
1050 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1051 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1052 #define SPCT_HAS_ARRAY 4
1053 #define SPCT_HAS_AGGREGATE 8
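/* For example (assuming the default --param ssp-buffer-size of 8):
   "char buf[64]" classifies as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "char buf[4]" as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, and a struct
   containing either adds SPCT_HAS_AGGREGATE to its members' bits.  */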
1054
1055 static unsigned int
1056 stack_protect_classify_type (tree type)
1057 {
1058 unsigned int ret = 0;
1059 tree t;
1060
1061 switch (TREE_CODE (type))
1062 {
1063 case ARRAY_TYPE:
1064 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1065 if (t == char_type_node
1066 || t == signed_char_type_node
1067 || t == unsigned_char_type_node)
1068 {
1069 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1070 unsigned HOST_WIDE_INT len;
1071
1072 if (!TYPE_SIZE_UNIT (type)
1073 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1074 len = max;
1075 else
1076 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1077
1078 if (len < max)
1079 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1080 else
1081 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1082 }
1083 else
1084 ret = SPCT_HAS_ARRAY;
1085 break;
1086
1087 case UNION_TYPE:
1088 case QUAL_UNION_TYPE:
1089 case RECORD_TYPE:
1090 ret = SPCT_HAS_AGGREGATE;
1091 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1092 if (TREE_CODE (t) == FIELD_DECL)
1093 ret |= stack_protect_classify_type (TREE_TYPE (t));
1094 break;
1095
1096 default:
1097 break;
1098 }
1099
1100 return ret;
1101 }
1102
1103 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1104 part of the local stack frame. Remember if we ever return nonzero for
1105 any variable in this function. The return value is the phase number in
1106 which the variable should be allocated. */
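/* For instance, under -fstack-protector-all (flag_stack_protect == 2) a
   plain "char buf[N]" is phase 1, an "int arr[N]" is phase 2 and a scalar
   is phase 0; with plain -fstack-protector only decls containing a char
   buffer at least as large as the ssp-buffer-size cutoff are segregated
   (phase 1).  */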
1107
1108 static int
1109 stack_protect_decl_phase (tree decl)
1110 {
1111 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1112 int ret = 0;
1113
1114 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1115 has_short_buffer = true;
1116
1117 if (flag_stack_protect == 2)
1118 {
1119 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1120 && !(bits & SPCT_HAS_AGGREGATE))
1121 ret = 1;
1122 else if (bits & SPCT_HAS_ARRAY)
1123 ret = 2;
1124 }
1125 else
1126 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1127
1128 if (ret)
1129 has_protected_decls = true;
1130
1131 return ret;
1132 }
1133
1134 /* Two helper routines that check for phase 1 and phase 2. These are used
1135 as callbacks for expand_stack_vars. */
1136
1137 static bool
1138 stack_protect_decl_phase_1 (tree decl)
1139 {
1140 return stack_protect_decl_phase (decl) == 1;
1141 }
1142
1143 static bool
1144 stack_protect_decl_phase_2 (tree decl)
1145 {
1146 return stack_protect_decl_phase (decl) == 2;
1147 }
1148
1149 /* Ensure that variables in different stack protection phases conflict
1150 so that they are not merged and share the same stack slot. */
1151
1152 static void
1153 add_stack_protection_conflicts (void)
1154 {
1155 size_t i, j, n = stack_vars_num;
1156 unsigned char *phase;
1157
1158 phase = XNEWVEC (unsigned char, n);
1159 for (i = 0; i < n; ++i)
1160 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1161
1162 for (i = 0; i < n; ++i)
1163 {
1164 unsigned char ph_i = phase[i];
1165 for (j = 0; j < i; ++j)
1166 if (ph_i != phase[j])
1167 add_stack_var_conflict (i, j);
1168 }
1169
1170 XDELETEVEC (phase);
1171 }
1172
1173 /* Create a decl for the guard at the top of the stack frame. */
1174
1175 static void
1176 create_stack_guard (void)
1177 {
1178 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1179 VAR_DECL, NULL, ptr_type_node);
1180 TREE_THIS_VOLATILE (guard) = 1;
1181 TREE_USED (guard) = 1;
1182 expand_one_stack_var (guard);
1183 crtl->stack_protect_guard = guard;
1184 }
1185
1186 /* A subroutine of estimated_stack_frame_size. Walk down through the
1187 BLOCK tree and accumulate the estimated stack usage of all used
1188 variables, without actually expanding any of them.
1189
1190 TOPLEVEL is true if this is the outermost BLOCK. */
1191
1192 static HOST_WIDE_INT
1193 account_used_vars_for_block (tree block, bool toplevel)
1194 {
1195 tree t;
1196 HOST_WIDE_INT size = 0;
1197
1198 /* Account for all variables at this level. */
1199 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1200 if (TREE_USED (t))
1201 size += expand_one_var (t, toplevel, false);
1202
1203 /* Account for all variables in the contained sub-blocks. */
1204 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1205 size += account_used_vars_for_block (t, false);
1206
1207 return size;
1208 }
1209
1210 /* Prepare for expanding variables. */
1211 static void
1212 init_vars_expansion (void)
1213 {
1214 tree t;
1215 /* Set TREE_USED on all variables in the local_decls. */
1216 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1217 TREE_USED (TREE_VALUE (t)) = 1;
1218
1219 /* Clear TREE_USED on all variables associated with a block scope. */
1220 clear_tree_used (DECL_INITIAL (current_function_decl));
1221
1222 /* Initialize local stack smashing state. */
1223 has_protected_decls = false;
1224 has_short_buffer = false;
1225 }
1226
1227 /* Free up stack variable graph data. */
1228 static void
1229 fini_vars_expansion (void)
1230 {
1231 size_t i, n = stack_vars_num;
1232 for (i = 0; i < n; i++)
1233 BITMAP_FREE (stack_vars[i].conflicts);
1234 XDELETEVEC (stack_vars);
1235 XDELETEVEC (stack_vars_sorted);
1236 stack_vars = NULL;
1237 stack_vars_alloc = stack_vars_num = 0;
1238 }
1239
1240 /* Make a fair guess for the size of the stack frame of the current
1241 function. This doesn't have to be exact, the result is only used
1242 in the inline heuristics. So we don't want to run the full stack
1243 var packing algorithm (which is quadratic in the number of stack
1244 vars). Instead, we calculate the total size of all stack vars.
1245 This turns out to be a pretty fair estimate -- packing of stack
1246 vars doesn't happen very often. */
1247
1248 HOST_WIDE_INT
1249 estimated_stack_frame_size (void)
1250 {
1251 HOST_WIDE_INT size = 0;
1252 size_t i;
1253 tree t, outer_block = DECL_INITIAL (current_function_decl);
1254
1255 init_vars_expansion ();
1256
1257 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1258 {
1259 tree var = TREE_VALUE (t);
1260
1261 if (TREE_USED (var))
1262 size += expand_one_var (var, true, false);
1263 TREE_USED (var) = 1;
1264 }
1265 size += account_used_vars_for_block (outer_block, true);
1266
1267 if (stack_vars_num > 0)
1268 {
1269 /* Fake sorting the stack vars for account_stack_vars (). */
1270 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1271 for (i = 0; i < stack_vars_num; ++i)
1272 stack_vars_sorted[i] = i;
1273 size += account_stack_vars ();
1274 fini_vars_expansion ();
1275 }
1276
1277 return size;
1278 }
1279
1280 /* Expand all variables used in the function. */
1281
1282 static void
1283 expand_used_vars (void)
1284 {
1285 tree t, next, outer_block = DECL_INITIAL (current_function_decl);
1286 unsigned i;
1287
1288 /* Compute the phase of the stack frame for this function. */
1289 {
1290 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1291 int off = STARTING_FRAME_OFFSET % align;
1292 frame_phase = off ? align - off : 0;
1293 }
1294
1295 init_vars_expansion ();
1296
1297 for (i = 0; i < SA.map->num_partitions; i++)
1298 {
1299 tree var = partition_to_var (SA.map, i);
1300
1301 gcc_assert (is_gimple_reg (var));
1302 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1303 expand_one_var (var, true, true);
1304 else
1305 {
1306 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1307 contain the default def (representing the parm or result itself)
1308 we don't do anything here. But those which don't contain the
1309 default def (representing a temporary based on the parm/result)
1310 we need to allocate space just like for normal VAR_DECLs. */
1311 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1312 {
1313 expand_one_var (var, true, true);
1314 gcc_assert (SA.partition_to_pseudo[i]);
1315 }
1316 }
1317 }
1318
1319 /* At this point all variables on the local_decls with TREE_USED
1320 set are not associated with any block scope. Lay them out. */
1321 t = cfun->local_decls;
1322 cfun->local_decls = NULL_TREE;
1323 for (; t; t = next)
1324 {
1325 tree var = TREE_VALUE (t);
1326 bool expand_now = false;
1327
1328 next = TREE_CHAIN (t);
1329
1330 /* Expanded above already. */
1331 if (is_gimple_reg (var))
1332 {
1333 TREE_USED (var) = 0;
1334 goto next;
1335 }
1336 /* We didn't set a block for static or extern because it's hard
1337 to tell the difference between a global variable (re)declared
1338 in a local scope, and one that's really declared there to
1339 begin with. And it doesn't really matter much, since we're
1340 not giving them stack space. Expand them now. */
1341 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1342 expand_now = true;
1343
1344 /* If the variable is not associated with any block, then it
1345 was created by the optimizers, and could be live anywhere
1346 in the function. */
1347 else if (TREE_USED (var))
1348 expand_now = true;
1349
1350 /* Finally, mark all variables on the list as used. We'll use
1351 this in a moment when we expand those associated with scopes. */
1352 TREE_USED (var) = 1;
1353
1354 if (expand_now)
1355 expand_one_var (var, true, true);
1356
1357 next:
1358 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1359 {
1360 rtx rtl = DECL_RTL_IF_SET (var);
1361
1362 /* Keep artificial non-ignored vars in cfun->local_decls
1363 chain until instantiate_decls. */
1364 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1365 {
1366 TREE_CHAIN (t) = cfun->local_decls;
1367 cfun->local_decls = t;
1368 continue;
1369 }
1370 }
1371
1372 ggc_free (t);
1373 }
1374
1375 /* At this point, all variables within the block tree with TREE_USED
1376 set are actually used by the optimized function. Lay them out. */
1377 expand_used_vars_for_block (outer_block, true);
1378
1379 if (stack_vars_num > 0)
1380 {
1381 /* Due to the way alias sets work, no variables with non-conflicting
1382 alias sets may be assigned the same address. Add conflicts to
1383 reflect this. */
1384 add_alias_set_conflicts ();
1385
1386 /* If stack protection is enabled, we don't share space between
1387 vulnerable data and non-vulnerable data. */
1388 if (flag_stack_protect)
1389 add_stack_protection_conflicts ();
1390
1391 /* Now that we have collected all stack variables, and have computed a
1392 minimal interference graph, attempt to save some stack space. */
1393 partition_stack_vars ();
1394 if (dump_file)
1395 dump_stack_var_partition ();
1396 }
1397
1398 /* There are several conditions under which we should create a
1399 stack guard: protect-all, alloca used, protected decls present. */
1400 if (flag_stack_protect == 2
1401 || (flag_stack_protect
1402 && (cfun->calls_alloca || has_protected_decls)))
1403 create_stack_guard ();
1404
1405 /* Assign rtl to each variable based on these partitions. */
1406 if (stack_vars_num > 0)
1407 {
1408 /* Reorder decls to be protected by iterating over the variables
1409 array multiple times, and allocating out of each phase in turn. */
1410 /* ??? We could probably integrate this into the qsort we did
1411 earlier, such that we naturally see these variables first,
1412 and thus naturally allocate things in the right order. */
1413 if (has_protected_decls)
1414 {
1415 /* Phase 1 contains only character arrays. */
1416 expand_stack_vars (stack_protect_decl_phase_1);
1417
1418 /* Phase 2 contains other kinds of arrays. */
1419 if (flag_stack_protect == 2)
1420 expand_stack_vars (stack_protect_decl_phase_2);
1421 }
1422
1423 expand_stack_vars (NULL);
1424
1425 fini_vars_expansion ();
1426 }
1427
1428 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1429 if (STACK_ALIGNMENT_NEEDED)
1430 {
1431 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1432 if (!FRAME_GROWS_DOWNWARD)
1433 frame_offset += align - 1;
1434 frame_offset &= -align;
1435 }
1436 }
1437
1438
1439 /* If we need to produce a detailed dump, print the tree representation
1440 for STMT to the dump file. SINCE is the last RTX after which the RTL
1441 generated for STMT should have been appended. */
1442
1443 static void
1444 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1445 {
1446 if (dump_file && (dump_flags & TDF_DETAILS))
1447 {
1448 fprintf (dump_file, "\n;; ");
1449 print_gimple_stmt (dump_file, stmt, 0,
1450 TDF_SLIM | (dump_flags & TDF_LINENO));
1451 fprintf (dump_file, "\n");
1452
1453 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1454 }
1455 }
1456
1457 /* Maps the blocks that do not contain tree labels to rtx labels. */
1458
1459 static struct pointer_map_t *lab_rtx_for_bb;
1460
1461 /* Returns the label_rtx expression for a label starting basic block BB. */
1462
1463 static rtx
1464 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1465 {
1466 gimple_stmt_iterator gsi;
1467 tree lab;
1468 gimple lab_stmt;
1469 void **elt;
1470
1471 if (bb->flags & BB_RTL)
1472 return block_label (bb);
1473
1474 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1475 if (elt)
1476 return (rtx) *elt;
1477
1478 /* Find the tree label if it is present. */
1479
1480 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1481 {
1482 lab_stmt = gsi_stmt (gsi);
1483 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1484 break;
1485
1486 lab = gimple_label_label (lab_stmt);
1487 if (DECL_NONLOCAL (lab))
1488 break;
1489
1490 return label_rtx (lab);
1491 }
1492
1493 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1494 *elt = gen_label_rtx ();
1495 return (rtx) *elt;
1496 }
1497
1498
1499 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1500 of a basic block where we just expanded the conditional at the end,
1501 possibly clean up the CFG and instruction sequence. LAST is the
1502 last instruction before the just emitted jump sequence. */
1503
1504 static void
1505 maybe_cleanup_end_of_block (edge e, rtx last)
1506 {
1507 /* Special case: when jumpif decides that the condition is
1508 trivial it emits an unconditional jump (and the necessary
1509 barrier). But we still have two edges, the fallthru one is
1510 wrong. purge_dead_edges would clean this up later. Unfortunately
1511 we have to insert insns (and split edges) before
1512 find_many_sub_basic_blocks and hence before purge_dead_edges.
1513 But splitting edges might create new blocks which depend on the
1514 fact that if there are two edges there's no barrier. So the
1515 barrier would get lost and verify_flow_info would ICE. Instead
1516 of auditing all edge splitters to care for the barrier (which
1517 normally isn't there in a cleaned CFG), fix it here. */
1518 if (BARRIER_P (get_last_insn ()))
1519 {
1520 rtx insn;
1521 remove_edge (e);
1522 /* Now we have a single successor block; if we have insns to
1523 insert on the remaining edge, we will potentially insert
1524 them at the end of this block (if the dest block isn't feasible)
1525 in order to avoid splitting the edge. This insertion will take
1526 place in front of the last jump. But we might have emitted
1527 multiple jumps (conditional and one unconditional) to the
1528 same destination. Inserting in front of the last one then
1529 is a problem. See PR 40021. We fix this by deleting all
1530 jumps except the last unconditional one. */
1531 insn = PREV_INSN (get_last_insn ());
1532 /* Make sure we have an unconditional jump. Otherwise we're
1533 confused. */
1534 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1535 for (insn = PREV_INSN (insn); insn != last;)
1536 {
1537 insn = PREV_INSN (insn);
1538 if (JUMP_P (NEXT_INSN (insn)))
1539 delete_insn (NEXT_INSN (insn));
1540 }
1541 }
1542 }
1543
1544 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1545 Returns a new basic block if we've terminated the current basic
1546 block and created a new one. */
1547
1548 static basic_block
1549 expand_gimple_cond (basic_block bb, gimple stmt)
1550 {
1551 basic_block new_bb, dest;
1552 edge new_edge;
1553 edge true_edge;
1554 edge false_edge;
1555 rtx last2, last;
1556 enum tree_code code;
1557 tree op0, op1;
1558
1559 code = gimple_cond_code (stmt);
1560 op0 = gimple_cond_lhs (stmt);
1561 op1 = gimple_cond_rhs (stmt);
1562 /* We're sometimes presented with such code:
1563 D.123_1 = x < y;
1564 if (D.123_1 != 0)
1565 ...
1566 This would expand to two comparisons which then later might
1567 be cleaned up by combine. But some pattern matchers like if-conversion
1568 work better when there's only one compare, so make up for this
1569 here as a special exception if TER would have made the same change. */
1570 if (gimple_cond_single_var_p (stmt)
1571 && SA.values
1572 && TREE_CODE (op0) == SSA_NAME
1573 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1574 {
1575 gimple second = SSA_NAME_DEF_STMT (op0);
1576 if (gimple_code (second) == GIMPLE_ASSIGN)
1577 {
1578 enum tree_code code2 = gimple_assign_rhs_code (second);
1579 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1580 {
1581 code = code2;
1582 op0 = gimple_assign_rhs1 (second);
1583 op1 = gimple_assign_rhs2 (second);
1584 }
1585 /* If jumps are cheap turn some more codes into
1586 jumpy sequences. */
1587 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1588 {
1589 if ((code2 == BIT_AND_EXPR
1590 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1591 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1592 || code2 == TRUTH_AND_EXPR)
1593 {
1594 code = TRUTH_ANDIF_EXPR;
1595 op0 = gimple_assign_rhs1 (second);
1596 op1 = gimple_assign_rhs2 (second);
1597 }
1598 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1599 {
1600 code = TRUTH_ORIF_EXPR;
1601 op0 = gimple_assign_rhs1 (second);
1602 op1 = gimple_assign_rhs2 (second);
1603 }
1604 }
1605 }
1606 }
1607
1608 last2 = last = get_last_insn ();
1609
1610 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1611 if (gimple_has_location (stmt))
1612 {
1613 set_curr_insn_source_location (gimple_location (stmt));
1614 set_curr_insn_block (gimple_block (stmt));
1615 }
1616
1617 /* These flags have no purpose in RTL land. */
1618 true_edge->flags &= ~EDGE_TRUE_VALUE;
1619 false_edge->flags &= ~EDGE_FALSE_VALUE;
1620
1621 /* We can either have a pure conditional jump with one fallthru edge or
1622 two-way jump that needs to be decomposed into two basic blocks. */
1623 if (false_edge->dest == bb->next_bb)
1624 {
1625 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1626 true_edge->probability);
1627 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1628 if (true_edge->goto_locus)
1629 {
1630 set_curr_insn_source_location (true_edge->goto_locus);
1631 set_curr_insn_block (true_edge->goto_block);
1632 true_edge->goto_locus = curr_insn_locator ();
1633 }
1634 true_edge->goto_block = NULL;
1635 false_edge->flags |= EDGE_FALLTHRU;
1636 maybe_cleanup_end_of_block (false_edge, last);
1637 return NULL;
1638 }
1639 if (true_edge->dest == bb->next_bb)
1640 {
1641 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1642 false_edge->probability);
1643 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1644 if (false_edge->goto_locus)
1645 {
1646 set_curr_insn_source_location (false_edge->goto_locus);
1647 set_curr_insn_block (false_edge->goto_block);
1648 false_edge->goto_locus = curr_insn_locator ();
1649 }
1650 false_edge->goto_block = NULL;
1651 true_edge->flags |= EDGE_FALLTHRU;
1652 maybe_cleanup_end_of_block (true_edge, last);
1653 return NULL;
1654 }
1655
1656 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1657 true_edge->probability);
1658 last = get_last_insn ();
1659 if (false_edge->goto_locus)
1660 {
1661 set_curr_insn_source_location (false_edge->goto_locus);
1662 set_curr_insn_block (false_edge->goto_block);
1663 false_edge->goto_locus = curr_insn_locator ();
1664 }
1665 false_edge->goto_block = NULL;
1666 emit_jump (label_rtx_for_bb (false_edge->dest));
1667
1668 BB_END (bb) = last;
1669 if (BARRIER_P (BB_END (bb)))
1670 BB_END (bb) = PREV_INSN (BB_END (bb));
1671 update_bb_for_insn (bb);
1672
1673 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1674 dest = false_edge->dest;
1675 redirect_edge_succ (false_edge, new_bb);
1676 false_edge->flags |= EDGE_FALLTHRU;
1677 new_bb->count = false_edge->count;
1678 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1679 new_edge = make_edge (new_bb, dest, 0);
1680 new_edge->probability = REG_BR_PROB_BASE;
1681 new_edge->count = new_bb->count;
1682 if (BARRIER_P (BB_END (new_bb)))
1683 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1684 update_bb_for_insn (new_bb);
1685
1686 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1687
1688 if (true_edge->goto_locus)
1689 {
1690 set_curr_insn_source_location (true_edge->goto_locus);
1691 set_curr_insn_block (true_edge->goto_block);
1692 true_edge->goto_locus = curr_insn_locator ();
1693 }
1694 true_edge->goto_block = NULL;
1695
1696 return new_bb;
1697 }
1698
1699 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1700 statement STMT. */
1701
1702 static void
1703 expand_call_stmt (gimple stmt)
1704 {
1705 tree exp;
1706 tree lhs = gimple_call_lhs (stmt);
1707 size_t i;
1708 bool builtin_p;
1709 tree decl;
1710
1711 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1712
1713 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1714 decl = gimple_call_fndecl (stmt);
1715 builtin_p = decl && DECL_BUILT_IN (decl);
1716
1717 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1718 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1719
1720 for (i = 0; i < gimple_call_num_args (stmt); i++)
1721 {
1722 tree arg = gimple_call_arg (stmt, i);
1723 gimple def;
1724 /* TER forwards addresses into the arguments of builtin functions so we have a
1725 chance to infer more correct alignment information. See PR39954. */
1726 if (builtin_p
1727 && TREE_CODE (arg) == SSA_NAME
1728 && (def = get_gimple_for_ssa_name (arg))
1729 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1730 arg = gimple_assign_rhs1 (def);
1731 CALL_EXPR_ARG (exp, i) = arg;
1732 }
1733
1734 if (gimple_has_side_effects (stmt))
1735 TREE_SIDE_EFFECTS (exp) = 1;
1736
1737 if (gimple_call_nothrow_p (stmt))
1738 TREE_NOTHROW (exp) = 1;
1739
1740 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1741 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1742 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1743 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1744 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1745 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1746 TREE_BLOCK (exp) = gimple_block (stmt);
1747
1748 if (lhs)
1749 expand_assignment (lhs, exp, false);
1750 else
1751 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1752 }
1753
1754 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1755 STMT that doesn't require special handling for outgoing edges. That
1756 is, no tailcalls and no GIMPLE_COND. */
1757
1758 static void
1759 expand_gimple_stmt_1 (gimple stmt)
1760 {
1761 tree op0;
1762 switch (gimple_code (stmt))
1763 {
1764 case GIMPLE_GOTO:
1765 op0 = gimple_goto_dest (stmt);
1766 if (TREE_CODE (op0) == LABEL_DECL)
1767 expand_goto (op0);
1768 else
1769 expand_computed_goto (op0);
1770 break;
1771 case GIMPLE_LABEL:
1772 expand_label (gimple_label_label (stmt));
1773 break;
1774 case GIMPLE_NOP:
1775 case GIMPLE_PREDICT:
1776 break;
1777 case GIMPLE_SWITCH:
1778 expand_case (stmt);
1779 break;
1780 case GIMPLE_ASM:
1781 expand_asm_stmt (stmt);
1782 break;
1783 case GIMPLE_CALL:
1784 expand_call_stmt (stmt);
1785 break;
1786
1787 case GIMPLE_RETURN:
1788 op0 = gimple_return_retval (stmt);
1789
1790 if (op0 && op0 != error_mark_node)
1791 {
1792 tree result = DECL_RESULT (current_function_decl);
1793
1794 /* If we are not returning the current function's RESULT_DECL,
1795 build an assignment to it. */
1796 if (op0 != result)
1797 {
1798 /* I believe that a function's RESULT_DECL is unique. */
1799 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1800
1801 /* ??? We'd like to use simply expand_assignment here,
1802 but this fails if the value is of BLKmode but the return
1803 decl is a register. expand_return has special handling
1804 for this combination, which eventually should move
1805 to common code. See comments there. Until then, let's
1806 build a modify expression :-/ */
1807 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1808 result, op0);
1809 }
1810 }
1811 if (!op0)
1812 expand_null_return ();
1813 else
1814 expand_return (op0);
1815 break;
1816
1817 case GIMPLE_ASSIGN:
1818 {
1819 tree lhs = gimple_assign_lhs (stmt);
1820
1821 /* Tree expand used to fiddle with |= and &= of two bitfield
1822 COMPONENT_REFs here. This can't happen with gimple; the LHS
1823 of binary assigns must be a gimple reg. */
1824
1825 if (TREE_CODE (lhs) != SSA_NAME
1826 || get_gimple_rhs_class (gimple_expr_code (stmt))
1827 == GIMPLE_SINGLE_RHS)
1828 {
1829 tree rhs = gimple_assign_rhs1 (stmt);
1830 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1831 == GIMPLE_SINGLE_RHS);
1832 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1833 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1834 expand_assignment (lhs, rhs,
1835 gimple_assign_nontemporal_move_p (stmt));
1836 }
1837 else
1838 {
1839 rtx target, temp;
1840 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1841 struct separate_ops ops;
1842 bool promoted = false;
1843
1844 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
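/* A promoted SUBREG means the variable is kept sign- or zero-extended in a
   wider register; such a target must be stored through convert_move below
   rather than written directly. */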
1845 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1846 promoted = true;
1847
1848 ops.code = gimple_assign_rhs_code (stmt);
1849 ops.type = TREE_TYPE (lhs);
1850 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1851 {
1852 case GIMPLE_BINARY_RHS:
1853 ops.op1 = gimple_assign_rhs2 (stmt);
1854 /* Fallthru */
1855 case GIMPLE_UNARY_RHS:
1856 ops.op0 = gimple_assign_rhs1 (stmt);
1857 break;
1858 default:
1859 gcc_unreachable ();
1860 }
1861 ops.location = gimple_location (stmt);
1862
1863 /* If we want to use a nontemporal store, force the value into a
1864 register first. If we store into a promoted register,
1865 don't expand directly to TARGET. */
1866 temp = nontemporal || promoted ? NULL_RTX : target;
1867 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
1868 EXPAND_NORMAL);
1869
1870 if (temp == target)
1871 ;
1872 else if (promoted)
1873 {
1874 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
1875 /* If TEMP is a VOIDmode constant, use convert_modes to make
1876 sure that we properly convert it. */
1877 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1878 {
1879 temp = convert_modes (GET_MODE (target),
1880 TYPE_MODE (ops.type),
1881 temp, unsignedp);
1882 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
1883 GET_MODE (target), temp, unsignedp);
1884 }
1885
1886 convert_move (SUBREG_REG (target), temp, unsignedp);
1887 }
1888 else if (nontemporal && emit_storent_insn (target, temp))
1889 ;
1890 else
1891 {
1892 temp = force_operand (temp, target);
1893 if (temp != target)
1894 emit_move_insn (target, temp);
1895 }
1896 }
1897 }
1898 break;
1899
1900 default:
1901 gcc_unreachable ();
1902 }
1903 }
1904
1905 /* Expand one gimple statement STMT and return the last RTL instruction
1906 before any of the newly generated ones.
1907
1908 In addition to generating the necessary RTL instructions this also
1909 sets REG_EH_REGION notes if necessary and sets the current source
1910 location for diagnostics. */
1911
1912 static rtx
1913 expand_gimple_stmt (gimple stmt)
1914 {
1915 int lp_nr = 0;
1916 rtx last = NULL;
1917 location_t saved_location = input_location;
1918
1919 last = get_last_insn ();
1920
1921 /* If this is an expression of some kind and it has an associated line
1922 number, then emit the line number before expanding the expression.
1923
1924 We need to save and restore the file and line information so that
1925 errors discovered during expansion are emitted with the right
1926 information. It would be better if the diagnostic routines
1927 used the file/line information embedded in the tree nodes rather
1928 than globals. */
1929 gcc_assert (cfun);
1930
1931 if (gimple_has_location (stmt))
1932 {
1933 input_location = gimple_location (stmt);
1934 set_curr_insn_source_location (input_location);
1935
1936 /* Record where the insns produced belong. */
1937 set_curr_insn_block (gimple_block (stmt));
1938 }
1939
1940 expand_gimple_stmt_1 (stmt);
1941 /* Free any temporaries used to evaluate this statement. */
1942 free_temp_slots ();
1943
1944 input_location = saved_location;
1945
1946 /* Mark all insns that may trap. */
1947 lp_nr = lookup_stmt_eh_lp (stmt);
1948 if (lp_nr)
1949 {
1950 rtx insn;
1951 for (insn = next_real_insn (last); insn;
1952 insn = next_real_insn (insn))
1953 {
1954 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1955 /* If we want exceptions for non-call insns, any
1956 may_trap_p instruction may throw. */
1957 && GET_CODE (PATTERN (insn)) != CLOBBER
1958 && GET_CODE (PATTERN (insn)) != USE
1959 && insn_could_throw_p (insn))
1960 make_reg_eh_region_note (insn, 0, lp_nr);
1961 }
1962 }
1963
1964 return last;
1965 }
1966
1967 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
1968 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
1969 generated a tail call (something that might be denied by the ABI
1970 rules governing the call; see calls.c).
1971
1972 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
1973 can still reach the rest of BB. The case here is __builtin_sqrt,
1974 where the NaN result goes through the external function (with a
1975 tailcall) and the normal result happens via a sqrt instruction. */
1976
1977 static basic_block
1978 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
1979 {
1980 rtx last2, last;
1981 edge e;
1982 edge_iterator ei;
1983 int probability;
1984 gcov_type count;
1985
1986 last2 = last = expand_gimple_stmt (stmt);
1987
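/* Scan the insns just emitted for a sibling call; if none was generated,
   the tail call was refused and the block continues normally. */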
1988 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
1989 if (CALL_P (last) && SIBLING_CALL_P (last))
1990 goto found;
1991
1992 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1993
1994 *can_fallthru = true;
1995 return NULL;
1996
1997 found:
1998 /* ??? Wouldn't it be better to just reset any pending stack adjust?
1999 Any instructions emitted here are about to be deleted. */
2000 do_pending_stack_adjust ();
2001
2002 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2003 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2004 EH or abnormal edges, we shouldn't have created a tail call in
2005 the first place. So it seems to me we should just be removing
2006 all edges here, or redirecting the existing fallthru edge to
2007 the exit block. */
2008
2009 probability = 0;
2010 count = 0;
2011
2012 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2013 {
2014 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2015 {
2016 if (e->dest != EXIT_BLOCK_PTR)
2017 {
2018 e->dest->count -= e->count;
2019 e->dest->frequency -= EDGE_FREQUENCY (e);
2020 if (e->dest->count < 0)
2021 e->dest->count = 0;
2022 if (e->dest->frequency < 0)
2023 e->dest->frequency = 0;
2024 }
2025 count += e->count;
2026 probability += e->probability;
2027 remove_edge (e);
2028 }
2029 else
2030 ei_next (&ei);
2031 }
2032
2033 /* This is somewhat ugly: the call_expr expander often emits instructions
2034 after the sibcall (to perform the function return). These confuse the
2035 find_many_sub_basic_blocks code, so we need to get rid of them. */
2036 last = NEXT_INSN (last);
2037 gcc_assert (BARRIER_P (last));
2038
2039 *can_fallthru = false;
2040 while (NEXT_INSN (last))
2041 {
2042 /* For instance, the sqrt builtin expander may expand an if with a
2043 sibcall in the then-arm and a label for the else-arm. */
2044 if (LABEL_P (NEXT_INSN (last)))
2045 {
2046 *can_fallthru = true;
2047 break;
2048 }
2049 delete_insn (NEXT_INSN (last));
2050 }
2051
2052 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2053 e->probability += probability;
2054 e->count += count;
2055 BB_END (bb) = last;
2056 update_bb_for_insn (bb);
2057
2058 if (NEXT_INSN (last))
2059 {
2060 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2061
2062 last = BB_END (bb);
2063 if (BARRIER_P (last))
2064 BB_END (bb) = PREV_INSN (last);
2065 }
2066
2067 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2068
2069 return bb;
2070 }
2071
2072 /* Return the difference between the floor and the truncated result of
2073 a signed division by OP1 with remainder MOD. */
2074 static rtx
2075 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2076 {
2077 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
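/* Since |mod| < |op1| whenever mod is nonzero, op1 / mod is nonzero and has
   the sign of op1 * mod, so the inner test checks whether the dividend and
   divisor had opposite signs. */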
2078 return gen_rtx_IF_THEN_ELSE
2079 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2080 gen_rtx_IF_THEN_ELSE
2081 (mode, gen_rtx_LT (BImode,
2082 gen_rtx_DIV (mode, op1, mod),
2083 const0_rtx),
2084 constm1_rtx, const0_rtx),
2085 const0_rtx);
2086 }
2087
2088 /* Return the difference between the ceil and the truncated result of
2089 a signed division by OP1 with remainder MOD. */
2090 static rtx
2091 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2092 {
2093 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2094 return gen_rtx_IF_THEN_ELSE
2095 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2096 gen_rtx_IF_THEN_ELSE
2097 (mode, gen_rtx_GT (BImode,
2098 gen_rtx_DIV (mode, op1, mod),
2099 const0_rtx),
2100 const1_rtx, const0_rtx),
2101 const0_rtx);
2102 }
2103
2104 /* Return the difference between the ceil and the truncated result of
2105 an unsigned division by OP1 with remainder MOD. */
2106 static rtx
2107 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2108 {
2109 /* (mod != 0 ? 1 : 0) */
2110 return gen_rtx_IF_THEN_ELSE
2111 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2112 const1_rtx, const0_rtx);
2113 }
2114
2115 /* Return the difference between the rounded and the truncated result
2116 of a signed division by OP1 with remainder MOD. Halfway cases are
2117 rounded away from zero, rather than to the nearest even number. */
2118 static rtx
2119 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2120 {
2121 /* (abs (mod) >= abs (op1) - abs (mod)
2122 ? (op1 / mod > 0 ? 1 : -1)
2123 : 0) */
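/* The guard is equivalent to 2 * |mod| >= |op1|, i.e. the discarded fraction
   is at least one half, and op1 / mod carries the sign of the exact quotient,
   so the adjustment rounds away from zero. */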
2124 return gen_rtx_IF_THEN_ELSE
2125 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2126 gen_rtx_MINUS (mode,
2127 gen_rtx_ABS (mode, op1),
2128 gen_rtx_ABS (mode, mod))),
2129 gen_rtx_IF_THEN_ELSE
2130 (mode, gen_rtx_GT (BImode,
2131 gen_rtx_DIV (mode, op1, mod),
2132 const0_rtx),
2133 const1_rtx, constm1_rtx),
2134 const0_rtx);
2135 }
2136
2137 /* Return the difference between the rounded and the truncated result
2138 of an unsigned division by OP1 with remainder MOD. Halfway cases
2139 are rounded away from zero, rather than to the nearest even
2140 number. */
2141 static rtx
2142 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2143 {
2144 /* (mod >= op1 - mod ? 1 : 0) */
2145 return gen_rtx_IF_THEN_ELSE
2146 (mode, gen_rtx_GE (BImode, mod,
2147 gen_rtx_MINUS (mode, op1, mod)),
2148 const1_rtx, const0_rtx);
2149 }
2150
2151 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2152 any rtl. */
2153
2154 static rtx
2155 convert_debug_memory_address (enum machine_mode mode, rtx x)
2156 {
2157 enum machine_mode xmode = GET_MODE (x);
2158
2159 #ifndef POINTERS_EXTEND_UNSIGNED
2160 gcc_assert (mode == Pmode);
2161 gcc_assert (xmode == mode || xmode == VOIDmode);
2162 #else
2163 gcc_assert (mode == Pmode || mode == ptr_mode);
2164
2165 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2166 return x;
2167
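/* Narrow with a lowpart subreg, or widen according to the target's
   POINTERS_EXTEND_UNSIGNED convention; none of this emits insns. */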
2168 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2169 x = simplify_gen_subreg (mode, x, xmode,
2170 subreg_lowpart_offset
2171 (mode, xmode));
2172 else if (POINTERS_EXTEND_UNSIGNED > 0)
2173 x = gen_rtx_ZERO_EXTEND (mode, x);
2174 else if (!POINTERS_EXTEND_UNSIGNED)
2175 x = gen_rtx_SIGN_EXTEND (mode, x);
2176 else
2177 gcc_unreachable ();
2178 #endif /* POINTERS_EXTEND_UNSIGNED */
2179
2180 return x;
2181 }
2182
2183 /* Return an RTX equivalent to the value of the tree expression
2184 EXP. */
2185
2186 static rtx
2187 expand_debug_expr (tree exp)
2188 {
2189 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2190 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2191 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2192 addr_space_t as;
2193 enum machine_mode address_mode;
2194
2195 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2196 {
2197 case tcc_expression:
2198 switch (TREE_CODE (exp))
2199 {
2200 case COND_EXPR:
2201 goto ternary;
2202
2203 case TRUTH_ANDIF_EXPR:
2204 case TRUTH_ORIF_EXPR:
2205 case TRUTH_AND_EXPR:
2206 case TRUTH_OR_EXPR:
2207 case TRUTH_XOR_EXPR:
2208 goto binary;
2209
2210 case TRUTH_NOT_EXPR:
2211 goto unary;
2212
2213 default:
2214 break;
2215 }
2216 break;
2217
2218 ternary:
2219 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2220 if (!op2)
2221 return NULL_RTX;
2222 /* Fall through. */
2223
2224 binary:
2225 case tcc_binary:
2226 case tcc_comparison:
2227 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2228 if (!op1)
2229 return NULL_RTX;
2230 /* Fall through. */
2231
2232 unary:
2233 case tcc_unary:
2234 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2235 if (!op0)
2236 return NULL_RTX;
2237 break;
2238
2239 case tcc_type:
2240 case tcc_statement:
2241 gcc_unreachable ();
2242
2243 case tcc_constant:
2244 case tcc_exceptional:
2245 case tcc_declaration:
2246 case tcc_reference:
2247 case tcc_vl_exp:
2248 break;
2249 }
2250
2251 switch (TREE_CODE (exp))
2252 {
2253 case STRING_CST:
2254 if (!lookup_constant_def (exp))
2255 {
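/* Punt unless the string is NUL-terminated with no embedded NULs, since
   CONST_STRING records only a plain C string pointer. */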
2256 if (strlen (TREE_STRING_POINTER (exp)) + 1
2257 != (size_t) TREE_STRING_LENGTH (exp))
2258 return NULL_RTX;
2259 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2260 op0 = gen_rtx_MEM (BLKmode, op0);
2261 set_mem_attributes (op0, exp, 0);
2262 return op0;
2263 }
2264 /* Fall through... */
2265
2266 case INTEGER_CST:
2267 case REAL_CST:
2268 case FIXED_CST:
2269 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2270 return op0;
2271
2272 case COMPLEX_CST:
2273 gcc_assert (COMPLEX_MODE_P (mode));
2274 op0 = expand_debug_expr (TREE_REALPART (exp));
2275 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2276 return gen_rtx_CONCAT (mode, op0, op1);
2277
2278 case DEBUG_EXPR_DECL:
2279 op0 = DECL_RTL_IF_SET (exp);
2280
2281 if (op0)
2282 return op0;
2283
2284 op0 = gen_rtx_DEBUG_EXPR (mode);
2285 DEBUG_EXPR_TREE_DECL (op0) = exp;
2286 SET_DECL_RTL (exp, op0);
2287
2288 return op0;
2289
2290 case VAR_DECL:
2291 case PARM_DECL:
2292 case FUNCTION_DECL:
2293 case LABEL_DECL:
2294 case CONST_DECL:
2295 case RESULT_DECL:
2296 op0 = DECL_RTL_IF_SET (exp);
2297
2298 /* This decl was probably optimized away. */
2299 if (!op0)
2300 {
2301 if (TREE_CODE (exp) != VAR_DECL
2302 || DECL_EXTERNAL (exp)
2303 || !TREE_STATIC (exp)
2304 || !DECL_NAME (exp)
2305 || DECL_HARD_REGISTER (exp)
2306 || mode == VOIDmode)
2307 return NULL;
2308
2309 op0 = DECL_RTL (exp);
2310 SET_DECL_RTL (exp, NULL);
2311 if (!MEM_P (op0)
2312 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2313 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2314 return NULL;
2315 }
2316 else
2317 op0 = copy_rtx (op0);
2318
2319 if (GET_MODE (op0) == BLKmode)
2320 {
2321 gcc_assert (MEM_P (op0));
2322 op0 = adjust_address_nv (op0, mode, 0);
2323 return op0;
2324 }
2325
2326 /* Fall through. */
2327
2328 adjust_mode:
2329 case PAREN_EXPR:
2330 case NOP_EXPR:
2331 case CONVERT_EXPR:
2332 {
2333 enum machine_mode inner_mode = GET_MODE (op0);
2334
2335 if (mode == inner_mode)
2336 return op0;
2337
2338 if (inner_mode == VOIDmode)
2339 {
2340 if (TREE_CODE (exp) == SSA_NAME)
2341 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2342 else
2343 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2344 if (mode == inner_mode)
2345 return op0;
2346 }
2347
2348 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2349 {
2350 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2351 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2352 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2353 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2354 else
2355 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2356 }
2357 else if (FLOAT_MODE_P (mode))
2358 {
2359 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2360 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2361 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2362 else
2363 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2364 }
2365 else if (FLOAT_MODE_P (inner_mode))
2366 {
2367 if (unsignedp)
2368 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2369 else
2370 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2371 }
2372 else if (CONSTANT_P (op0)
2373 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2374 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2375 subreg_lowpart_offset (mode,
2376 inner_mode));
2377 else if (unsignedp)
2378 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2379 else
2380 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2381
2382 return op0;
2383 }
2384
2385 case INDIRECT_REF:
2386 case ALIGN_INDIRECT_REF:
2387 case MISALIGNED_INDIRECT_REF:
2388 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2389 if (!op0)
2390 return NULL;
2391
2392 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2393 {
2394 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2395 address_mode = targetm.addr_space.address_mode (as);
2396 }
2397 else
2398 {
2399 as = ADDR_SPACE_GENERIC;
2400 address_mode = Pmode;
2401 }
2402
2403 if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
2404 {
2405 int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
2406 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
2407 }
2408
2409 op0 = gen_rtx_MEM (mode, op0);
2410
2411 set_mem_attributes (op0, exp, 0);
2412 set_mem_addr_space (op0, as);
2413
2414 return op0;
2415
2416 case TARGET_MEM_REF:
2417 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
2418 return NULL;
2419
2420 op0 = expand_debug_expr
2421 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2422 if (!op0)
2423 return NULL;
2424
2425 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2426
2427 op0 = gen_rtx_MEM (mode, op0);
2428
2429 set_mem_attributes (op0, exp, 0);
2430 set_mem_addr_space (op0, as);
2431
2432 return op0;
2433
2434 case ARRAY_REF:
2435 case ARRAY_RANGE_REF:
2436 case COMPONENT_REF:
2437 case BIT_FIELD_REF:
2438 case REALPART_EXPR:
2439 case IMAGPART_EXPR:
2440 case VIEW_CONVERT_EXPR:
2441 {
2442 enum machine_mode mode1;
2443 HOST_WIDE_INT bitsize, bitpos;
2444 tree offset;
2445 int volatilep = 0;
2446 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2447 &mode1, &unsignedp, &volatilep, false);
2448 rtx orig_op0;
2449
2450 if (bitsize == 0)
2451 return NULL;
2452
2453 orig_op0 = op0 = expand_debug_expr (tem);
2454
2455 if (!op0)
2456 return NULL;
2457
2458 if (offset)
2459 {
2460 enum machine_mode addrmode, offmode;
2461
2462 gcc_assert (MEM_P (op0));
2463
2464 op0 = XEXP (op0, 0);
2465 addrmode = GET_MODE (op0);
2466 if (addrmode == VOIDmode)
2467 addrmode = Pmode;
2468
2469 op1 = expand_debug_expr (offset);
2470 if (!op1)
2471 return NULL;
2472
2473 offmode = GET_MODE (op1);
2474 if (offmode == VOIDmode)
2475 offmode = TYPE_MODE (TREE_TYPE (offset));
2476
2477 if (addrmode != offmode)
2478 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2479 subreg_lowpart_offset (addrmode,
2480 offmode));
2481
2482 /* Don't use offset_address here; we don't need a
2483 recognizable address, and we don't want to generate
2484 code. */
2485 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2486 }
2487
2488 if (MEM_P (op0))
2489 {
2490 if (mode1 == VOIDmode)
2491 /* Bitfield. */
2492 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2493 if (bitpos >= BITS_PER_UNIT)
2494 {
2495 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2496 bitpos %= BITS_PER_UNIT;
2497 }
2498 else if (bitpos < 0)
2499 {
2500 HOST_WIDE_INT units
2501 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2502 op0 = adjust_address_nv (op0, mode1, units);
2503 bitpos += units * BITS_PER_UNIT;
2504 }
2505 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2506 op0 = adjust_address_nv (op0, mode, 0);
2507 else if (GET_MODE (op0) != mode1)
2508 op0 = adjust_address_nv (op0, mode1, 0);
2509 else
2510 op0 = copy_rtx (op0);
2511 if (op0 == orig_op0)
2512 op0 = shallow_copy_rtx (op0);
2513 set_mem_attributes (op0, exp, 0);
2514 }
2515
2516 if (bitpos == 0 && mode == GET_MODE (op0))
2517 return op0;
2518
2519 if (bitpos < 0)
2520 return NULL;
2521
2522 if ((bitpos % BITS_PER_UNIT) == 0
2523 && bitsize == GET_MODE_BITSIZE (mode1))
2524 {
2525 enum machine_mode opmode = GET_MODE (op0);
2526
2527 gcc_assert (opmode != BLKmode);
2528
2529 if (opmode == VOIDmode)
2530 opmode = mode1;
2531
2532 /* This condition may hold if we're expanding the address
2533 right past the end of an array that turned out not to
2534 be addressable (i.e., the address was only computed in
2535 debug stmts). The gen_subreg below would rightfully
2536 crash, and the address doesn't really exist, so just
2537 drop it. */
2538 if (bitpos >= GET_MODE_BITSIZE (opmode))
2539 return NULL;
2540
2541 return simplify_gen_subreg (mode, op0, opmode,
2542 bitpos / BITS_PER_UNIT);
2543 }
2544
2545 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2546 && TYPE_UNSIGNED (TREE_TYPE (exp))
2547 ? SIGN_EXTRACT
2548 : ZERO_EXTRACT, mode,
2549 GET_MODE (op0) != VOIDmode
2550 ? GET_MODE (op0) : mode1,
2551 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2552 }
2553
2554 case ABS_EXPR:
2555 return gen_rtx_ABS (mode, op0);
2556
2557 case NEGATE_EXPR:
2558 return gen_rtx_NEG (mode, op0);
2559
2560 case BIT_NOT_EXPR:
2561 return gen_rtx_NOT (mode, op0);
2562
2563 case FLOAT_EXPR:
2564 if (unsignedp)
2565 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2566 else
2567 return gen_rtx_FLOAT (mode, op0);
2568
2569 case FIX_TRUNC_EXPR:
2570 if (unsignedp)
2571 return gen_rtx_UNSIGNED_FIX (mode, op0);
2572 else
2573 return gen_rtx_FIX (mode, op0);
2574
2575 case POINTER_PLUS_EXPR:
2576 case PLUS_EXPR:
2577 return gen_rtx_PLUS (mode, op0, op1);
2578
2579 case MINUS_EXPR:
2580 return gen_rtx_MINUS (mode, op0, op1);
2581
2582 case MULT_EXPR:
2583 return gen_rtx_MULT (mode, op0, op1);
2584
2585 case RDIV_EXPR:
2586 case TRUNC_DIV_EXPR:
2587 case EXACT_DIV_EXPR:
2588 if (unsignedp)
2589 return gen_rtx_UDIV (mode, op0, op1);
2590 else
2591 return gen_rtx_DIV (mode, op0, op1);
2592
2593 case TRUNC_MOD_EXPR:
2594 if (unsignedp)
2595 return gen_rtx_UMOD (mode, op0, op1);
2596 else
2597 return gen_rtx_MOD (mode, op0, op1);
2598
2599 case FLOOR_DIV_EXPR:
2600 if (unsignedp)
2601 return gen_rtx_UDIV (mode, op0, op1);
2602 else
2603 {
2604 rtx div = gen_rtx_DIV (mode, op0, op1);
2605 rtx mod = gen_rtx_MOD (mode, op0, op1);
2606 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2607 return gen_rtx_PLUS (mode, div, adj);
2608 }
2609
2610 case FLOOR_MOD_EXPR:
2611 if (unsignedp)
2612 return gen_rtx_UMOD (mode, op0, op1);
2613 else
2614 {
2615 rtx mod = gen_rtx_MOD (mode, op0, op1);
2616 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2617 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2618 return gen_rtx_PLUS (mode, mod, adj);
2619 }
2620
2621 case CEIL_DIV_EXPR:
2622 if (unsignedp)
2623 {
2624 rtx div = gen_rtx_UDIV (mode, op0, op1);
2625 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2626 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2627 return gen_rtx_PLUS (mode, div, adj);
2628 }
2629 else
2630 {
2631 rtx div = gen_rtx_DIV (mode, op0, op1);
2632 rtx mod = gen_rtx_MOD (mode, op0, op1);
2633 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2634 return gen_rtx_PLUS (mode, div, adj);
2635 }
2636
2637 case CEIL_MOD_EXPR:
2638 if (unsignedp)
2639 {
2640 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2641 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2642 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2643 return gen_rtx_PLUS (mode, mod, adj);
2644 }
2645 else
2646 {
2647 rtx mod = gen_rtx_MOD (mode, op0, op1);
2648 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2649 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2650 return gen_rtx_PLUS (mode, mod, adj);
2651 }
2652
2653 case ROUND_DIV_EXPR:
2654 if (unsignedp)
2655 {
2656 rtx div = gen_rtx_UDIV (mode, op0, op1);
2657 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2658 rtx adj = round_udiv_adjust (mode, mod, op1);
2659 return gen_rtx_PLUS (mode, div, adj);
2660 }
2661 else
2662 {
2663 rtx div = gen_rtx_DIV (mode, op0, op1);
2664 rtx mod = gen_rtx_MOD (mode, op0, op1);
2665 rtx adj = round_sdiv_adjust (mode, mod, op1);
2666 return gen_rtx_PLUS (mode, div, adj);
2667 }
2668
2669 case ROUND_MOD_EXPR:
2670 if (unsignedp)
2671 {
2672 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2673 rtx adj = round_udiv_adjust (mode, mod, op1);
2674 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2675 return gen_rtx_PLUS (mode, mod, adj);
2676 }
2677 else
2678 {
2679 rtx mod = gen_rtx_MOD (mode, op0, op1);
2680 rtx adj = round_sdiv_adjust (mode, mod, op1);
2681 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2682 return gen_rtx_PLUS (mode, mod, adj);
2683 }
2684
2685 case LSHIFT_EXPR:
2686 return gen_rtx_ASHIFT (mode, op0, op1);
2687
2688 case RSHIFT_EXPR:
2689 if (unsignedp)
2690 return gen_rtx_LSHIFTRT (mode, op0, op1);
2691 else
2692 return gen_rtx_ASHIFTRT (mode, op0, op1);
2693
2694 case LROTATE_EXPR:
2695 return gen_rtx_ROTATE (mode, op0, op1);
2696
2697 case RROTATE_EXPR:
2698 return gen_rtx_ROTATERT (mode, op0, op1);
2699
2700 case MIN_EXPR:
2701 if (unsignedp)
2702 return gen_rtx_UMIN (mode, op0, op1);
2703 else
2704 return gen_rtx_SMIN (mode, op0, op1);
2705
2706 case MAX_EXPR:
2707 if (unsignedp)
2708 return gen_rtx_UMAX (mode, op0, op1);
2709 else
2710 return gen_rtx_SMAX (mode, op0, op1);
2711
2712 case BIT_AND_EXPR:
2713 case TRUTH_AND_EXPR:
2714 return gen_rtx_AND (mode, op0, op1);
2715
2716 case BIT_IOR_EXPR:
2717 case TRUTH_OR_EXPR:
2718 return gen_rtx_IOR (mode, op0, op1);
2719
2720 case BIT_XOR_EXPR:
2721 case TRUTH_XOR_EXPR:
2722 return gen_rtx_XOR (mode, op0, op1);
2723
2724 case TRUTH_ANDIF_EXPR:
2725 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2726
2727 case TRUTH_ORIF_EXPR:
2728 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2729
2730 case TRUTH_NOT_EXPR:
2731 return gen_rtx_EQ (mode, op0, const0_rtx);
2732
2733 case LT_EXPR:
2734 if (unsignedp)
2735 return gen_rtx_LTU (mode, op0, op1);
2736 else
2737 return gen_rtx_LT (mode, op0, op1);
2738
2739 case LE_EXPR:
2740 if (unsignedp)
2741 return gen_rtx_LEU (mode, op0, op1);
2742 else
2743 return gen_rtx_LE (mode, op0, op1);
2744
2745 case GT_EXPR:
2746 if (unsignedp)
2747 return gen_rtx_GTU (mode, op0, op1);
2748 else
2749 return gen_rtx_GT (mode, op0, op1);
2750
2751 case GE_EXPR:
2752 if (unsignedp)
2753 return gen_rtx_GEU (mode, op0, op1);
2754 else
2755 return gen_rtx_GE (mode, op0, op1);
2756
2757 case EQ_EXPR:
2758 return gen_rtx_EQ (mode, op0, op1);
2759
2760 case NE_EXPR:
2761 return gen_rtx_NE (mode, op0, op1);
2762
2763 case UNORDERED_EXPR:
2764 return gen_rtx_UNORDERED (mode, op0, op1);
2765
2766 case ORDERED_EXPR:
2767 return gen_rtx_ORDERED (mode, op0, op1);
2768
2769 case UNLT_EXPR:
2770 return gen_rtx_UNLT (mode, op0, op1);
2771
2772 case UNLE_EXPR:
2773 return gen_rtx_UNLE (mode, op0, op1);
2774
2775 case UNGT_EXPR:
2776 return gen_rtx_UNGT (mode, op0, op1);
2777
2778 case UNGE_EXPR:
2779 return gen_rtx_UNGE (mode, op0, op1);
2780
2781 case UNEQ_EXPR:
2782 return gen_rtx_UNEQ (mode, op0, op1);
2783
2784 case LTGT_EXPR:
2785 return gen_rtx_LTGT (mode, op0, op1);
2786
2787 case COND_EXPR:
2788 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2789
2790 case COMPLEX_EXPR:
2791 gcc_assert (COMPLEX_MODE_P (mode));
2792 if (GET_MODE (op0) == VOIDmode)
2793 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2794 if (GET_MODE (op1) == VOIDmode)
2795 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2796 return gen_rtx_CONCAT (mode, op0, op1);
2797
2798 case CONJ_EXPR:
2799 if (GET_CODE (op0) == CONCAT)
2800 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2801 gen_rtx_NEG (GET_MODE_INNER (mode),
2802 XEXP (op0, 1)));
2803 else
2804 {
2805 enum machine_mode imode = GET_MODE_INNER (mode);
2806 rtx re, im;
2807
2808 if (MEM_P (op0))
2809 {
2810 re = adjust_address_nv (op0, imode, 0);
2811 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2812 }
2813 else
2814 {
2815 enum machine_mode ifmode = int_mode_for_mode (mode);
2816 enum machine_mode ihmode = int_mode_for_mode (imode);
2817 rtx halfsize;
2818 if (ifmode == BLKmode || ihmode == BLKmode)
2819 return NULL;
2820 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2821 re = op0;
2822 if (mode != ifmode)
2823 re = gen_rtx_SUBREG (ifmode, re, 0);
2824 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2825 if (imode != ihmode)
2826 re = gen_rtx_SUBREG (imode, re, 0);
2827 im = copy_rtx (op0);
2828 if (mode != ifmode)
2829 im = gen_rtx_SUBREG (ifmode, im, 0);
2830 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
2831 if (imode != ihmode)
2832 im = gen_rtx_SUBREG (imode, im, 0);
2833 }
2834 im = gen_rtx_NEG (imode, im);
2835 return gen_rtx_CONCAT (mode, re, im);
2836 }
2837
2838 case ADDR_EXPR:
2839 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2840 if (!op0 || !MEM_P (op0))
2841 return NULL;
2842
2843 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2844
2845 return op0;
2846
2847 case VECTOR_CST:
2848 exp = build_constructor_from_list (TREE_TYPE (exp),
2849 TREE_VECTOR_CST_ELTS (exp));
2850 /* Fall through. */
2851
2852 case CONSTRUCTOR:
2853 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2854 {
2855 unsigned i;
2856 tree val;
2857
2858 op0 = gen_rtx_CONCATN
2859 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2860
2861 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2862 {
2863 op1 = expand_debug_expr (val);
2864 if (!op1)
2865 return NULL;
2866 XVECEXP (op0, 0, i) = op1;
2867 }
2868
2869 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2870 {
2871 op1 = expand_debug_expr
2872 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2873
2874 if (!op1)
2875 return NULL;
2876
2877 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2878 XVECEXP (op0, 0, i) = op1;
2879 }
2880
2881 return op0;
2882 }
2883 else
2884 goto flag_unsupported;
2885
2886 case CALL_EXPR:
2887 /* ??? Maybe handle some builtins? */
2888 return NULL;
2889
2890 case SSA_NAME:
2891 {
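/* Prefer expanding the RHS of the TERed defining statement when there is
   one; otherwise fall back to the pseudo assigned to the name's partition. */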
2892 gimple g = get_gimple_for_ssa_name (exp);
2893 if (g)
2894 {
2895 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
2896 if (!op0)
2897 return NULL;
2898 }
2899 else
2900 {
2901 int part = var_to_partition (SA.map, exp);
2902
2903 if (part == NO_PARTITION)
2904 return NULL;
2905
2906 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
2907
2908 op0 = SA.partition_to_pseudo[part];
2909 }
2910 goto adjust_mode;
2911 }
2912
2913 case ERROR_MARK:
2914 return NULL;
2915
2916 default:
2917 flag_unsupported:
2918 #ifdef ENABLE_CHECKING
2919 debug_tree (exp);
2920 gcc_unreachable ();
2921 #else
2922 return NULL;
2923 #endif
2924 }
2925 }
2926
2927 /* Expand the _LOCs in debug insns. We run this after expanding all
2928 regular insns, so that any variables referenced in the function
2929 will have their DECL_RTLs set. */
2930
2931 static void
2932 expand_debug_locations (void)
2933 {
2934 rtx insn;
2935 rtx last = get_last_insn ();
2936 int save_strict_alias = flag_strict_aliasing;
2937
2938 /* New alias sets created while setting up memory attributes cause
2939 -fcompare-debug failures, even though they don't bring about any
2940 codegen changes. */
2941 flag_strict_aliasing = 0;
2942
2943 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2944 if (DEBUG_INSN_P (insn))
2945 {
2946 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
2947 rtx val;
2948 enum machine_mode mode;
2949
2950 if (value == NULL_TREE)
2951 val = NULL_RTX;
2952 else
2953 {
2954 val = expand_debug_expr (value);
2955 gcc_assert (last == get_last_insn ());
2956 }
2957
2958 if (!val)
2959 val = gen_rtx_UNKNOWN_VAR_LOC ();
2960 else
2961 {
2962 mode = GET_MODE (INSN_VAR_LOCATION (insn));
2963
2964 gcc_assert (mode == GET_MODE (val)
2965 || (GET_MODE (val) == VOIDmode
2966 && (CONST_INT_P (val)
2967 || GET_CODE (val) == CONST_FIXED
2968 || GET_CODE (val) == CONST_DOUBLE
2969 || GET_CODE (val) == LABEL_REF)));
2970 }
2971
2972 INSN_VAR_LOCATION_LOC (insn) = val;
2973 }
2974
2975 flag_strict_aliasing = save_strict_alias;
2976 }
2977
2978 /* Expand basic block BB from GIMPLE trees to RTL. */
2979
2980 static basic_block
2981 expand_gimple_basic_block (basic_block bb)
2982 {
2983 gimple_stmt_iterator gsi;
2984 gimple_seq stmts;
2985 gimple stmt = NULL;
2986 rtx note, last;
2987 edge e;
2988 edge_iterator ei;
2989 void **elt;
2990
2991 if (dump_file)
2992 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
2993 bb->index);
2994
2995 /* Note that since we are now transitioning from GIMPLE to RTL, we
2996 cannot use the gsi_*_bb() routines because they expect the basic
2997 block to be in GIMPLE, instead of RTL. Therefore, we need to
2998 access the BB sequence directly. */
2999 stmts = bb_seq (bb);
3000 bb->il.gimple = NULL;
3001 rtl_profile_for_bb (bb);
3002 init_rtl_bb_info (bb);
3003 bb->flags |= BB_RTL;
3004
3005 /* Remove the RETURN_EXPR if we may fall through to the exit
3006 instead. */
3007 gsi = gsi_last (stmts);
3008 if (!gsi_end_p (gsi)
3009 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3010 {
3011 gimple ret_stmt = gsi_stmt (gsi);
3012
3013 gcc_assert (single_succ_p (bb));
3014 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3015
3016 if (bb->next_bb == EXIT_BLOCK_PTR
3017 && !gimple_return_retval (ret_stmt))
3018 {
3019 gsi_remove (&gsi, false);
3020 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3021 }
3022 }
3023
3024 gsi = gsi_start (stmts);
3025 if (!gsi_end_p (gsi))
3026 {
3027 stmt = gsi_stmt (gsi);
3028 if (gimple_code (stmt) != GIMPLE_LABEL)
3029 stmt = NULL;
3030 }
3031
3032 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3033
3034 if (stmt || elt)
3035 {
3036 last = get_last_insn ();
3037
3038 if (stmt)
3039 {
3040 expand_gimple_stmt (stmt);
3041 gsi_next (&gsi);
3042 }
3043
3044 if (elt)
3045 emit_label ((rtx) *elt);
3046
3047 /* Java emits line number notes at the top of labels.
3048 ??? Make this go away once line number notes are obsoleted. */
3049 BB_HEAD (bb) = NEXT_INSN (last);
3050 if (NOTE_P (BB_HEAD (bb)))
3051 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3052 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3053
3054 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3055 }
3056 else
3057 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3058
3059 NOTE_BASIC_BLOCK (note) = bb;
3060
3061 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3062 {
3063 basic_block new_bb;
3064
3065 stmt = gsi_stmt (gsi);
3066
3067 /* If this statement is a non-debug one, and we generate debug
3068 insns, then this one might be the last real use of a TERed
3069 SSA_NAME, while there are still some debug uses further
3070 down. Expanding the current SSA name in those debug
3071 uses by its RHS might lead to wrong debug info, as coalescing
3072 might make the operands of such an RHS be placed into the same
3073 pseudo as something else. Like so:
3074 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3075 use(a_1);
3076 a_2 = ...
3077 #DEBUG ... => a_1
3078 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3079 If we now were to expand a_1 by its RHS (a_0 + 1) in the debug use,
3080 the write to a_2 would actually have clobbered the place which
3081 formerly held a_0.
3082
3083 So, instead of that, we recognize the situation, and generate
3084 debug temporaries at the last real use of TERed SSA names:
3085 a_1 = a_0 + 1;
3086 #DEBUG #D1 => a_1
3087 use(a_1);
3088 a_2 = ...
3089 #DEBUG ... => #D1
3090 */
3091 if (MAY_HAVE_DEBUG_INSNS
3092 && SA.values
3093 && !is_gimple_debug (stmt))
3094 {
3095 ssa_op_iter iter;
3096 tree op;
3097 gimple def;
3098
3099 location_t sloc = get_curr_insn_source_location ();
3100 tree sblock = get_curr_insn_block ();
3101
3102 /* Look for SSA names that have their last use here (TERed
3103 names always have only one real use). */
3104 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3105 if ((def = get_gimple_for_ssa_name (op)))
3106 {
3107 imm_use_iterator imm_iter;
3108 use_operand_p use_p;
3109 bool have_debug_uses = false;
3110
3111 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3112 {
3113 if (gimple_debug_bind_p (USE_STMT (use_p)))
3114 {
3115 have_debug_uses = true;
3116 break;
3117 }
3118 }
3119
3120 if (have_debug_uses)
3121 {
3122 /* OP is a TERed SSA name, DEF is its defining
3123 statement, and OP is used in further debug
3124 instructions. Generate a debug temporary, and
3125 replace all uses of OP in debug insns with that
3126 temporary. */
3127 gimple debugstmt;
3128 tree value = gimple_assign_rhs_to_tree (def);
3129 tree vexpr = make_node (DEBUG_EXPR_DECL);
3130 rtx val;
3131 enum machine_mode mode;
3132
3133 set_curr_insn_source_location (gimple_location (def));
3134 set_curr_insn_block (gimple_block (def));
3135
3136 DECL_ARTIFICIAL (vexpr) = 1;
3137 TREE_TYPE (vexpr) = TREE_TYPE (value);
3138 if (DECL_P (value))
3139 mode = DECL_MODE (value);
3140 else
3141 mode = TYPE_MODE (TREE_TYPE (value));
3142 DECL_MODE (vexpr) = mode;
3143
3144 val = gen_rtx_VAR_LOCATION
3145 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3146
3147 val = emit_debug_insn (val);
3148
3149 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3150 {
3151 if (!gimple_debug_bind_p (debugstmt))
3152 continue;
3153
3154 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3155 SET_USE (use_p, vexpr);
3156
3157 update_stmt (debugstmt);
3158 }
3159 }
3160 }
3161 set_curr_insn_source_location (sloc);
3162 set_curr_insn_block (sblock);
3163 }
3164
3165 currently_expanding_gimple_stmt = stmt;
3166
3167 /* Expand this statement, then evaluate the resulting RTL and
3168 fixup the CFG accordingly. */
3169 if (gimple_code (stmt) == GIMPLE_COND)
3170 {
3171 new_bb = expand_gimple_cond (bb, stmt);
3172 if (new_bb)
3173 return new_bb;
3174 }
3175 else if (gimple_debug_bind_p (stmt))
3176 {
3177 location_t sloc = get_curr_insn_source_location ();
3178 tree sblock = get_curr_insn_block ();
3179 gimple_stmt_iterator nsi = gsi;
3180
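/* Expand this debug bind and any immediately following ones in one group,
   each becoming a DEBUG_INSN carrying a VAR_LOCATION. */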
3181 for (;;)
3182 {
3183 tree var = gimple_debug_bind_get_var (stmt);
3184 tree value;
3185 rtx val;
3186 enum machine_mode mode;
3187
3188 if (gimple_debug_bind_has_value_p (stmt))
3189 value = gimple_debug_bind_get_value (stmt);
3190 else
3191 value = NULL_TREE;
3192
3193 last = get_last_insn ();
3194
3195 set_curr_insn_source_location (gimple_location (stmt));
3196 set_curr_insn_block (gimple_block (stmt));
3197
3198 if (DECL_P (var))
3199 mode = DECL_MODE (var);
3200 else
3201 mode = TYPE_MODE (TREE_TYPE (var));
3202
3203 val = gen_rtx_VAR_LOCATION
3204 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3205
3206 val = emit_debug_insn (val);
3207
3208 if (dump_file && (dump_flags & TDF_DETAILS))
3209 {
3210 /* We can't dump the insn with a TREE where an RTX
3211 is expected. */
3212 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3213 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3214 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3215 }
3216
3217 /* In order not to generate too many debug temporaries,
3218 we delink all uses of debug statements we already expanded.
3219 Therefore debug statements between definition and real
3220 use of TERed SSA names will continue to use the SSA name,
3221 and not be replaced with debug temps. */
3222 delink_stmt_imm_use (stmt);
3223
3224 gsi = nsi;
3225 gsi_next (&nsi);
3226 if (gsi_end_p (nsi))
3227 break;
3228 stmt = gsi_stmt (nsi);
3229 if (!gimple_debug_bind_p (stmt))
3230 break;
3231 }
3232
3233 set_curr_insn_source_location (sloc);
3234 set_curr_insn_block (sblock);
3235 }
3236 else
3237 {
3238 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3239 {
3240 bool can_fallthru;
3241 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3242 if (new_bb)
3243 {
3244 if (can_fallthru)
3245 bb = new_bb;
3246 else
3247 return new_bb;
3248 }
3249 }
3250 else
3251 {
3252 def_operand_p def_p;
3253 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3254
3255 if (def_p != NULL)
3256 {
3257 /* Ignore this stmt if it is in the list of
3258 replaceable expressions. */
3259 if (SA.values
3260 && bitmap_bit_p (SA.values,
3261 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3262 continue;
3263 }
3264 last = expand_gimple_stmt (stmt);
3265 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3266 }
3267 }
3268 }
3269
3270 currently_expanding_gimple_stmt = NULL;
3271
3272 /* Expand implicit goto and convert goto_locus. */
3273 FOR_EACH_EDGE (e, ei, bb->succs)
3274 {
3275 if (e->goto_locus && e->goto_block)
3276 {
3277 set_curr_insn_source_location (e->goto_locus);
3278 set_curr_insn_block (e->goto_block);
3279 e->goto_locus = curr_insn_locator ();
3280 }
3281 e->goto_block = NULL;
3282 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3283 {
3284 emit_jump (label_rtx_for_bb (e->dest));
3285 e->flags &= ~EDGE_FALLTHRU;
3286 }
3287 }
3288
3289 /* Expanded RTL can create a jump in the last instruction of the block.
3290 This jump might later be assumed to be a jump to the successor and break edge insertion.
3291 We need to insert a dummy move to prevent this. PR41440. */
3292 if (single_succ_p (bb)
3293 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3294 && (last = get_last_insn ())
3295 && JUMP_P (last))
3296 {
3297 rtx dummy = gen_reg_rtx (SImode);
3298 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3299 }
3300
3301 do_pending_stack_adjust ();
3302
3303 /* Find the block tail. The last insn in the block is the insn
3304 before a barrier and/or table jump insn. */
3305 last = get_last_insn ();
3306 if (BARRIER_P (last))
3307 last = PREV_INSN (last);
3308 if (JUMP_TABLE_DATA_P (last))
3309 last = PREV_INSN (PREV_INSN (last));
3310 BB_END (bb) = last;
3311
3312 update_bb_for_insn (bb);
3313
3314 return bb;
3315 }
3316
3317
3318 /* Create a basic block for initialization code. */
3319
3320 static basic_block
3321 construct_init_block (void)
3322 {
3323 basic_block init_block, first_block;
3324 edge e = NULL;
3325 int flags;
3326
3327 /* Multiple entry points not supported yet. */
3328 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3329 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3330 init_rtl_bb_info (EXIT_BLOCK_PTR);
3331 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3332 EXIT_BLOCK_PTR->flags |= BB_RTL;
3333
3334 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3335
3336 /* When the entry edge points to the first basic block, we don't need a jump;
3337 otherwise we have to jump to the proper target. */
3338 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3339 {
3340 tree label = gimple_block_label (e->dest);
3341
3342 emit_jump (label_rtx (label));
3343 flags = 0;
3344 }
3345 else
3346 flags = EDGE_FALLTHRU;
3347
3348 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3349 get_last_insn (),
3350 ENTRY_BLOCK_PTR);
3351 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3352 init_block->count = ENTRY_BLOCK_PTR->count;
3353 if (e)
3354 {
3355 first_block = e->dest;
3356 redirect_edge_succ (e, init_block);
3357 e = make_edge (init_block, first_block, flags);
3358 }
3359 else
3360 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3361 e->probability = REG_BR_PROB_BASE;
3362 e->count = ENTRY_BLOCK_PTR->count;
3363
3364 update_bb_for_insn (init_block);
3365 return init_block;
3366 }
3367
3368 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3369 found in the block tree. */
3370
3371 static void
3372 set_block_levels (tree block, int level)
3373 {
3374 while (block)
3375 {
3376 BLOCK_NUMBER (block) = level;
3377 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3378 block = BLOCK_CHAIN (block);
3379 }
3380 }
3381
3382 /* Create a block containing landing pads and similar stuff. */
3383
3384 static void
3385 construct_exit_block (void)
3386 {
3387 rtx head = get_last_insn ();
3388 rtx end;
3389 basic_block exit_block;
3390 edge e, e2;
3391 unsigned ix;
3392 edge_iterator ei;
3393 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3394
3395 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3396
3397 /* Make sure the locus is set to the end of the function, so that
3398 epilogue line numbers and warnings are set properly. */
3399 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3400 input_location = cfun->function_end_locus;
3401
3402 /* The following insns belong to the top scope. */
3403 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3404
3405 /* Generate rtl for function exit. */
3406 expand_function_end ();
3407
3408 end = get_last_insn ();
3409 if (head == end)
3410 return;
3411 /* While emitting the function end we could move the end of the last basic block.
3412 */
3413 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3414 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3415 head = NEXT_INSN (head);
3416 exit_block = create_basic_block (NEXT_INSN (head), end,
3417 EXIT_BLOCK_PTR->prev_bb);
3418 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3419 exit_block->count = EXIT_BLOCK_PTR->count;
3420
3421 ix = 0;
3422 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3423 {
3424 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3425 if (!(e->flags & EDGE_ABNORMAL))
3426 redirect_edge_succ (e, exit_block);
3427 else
3428 ix++;
3429 }
3430
3431 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3432 e->probability = REG_BR_PROB_BASE;
3433 e->count = EXIT_BLOCK_PTR->count;
3434 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3435 if (e2 != e)
3436 {
3437 e->count -= e2->count;
3438 exit_block->count -= e2->count;
3439 exit_block->frequency -= EDGE_FREQUENCY (e2);
3440 }
3441 if (e->count < 0)
3442 e->count = 0;
3443 if (exit_block->count < 0)
3444 exit_block->count = 0;
3445 if (exit_block->frequency < 0)
3446 exit_block->frequency = 0;
3447 update_bb_for_insn (exit_block);
3448 }
3449
3450 /* Helper function for discover_nonconstant_array_refs.
3451 Look for ARRAY_REF nodes with non-constant indexes and mark them
3452 addressable. */
3453
3454 static tree
3455 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3456 void *data ATTRIBUTE_UNUSED)
3457 {
3458 tree t = *tp;
3459
3460 if (IS_TYPE_OR_DECL_P (t))
3461 *walk_subtrees = 0;
3462 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3463 {
3464 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3465 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3466 && (!TREE_OPERAND (t, 2)
3467 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3468 || (TREE_CODE (t) == COMPONENT_REF
3469 && (!TREE_OPERAND (t,2)
3470 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3471 || TREE_CODE (t) == BIT_FIELD_REF
3472 || TREE_CODE (t) == REALPART_EXPR
3473 || TREE_CODE (t) == IMAGPART_EXPR
3474 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3475 || CONVERT_EXPR_P (t))
3476 t = TREE_OPERAND (t, 0);
3477
3478 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3479 {
3480 t = get_base_address (t);
3481 if (t && DECL_P (t)
3482 && DECL_MODE (t) != BLKmode)
3483 TREE_ADDRESSABLE (t) = 1;
3484 }
3485
3486 *walk_subtrees = 0;
3487 }
3488
3489 return NULL_TREE;
3490 }
3491
3492 /* RTL expansion is not able to compile array references with variable
3493 offsets for arrays stored in a single register. Discover such
3494 expressions and mark variables as addressable to avoid this
3495 scenario. */
3496
3497 static void
3498 discover_nonconstant_array_refs (void)
3499 {
3500 basic_block bb;
3501 gimple_stmt_iterator gsi;
3502
3503 FOR_EACH_BB (bb)
3504 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3505 {
3506 gimple stmt = gsi_stmt (gsi);
3507 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
3508 }
3509 }
3510
3511 /* This function sets crtl->args.internal_arg_pointer to a virtual
3512 register if DRAP is needed. The local register allocator will replace
3513 virtual_incoming_args_rtx with the virtual register. */
3514
3515 static void
3516 expand_stack_alignment (void)
3517 {
3518 rtx drap_rtx;
3519 unsigned int preferred_stack_boundary;
3520
3521 if (! SUPPORTS_STACK_ALIGNMENT)
3522 return;
3523
3524 if (cfun->calls_alloca
3525 || cfun->has_nonlocal_label
3526 || crtl->has_nonlocal_goto)
3527 crtl->need_drap = true;
3528
3529 /* Call update_stack_boundary here again to update incoming stack
3530 boundary. It may set incoming stack alignment to a different
3531 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3532 use the minimum incoming stack alignment to check if it is OK
3533 to perform sibcall optimization since sibcall optimization will
3534 only align the outgoing stack to incoming stack boundary. */
3535 if (targetm.calls.update_stack_boundary)
3536 targetm.calls.update_stack_boundary ();
3537
3538 /* The incoming stack frame has to be aligned at least at
3539 parm_stack_boundary. */
3540 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3541
3542 /* Update crtl->stack_alignment_estimated and use it later to align
3543 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3544 exceptions since callgraph doesn't collect incoming stack alignment
3545 in this case. */
3546 if (flag_non_call_exceptions
3547 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3548 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3549 else
3550 preferred_stack_boundary = crtl->preferred_stack_boundary;
3551 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3552 crtl->stack_alignment_estimated = preferred_stack_boundary;
3553 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3554 crtl->stack_alignment_needed = preferred_stack_boundary;
3555
3556 gcc_assert (crtl->stack_alignment_needed
3557 <= crtl->stack_alignment_estimated);
3558
3559 crtl->stack_realign_needed
3560 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3561 crtl->stack_realign_tried = crtl->stack_realign_needed;
3562
3563 crtl->stack_realign_processed = true;
3564
3565 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3566 alignment. */
3567 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3568 drap_rtx = targetm.calls.get_drap_rtx ();
3569
3570 /* stack_realign_drap and drap_rtx must match. */
3571 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3572
3573 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3574 if (NULL != drap_rtx)
3575 {
3576 crtl->args.internal_arg_pointer = drap_rtx;
3577
3578 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3579 needed. */
3580 fixup_tail_calls ();
3581 }
3582 }
3583
3584 /* Translate the intermediate representation contained in the CFG
3585 from GIMPLE trees to RTL.
3586
3587 We do conversion per basic block and preserve/update the tree CFG.
3588 This implies we have to do some magic as the CFG can simultaneously
3589 consist of basic blocks containing RTL and GIMPLE trees. This can
3590 confuse the CFG hooks, so be careful to not manipulate CFG during
3591 the expansion. */
3592
3593 static unsigned int
3594 gimple_expand_cfg (void)
3595 {
3596 basic_block bb, init_block;
3597 sbitmap blocks;
3598 edge_iterator ei;
3599 edge e;
3600 unsigned i;
3601
3602 rewrite_out_of_ssa (&SA);
3603 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3604 sizeof (rtx));
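/* One RTL object (pseudo or MEM) will be recorded here for each SSA
   partition, via set_rtl during variable expansion and via the parameter
   copy loop below. */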
3605
3606 /* Some backends want to know that we are expanding to RTL. */
3607 currently_expanding_to_rtl = 1;
3608
3609 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3610
3611 insn_locators_alloc ();
3612 if (!DECL_IS_BUILTIN (current_function_decl))
3613 {
3614 /* Eventually, all FEs should explicitly set function_start_locus. */
3615 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3616 set_curr_insn_source_location
3617 (DECL_SOURCE_LOCATION (current_function_decl));
3618 else
3619 set_curr_insn_source_location (cfun->function_start_locus);
3620 }
3621 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3622 prologue_locator = curr_insn_locator ();
3623
3624 /* Make sure first insn is a note even if we don't want linenums.
3625 This makes sure the first insn will never be deleted.
3626 Also, final expects a note to appear there. */
3627 emit_note (NOTE_INSN_DELETED);
3628
3629 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3630 discover_nonconstant_array_refs ();
3631
3632 targetm.expand_to_rtl_hook ();
3633 crtl->stack_alignment_needed = STACK_BOUNDARY;
3634 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3635 crtl->stack_alignment_estimated = 0;
3636 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3637 cfun->cfg->max_jumptable_ents = 0;
3638
3639
3640 /* Expand the variables recorded during gimple lowering. */
3641 expand_used_vars ();
3642
3643 /* Honor stack protection warnings. */
3644 if (warn_stack_protect)
3645 {
3646 if (cfun->calls_alloca)
3647 warning (OPT_Wstack_protector,
3648 "not protecting local variables: variable length buffer");
3649 if (has_short_buffer && !crtl->stack_protect_guard)
3650 warning (OPT_Wstack_protector,
3651 "not protecting function: no buffer at least %d bytes long",
3652 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3653 }
3654
3655 /* Set up parameters and prepare for return, for the function. */
3656 expand_function_start (current_function_decl);
3657
3658 /* Now that we also have the parameter RTXs, copy them over to our
3659 partitions. */
3660 for (i = 0; i < SA.map->num_partitions; i++)
3661 {
3662 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3663
3664 if (TREE_CODE (var) != VAR_DECL
3665 && !SA.partition_to_pseudo[i])
3666 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3667 gcc_assert (SA.partition_to_pseudo[i]);
3668
3669 /* If this decl was marked as living in multiple places, reset
3670 this now to NULL. */
3671 if (DECL_RTL_IF_SET (var) == pc_rtx)
3672 SET_DECL_RTL (var, NULL);
3673
3674 /* Some RTL parts really want to look at DECL_RTL(x) when x
3675 was a decl marked in REG_ATTR or MEM_ATTR. We could use
3676 SET_DECL_RTL here to make this available, but that would mean
3677 selecting one of the potentially many RTLs for one DECL. Instead
3678 of doing that we simply reset the MEM_EXPR of the RTL in question;
3679 then nobody can get at it and hence nobody can call DECL_RTL on it. */
3680 if (!DECL_RTL_SET_P (var))
3681 {
3682 if (MEM_P (SA.partition_to_pseudo[i]))
3683 set_mem_expr (SA.partition_to_pseudo[i], NULL);
3684 }
3685 }
3686
3687 /* If this function is `main', emit a call to `__main'
3688 to run global initializers, etc. */
3689 if (DECL_NAME (current_function_decl)
3690 && MAIN_NAME_P (DECL_NAME (current_function_decl))
3691 && DECL_FILE_SCOPE_P (current_function_decl))
3692 expand_main_function ();
3693
3694 /* Initialize the stack_protect_guard field. This must happen after the
3695 call to __main (if any) so that the external decl is initialized. */
3696 if (crtl->stack_protect_guard)
3697 stack_protect_prologue ();
3698
3699 expand_phi_nodes (&SA);
3700
3701 /* Register rtl specific functions for cfg. */
3702 rtl_register_cfg_hooks ();
3703
3704 init_block = construct_init_block ();
3705
3706 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
3707 remaining edges later. */
3708 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
3709 e->flags &= ~EDGE_EXECUTABLE;
3710
3711 lab_rtx_for_bb = pointer_map_create ();
3712 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
3713 bb = expand_gimple_basic_block (bb);
3714
3715 if (MAY_HAVE_DEBUG_INSNS)
3716 expand_debug_locations ();
3717
3718 execute_free_datastructures ();
3719 finish_out_of_ssa (&SA);
3720
3721 /* We are no longer in SSA form. */
3722 cfun->gimple_df->in_ssa_p = false;
3723
3724 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
3725 conservatively to true until they are all profile aware. */
3726 pointer_map_destroy (lab_rtx_for_bb);
3727 free_histograms ();
3728
3729 construct_exit_block ();
3730 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3731 insn_locators_finalize ();
3732
3733 /* Zap the tree EH table. */
3734 set_eh_throw_stmt_table (cfun, NULL);
3735
3736 rebuild_jump_labels (get_insns ());
3737
3738 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3739 {
3740 edge e;
3741 edge_iterator ei;
3742 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3743 {
3744 if (e->insns.r)
3745 commit_one_edge_insertion (e);
3746 else
3747 ei_next (&ei);
3748 }
3749 }
3750
3751 /* We're done expanding trees to RTL. */
3752 currently_expanding_to_rtl = 0;
3753
3754 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
3755 {
3756 edge e;
3757 edge_iterator ei;
3758 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3759 {
3760 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
3761 e->flags &= ~EDGE_EXECUTABLE;
3762
3763 /* At the moment not all abnormal edges match the RTL
3764 representation. It is safe to remove them here as
3765 find_many_sub_basic_blocks will rediscover them.
3766 In the future we should get this fixed properly. */
3767 if ((e->flags & EDGE_ABNORMAL)
3768 && !(e->flags & EDGE_SIBCALL))
3769 remove_edge (e);
3770 else
3771 ei_next (&ei);
3772 }
3773 }
3774
3775 blocks = sbitmap_alloc (last_basic_block);
3776 sbitmap_ones (blocks);
3777 find_many_sub_basic_blocks (blocks);
3778 sbitmap_free (blocks);
3779 purge_all_dead_edges ();
3780
3781 compact_blocks ();
3782
3783 expand_stack_alignment ();
3784
3785 #ifdef ENABLE_CHECKING
3786 verify_flow_info ();
3787 #endif
3788
3789 /* There's no need to defer outputting this function any more; we
3790 know we want to output it. */
3791 DECL_DEFER_OUTPUT (current_function_decl) = 0;
3792
3793 /* Now that we're done expanding trees to RTL, we shouldn't have any
3794 more CONCATs anywhere. */
3795 generating_concat_p = 0;
3796
3797 if (dump_file)
3798 {
3799 fprintf (dump_file,
3800 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
3801 /* And the pass manager will dump RTL for us. */
3802 }
3803
3804 /* If we're emitting a nested function, make sure its parent gets
3805 emitted as well. Doing otherwise confuses debug info. */
3806 {
3807 tree parent;
3808 for (parent = DECL_CONTEXT (current_function_decl);
3809 parent != NULL_TREE;
3810 parent = get_containing_scope (parent))
3811 if (TREE_CODE (parent) == FUNCTION_DECL)
3812 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
3813 }
3814
3815 /* We are now committed to emitting code for this function. Do any
3816 preparation, such as emitting abstract debug info for the inline
3817 before it gets mangled by optimization. */
3818 if (cgraph_function_possibly_inlined_p (current_function_decl))
3819 (*debug_hooks->outlining_inline_function) (current_function_decl);
3820
3821 TREE_ASM_WRITTEN (current_function_decl) = 1;
3822
3823 /* After expanding, the return labels are no longer needed. */
3824 return_label = NULL;
3825 naked_return_label = NULL;
3826 /* Tag the blocks with a depth number so that change_scope can find
3827 the common parent easily. */
3828 set_block_levels (DECL_INITIAL (cfun->decl), 0);
3829 default_rtl_profile ();
3830 return 0;
3831 }
3832
3833 struct rtl_opt_pass pass_expand =
3834 {
3835 {
3836 RTL_PASS,
3837 "expand", /* name */
3838 NULL, /* gate */
3839 gimple_expand_cfg, /* execute */
3840 NULL, /* sub */
3841 NULL, /* next */
3842 0, /* static_pass_number */
3843 TV_EXPAND, /* tv_id */
3844 PROP_ssa | PROP_gimple_leh | PROP_cfg
3845 | PROP_gimple_lcx, /* properties_required */
3846 PROP_rtl, /* properties_provided */
3847 PROP_ssa | PROP_trees, /* properties_destroyed */
3848 TODO_verify_ssa | TODO_verify_flow
3849 | TODO_verify_stmts, /* todo_flags_start */
3850 TODO_dump_func
3851 | TODO_ggc_collect /* todo_flags_finish */
3852 }
3853 };