/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"

/* Verify that there is exactly one jump instruction emitted since LAST,
   and attach a REG_BR_PROB note to it specifying PROBABILITY.
   ??? We really ought to pass the probability down to RTL expanders and
   let it re-distribute it when the conditional expands into multiple
   conditionals.  This is however difficult to do.  */
static void
add_reg_br_prob_note (FILE *dump_file, rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last);
       last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
        /* It is common to emit a condjump-around-jump sequence when we
           don't know how to reverse the conditional.  Special case
           this.  */
        if (!any_condjump_p (last)
            || !JUMP_P (NEXT_INSN (last))
            || !simplejump_p (NEXT_INSN (last))
            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
          goto failed;
        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
        REG_NOTES (last)
          = gen_rtx_EXPR_LIST (REG_BR_PROB,
                               GEN_INT (REG_BR_PROB_BASE - probability),
                               REG_NOTES (last));
        return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  REG_NOTES (last)
    = gen_rtx_EXPR_LIST (REG_BR_PROB,
                         GEN_INT (probability), REG_NOTES (last));
  return;
 failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}

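/* Editorial note (not part of the original sources): in the
   condjump-around-jump case above, the emitted condjump branches
   *around* the unconditional jump to the true target, so the note
   attached to it records the complement, REG_BR_PROB_BASE - PROBABILITY,
   rather than PROBABILITY itself.  */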

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the
     partition, if this variable becomes its partition's
     representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore any alignment
   we can't satisfy given the expected alignment of the stack
   boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = DECL_ALIGN (decl);
  align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
  if (align > PREFERRED_STACK_BOUNDARY)
    align = PREFERRED_STACK_BOUNDARY;
  if (cfun->stack_alignment_needed < align)
    cfun->stack_alignment_needed = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  return offset;
}

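/* A worked example (editorial, not from the original sources): with
   FRAME_GROWS_DOWNWARD, frame_phase == 0 and frame_offset == -12, a
   request for SIZE == 8 at ALIGN == 8 computes -12 - 8 == -20, rounds
   down to a multiple of 8 via the mask (-20 & -8 == -24), and returns
   -24.  The frame grows by 12 bytes rather than 8 so that the new slot
   is properly aligned.  */
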
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}

/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}

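/* For illustration (editorial, not in the original sources): the lower
   triangle is enumerated row by row, so (0,0) -> 0, (1,0) -> 1,
   (1,1) -> 2, (2,0) -> 3, (2,1) -> 4, (2,2) -> 5.  Because I and J are
   swapped when I < J, the mapping is symmetric, which is why the
   conflict bitmap below needs only N*(N+1)/2 entries for N variables
   instead of N*N.  */
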
/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
          (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then we must add a conflict for these
   variables in the interference graph.  We also need to make sure to add
   conflicts for structures that contain unions.  Otherwise RTL alias
   analysis comes along and, due to type-based aliasing rules, decides
   that for two overlapping union temporaries { short s; int i; },
   accesses to the same memory through different types may not alias, and
   it happily reorders stores across the lifetime boundaries of the
   temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  return 0;
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be
   non-conflicting.  Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the
   end of the partitioning process we'll have a single block that is easy
   to lay out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}

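/* For example (editorial, not from the original sources): if partition
   A's member list is A -> A2 and partition B's is B -> B2, the splice
   above produces A -> B -> B2 -> A2, with every member of B now naming
   A as its representative.  */
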
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          /* Print each member's own offset, not the representative's.  */
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (decl), x);

  /* Set alignment we actually gave this decl.  */
  offset -= frame_phase;
  align = offset & -offset;
  align *= BITS_PER_UNIT;
  if (align > STACK_BOUNDARY || align == 0)
    align = STACK_BOUNDARY;
  DECL_ALIGN (decl) = align;
  DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, decl, true);
  SET_DECL_RTL (decl, x);
}

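/* A note on the alignment computation above (editorial, not from the
   original sources): OFFSET & -OFFSET isolates the lowest set bit of
   the phase-adjusted offset, which is the largest power-of-two byte
   alignment the slot is guaranteed to have.  E.g. an offset of -24
   gives -24 & 24 == 8, i.e. 8-byte alignment; an offset of 0 has no
   set bit, which is why the ALIGN == 0 case falls back to
   STACK_BOUNDARY.  */
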
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 stack_vars[j].offset + offset);
    }
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
  align = get_decl_align_unit (var);
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl
   to a TREE_STATIC VAR_DECL.  */

static void
expand_one_static_var (tree var)
{
  /* In unit-at-a-time all the static variables are expanded at the end
     of compilation process.  */
  if (flag_unit_at_a_time)
    return;
  /* If this is an inlined copy of a static local variable,
     look up the original.  */
  var = DECL_ORIGIN (var);

  /* If we've already processed this variable because of that, do
     nothing.  */
  if (TREE_ASM_WRITTEN (var))
    return;

  /* Give the front end a chance to do whatever.  In practice, this is
     resolving duplicate names for IMA in C.  */
  if (lang_hooks.expand_decl (var))
    return;

  /* Otherwise, just emit the variable.  */
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree type = TREE_TYPE (var);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode reg_mode
    = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
  rtx x = gen_reg_rtx (reg_mode);

  SET_DECL_RTL (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (var))
    {
      mark_user_reg (x);

      /* Trust user variables which have a pointer type to really
         be pointers.  Do not trust compiler generated temporaries
         as our type system is totally busted as it relates to
         pointer arithmetic which translates into lots of compiler
         generated objects with pointer types, but which are not really
         pointers.  */
      if (POINTER_TYPE_P (type))
        mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
    }
}

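/* A promotion example (editorial, not from the original sources): on
   targets whose PROMOTE_MODE widens sub-word values, a 'short' local
   typically gets a full-word pseudo here rather than an HImode one;
   promote_mode also updates UNSIGNEDP to record how the value is
   extended.  */
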
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.  */

static void
expand_one_var (tree var, bool toplevel)
{
  if (TREE_CODE (var) != VAR_DECL)
    lang_hooks.expand_decl (var);
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    expand_one_static_var (var);
  else if (DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    expand_one_error_var (var);
  else if (DECL_HARD_REGISTER (var))
    expand_one_hard_reg_var (var);
  else if (use_register_for_decl (var))
    expand_one_register_var (var);
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (var);
  else
    expand_one_stack_var (var);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

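/* A consequence of the mirroring above (editorial, not from the
   original sources): two locals declared in sibling blocks never get a
   conflict edge from this walk, so they may later be packed into the
   same stack slot, whereas a variable always conflicts with everything
   declared in its own scope and in enclosing scopes.  */
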
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY 1
#define SPCT_HAS_SMALL_CHAR_ARRAY 2
#define SPCT_HAS_ARRAY 4
#define SPCT_HAS_AGGREGATE 8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

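/* Worked examples (editorial, not from the original sources): with
   -fstack-protector-all (flag_stack_protect == 2), "char buf[4]" is
   phase 1 (a bare char array), "int v[8]" is phase 2 (an array but not
   a char array), and a plain "int x" is phase 0.  With plain
   -fstack-protector, only char arrays of at least
   PARAM_SSP_BUFFER_SIZE bytes land in phase 1.  */
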
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  cfun->stack_protect_guard = guard;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
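  /* For instance (editorial, not from the original sources): with a
     16-byte PREFERRED_STACK_BOUNDARY and STARTING_FRAME_OFFSET == 8,
     OFF is 8 and FRAME_PHASE becomes 8, recording that frame offsets
     are misaligned from the boundary by 8 bytes.  */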

  /* Set TREE_USED on all variables in the unexpanded_var_list.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (outer_block);

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;

  /* At this point all variables on the unexpanded_var_list with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* Any variable that could have been hoisted into an SSA_NAME
         will have been propagated anywhere the optimizers chose,
         i.e. not confined to their original block.  Allocate them
         as if they were defined in the outermost scope.  */
      else if (is_gimple_reg (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true);
    }
  cfun->unexpanded_var_list = NULL_TREE;

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (current_function_calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      /* Free up stack variable graph data.  */
      XDELETEVEC (stack_vars);
      XDELETEVEC (stack_vars_sorted);
      XDELETEVEC (stack_vars_conflict);
      stack_vars = NULL;
      stack_vars_alloc = stack_vars_num = 0;
      stack_vars_conflict = NULL;
      stack_vars_conflict_alloc = 0;
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}


/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_generic_expr (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one COND_EXPR.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond_expr (basic_block bb, tree stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  tree pred = COND_EXPR_COND (stmt);
  tree then_exp = COND_EXPR_THEN (stmt);
  tree else_exp = COND_EXPR_ELSE (stmt);
  rtx last2, last;

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (EXPR_LOCUS (stmt))
    {
      emit_line_note (*(EXPR_LOCUS (stmt)));
      record_block_change (TREE_BLOCK (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (TREE_CODE (then_exp) == GOTO_EXPR && IS_EMPTY_STMT (else_exp))
    {
      jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
      add_reg_br_prob_note (dump_file, last, true_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (then_exp))
        emit_line_note (*(EXPR_LOCUS (then_exp)));
      return NULL;
    }
  if (TREE_CODE (else_exp) == GOTO_EXPR && IS_EMPTY_STMT (then_exp))
    {
      jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_exp)));
      add_reg_br_prob_note (dump_file, last, false_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (else_exp))
        emit_line_note (*(EXPR_LOCUS (else_exp)));
      return NULL;
    }
  gcc_assert (TREE_CODE (then_exp) == GOTO_EXPR
              && TREE_CODE (else_exp) == GOTO_EXPR);

  jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
  add_reg_br_prob_note (dump_file, last, true_edge->probability);
  last = get_last_insn ();
  expand_expr (else_exp, const0_rtx, VOIDmode, 0);

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  if (EXPR_LOCUS (else_exp))
    emit_line_note (*(EXPR_LOCUS (else_exp)));

  return new_bb;
}

/* A subroutine of expand_gimple_basic_block.  Expand one CALL_EXPR
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = get_last_insn ();

  expand_expr_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          if (e->dest != EXIT_BLOCK_PTR)
            {
              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
              if (e->dest->count < 0)
                e->dest->count = 0;
              if (e->dest->frequency < 0)
                e->dest->frequency = 0;
            }
          count += e->count;
          probability += e->probability;
          remove_edge (e);
        }
      else
        ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, the sqrt builtin expander expands an if statement
         with a sibcall in the then arm and a label for the else arm.  */
      if (LABEL_P (NEXT_INSN (last)))
        {
          *can_fallthru = true;
          break;
        }
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  return bb;
}

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb, FILE *dump_file)
{
  block_stmt_iterator bsi = bsi_start (bb);
  tree stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n;; Generating RTL for tree basic block %d\n",
               bb->index);
    }

  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  if (!bsi_end_p (bsi))
    stmt = bsi_stmt (bsi);

  if (stmt && TREE_CODE (stmt) == LABEL_EXPR)
    {
      last = get_last_insn ();

      expand_expr_stmt (stmt);

      /* Java emits line number notes at the top of labels.
         ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
        BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      bsi_next (&bsi);
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_tree_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
      e->flags &= ~EDGE_EXECUTABLE;

      /* At the moment not all abnormal edges match the RTL representation.
         It is safe to remove them here as find_many_sub_basic_blocks will
         rediscover them.  In the future we should get this fixed properly.  */
      if (e->flags & EDGE_ABNORMAL)
        remove_edge (e);
      else
        ei_next (&ei);
    }

  for (; !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      basic_block new_bb;

      if (!stmt)
        continue;

      /* Expand this statement, then evaluate the resulting RTL and
         fixup the CFG accordingly.  */
      if (TREE_CODE (stmt) == COND_EXPR)
        {
          new_bb = expand_gimple_cond_expr (bb, stmt);
          if (new_bb)
            return new_bb;
        }
      else
        {
          tree call = get_call_expr_in (stmt);
          if (call && CALL_EXPR_TAILCALL (call))
            {
              bool can_fallthru;
              new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
              if (new_bb)
                {
                  if (can_fallthru)
                    bb = new_bb;
                  else
                    return new_bb;
                }
            }
          else
            {
              last = get_last_insn ();
              expand_expr_stmt (stmt);
              maybe_dump_rtl_for_tree_stmt (stmt, last);
            }
        }
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}


/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need a
     jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = tree_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   get_last_insn (),
                                   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}


/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
#ifdef USE_MAPPED_LOCATION
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
#else
  if (cfun->function_end_locus.file)
#endif
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  record_block_change (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
                                   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
        redirect_edge_succ (e, exit_block);
      else
        ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
        e->count -= e2->count;
        exit_block->count -= e2->count;
        exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == NOP_EXPR
             || TREE_CODE (t) == CONVERT_EXPR)
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t))
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark the variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r,
                   NULL, NULL);
    }
}

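/* An illustration (editorial, not from the original sources): for

     int v[2];
     ...
     return v[i];

   with non-constant I, the walk above marks V as TREE_ADDRESSABLE, so
   V is given a stack slot; a small array kept entirely in a register
   could not be indexed at a variable offset.  */
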
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful not to manipulate the CFG during
   the expansion.  */

static void
tree_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  /* Prepare the rtl middle end to start recording block changes.  */
  reset_block_changes ();

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (current_function_calls_alloca)
        warning (0, "not protecting local variables: variable length buffer");
      if (has_short_buffer && !cfun->stack_protect_guard)
        warning (0, "not protecting function: no buffer at least %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (cfun->stack_protect_guard)
    stack_protect_prologue ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb, dump_file);

  construct_exit_block ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  /* Convert tree EH labels to RTL EH labels, and clean out any unreachable
     EH regions.  */
  convert_from_eh_region_ranges ();

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  sbitmap_free (blocks);

  compact_blocks ();
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  finalize_block_changes ();

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function, before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
}

struct tree_opt_pass pass_expand =
{
  "expand",                             /* name */
  NULL,                                 /* gate */
  tree_expand_cfg,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh | PROP_cfg,           /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_gimple_leh,                      /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'r'                                   /* letter */
};