1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "toplev.h"
40 #include "debug.h"
41 #include "params.h"
42 #include "tree-inline.h"
43 #include "value-prof.h"
44 #include "target.h"
45 #include "ssaexpand.h"
46
47
48 /* This variable holds information that helps with rewriting SSA trees
49 into RTL. */
50 struct ssaexpand SA;
51
52 /* Return an expression tree corresponding to the RHS of GIMPLE
53 statement STMT. */
54
55 tree
56 gimple_assign_rhs_to_tree (gimple stmt)
57 {
58 tree t;
59 enum gimple_rhs_class grhs_class;
60
61 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
62
63 if (grhs_class == GIMPLE_BINARY_RHS)
64 t = build2 (gimple_assign_rhs_code (stmt),
65 TREE_TYPE (gimple_assign_lhs (stmt)),
66 gimple_assign_rhs1 (stmt),
67 gimple_assign_rhs2 (stmt));
68 else if (grhs_class == GIMPLE_UNARY_RHS)
69 t = build1 (gimple_assign_rhs_code (stmt),
70 TREE_TYPE (gimple_assign_lhs (stmt)),
71 gimple_assign_rhs1 (stmt));
72 else if (grhs_class == GIMPLE_SINGLE_RHS)
73 t = gimple_assign_rhs1 (stmt);
74 else
75 gcc_unreachable ();
76
77 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
78 SET_EXPR_LOCATION (t, gimple_location (stmt));
79
80 return t;
81 }
82
83 /* Return an expression tree corresponding to the PREDICATE of GIMPLE_COND
84 statement STMT. */
85
86 static tree
87 gimple_cond_pred_to_tree (gimple stmt)
88 {
89 /* We're sometimes presented with such code:
90 D.123_1 = x < y;
91 if (D.123_1 != 0)
92 ...
93 This would expand to two comparisons which then later might
94 be cleaned up by combine. But some pattern matchers like if-conversion
95 work better when there's only one compare, so make up for this
96 here as a special exception if TER would have made the same change. */
97 tree lhs = gimple_cond_lhs (stmt);
98 if (SA.values
99 && TREE_CODE (lhs) == SSA_NAME
100 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (lhs)))
101 lhs = gimple_assign_rhs_to_tree (SSA_NAME_DEF_STMT (lhs));
102
103 return build2 (gimple_cond_code (stmt), boolean_type_node,
104 lhs, gimple_cond_rhs (stmt));
105 }
106
107 /* Helper for gimple_to_tree. Set EXPR_LOCATION for every expression
108 inside *TP. DATA is the location to set. */
109
110 static tree
111 set_expr_location_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data)
112 {
113 location_t *loc = (location_t *) data;
114 if (EXPR_P (*tp))
115 SET_EXPR_LOCATION (*tp, *loc);
116
117 return NULL_TREE;
118 }
119
120
121 /* RTL expansion has traditionally been done on trees, so the
122 transition to doing it on GIMPLE tuples is very invasive to the RTL
123 expander. To facilitate the transition, this function takes a
124 GIMPLE tuple STMT and returns the same statement in the form of a
125 tree. */
126
127 static tree
128 gimple_to_tree (gimple stmt)
129 {
130 tree t;
131 int rn;
132 tree_ann_common_t ann;
133 location_t loc;
134
135 switch (gimple_code (stmt))
136 {
137 case GIMPLE_ASSIGN:
138 {
139 tree lhs = gimple_assign_lhs (stmt);
140
141 t = gimple_assign_rhs_to_tree (stmt);
142 t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
143 if (gimple_assign_nontemporal_move_p (stmt))
144 MOVE_NONTEMPORAL (t) = true;
145 }
146 break;
147
148 case GIMPLE_COND:
149 t = gimple_cond_pred_to_tree (stmt);
150 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
151 break;
152
153 case GIMPLE_GOTO:
154 t = build1 (GOTO_EXPR, void_type_node, gimple_goto_dest (stmt));
155 break;
156
157 case GIMPLE_LABEL:
158 t = build1 (LABEL_EXPR, void_type_node, gimple_label_label (stmt));
159 break;
160
161 case GIMPLE_RETURN:
162 {
163 tree retval = gimple_return_retval (stmt);
164
165 if (retval && retval != error_mark_node)
166 {
167 tree result = DECL_RESULT (current_function_decl);
168
169 /* If we are not returning the current function's RESULT_DECL,
170 build an assignment to it. */
171 if (retval != result)
172 {
173 /* I believe that a function's RESULT_DECL is unique. */
174 gcc_assert (TREE_CODE (retval) != RESULT_DECL);
175
176 retval = build2 (MODIFY_EXPR, TREE_TYPE (result),
177 result, retval);
178 }
179 }
180 t = build1 (RETURN_EXPR, void_type_node, retval);
181 }
182 break;
183
184 case GIMPLE_ASM:
185 {
186 size_t i, n;
187 tree out, in, cl;
188 const char *s;
189
190 out = NULL_TREE;
191 n = gimple_asm_noutputs (stmt);
192 if (n > 0)
193 {
194 t = out = gimple_asm_output_op (stmt, 0);
195 for (i = 1; i < n; i++)
196 {
197 TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
198 t = gimple_asm_output_op (stmt, i);
199 }
200 }
201
202 in = NULL_TREE;
203 n = gimple_asm_ninputs (stmt);
204 if (n > 0)
205 {
206 t = in = gimple_asm_input_op (stmt, 0);
207 for (i = 1; i < n; i++)
208 {
209 TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
210 t = gimple_asm_input_op (stmt, i);
211 }
212 }
213
214 cl = NULL_TREE;
215 n = gimple_asm_nclobbers (stmt);
216 if (n > 0)
217 {
218 t = cl = gimple_asm_clobber_op (stmt, 0);
219 for (i = 1; i < n; i++)
220 {
221 TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
222 t = gimple_asm_clobber_op (stmt, i);
223 }
224 }
225
226 s = gimple_asm_string (stmt);
227 t = build4 (ASM_EXPR, void_type_node, build_string (strlen (s), s),
228 out, in, cl);
229 ASM_VOLATILE_P (t) = gimple_asm_volatile_p (stmt);
230 ASM_INPUT_P (t) = gimple_asm_input_p (stmt);
231 }
232 break;
233
234 case GIMPLE_CALL:
235 {
236 size_t i;
237 tree fn;
238 tree_ann_common_t ann;
239
240 t = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
241
242 CALL_EXPR_FN (t) = gimple_call_fn (stmt);
243 TREE_TYPE (t) = gimple_call_return_type (stmt);
244 CALL_EXPR_STATIC_CHAIN (t) = gimple_call_chain (stmt);
245
246 for (i = 0; i < gimple_call_num_args (stmt); i++)
247 CALL_EXPR_ARG (t, i) = gimple_call_arg (stmt, i);
248
249 if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
250 TREE_SIDE_EFFECTS (t) = 1;
251
252 if (gimple_call_flags (stmt) & ECF_NOTHROW)
253 TREE_NOTHROW (t) = 1;
254
255 CALL_EXPR_TAILCALL (t) = gimple_call_tail_p (stmt);
256 CALL_EXPR_RETURN_SLOT_OPT (t) = gimple_call_return_slot_opt_p (stmt);
257 CALL_FROM_THUNK_P (t) = gimple_call_from_thunk_p (stmt);
258 CALL_CANNOT_INLINE_P (t) = gimple_call_cannot_inline_p (stmt);
259 CALL_EXPR_VA_ARG_PACK (t) = gimple_call_va_arg_pack_p (stmt);
260
261 /* If the call has an LHS then create a MODIFY_EXPR to hold it. */
262 {
263 tree lhs = gimple_call_lhs (stmt);
264
265 if (lhs)
266 t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
267 }
268
269 /* Record the original call statement, as it may be used
270 to retrieve profile information during expansion. */
271
272 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
273 && DECL_BUILT_IN (fn))
274 {
275 ann = get_tree_common_ann (t);
276 ann->stmt = stmt;
277 }
278 }
279 break;
280
281 case GIMPLE_SWITCH:
282 {
283 tree label_vec;
284 size_t i;
285 tree elt = gimple_switch_label (stmt, 0);
286
287 label_vec = make_tree_vec (gimple_switch_num_labels (stmt));
288
289 if (!CASE_LOW (elt) && !CASE_HIGH (elt))
290 {
291 for (i = 1; i < gimple_switch_num_labels (stmt); i++)
292 TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, i);
293
294 /* The default case in a SWITCH_EXPR must be at the end of
295 the label vector. */
296 TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, 0);
297 }
298 else
299 {
300 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
301 TREE_VEC_ELT (label_vec, i) = gimple_switch_label (stmt, i);
302 }
303
304 t = build3 (SWITCH_EXPR, void_type_node, gimple_switch_index (stmt),
305 NULL, label_vec);
306 }
307 break;
308
309 case GIMPLE_NOP:
310 case GIMPLE_PREDICT:
311 t = build1 (NOP_EXPR, void_type_node, size_zero_node);
312 break;
313
314 case GIMPLE_RESX:
315 t = build_resx (gimple_resx_region (stmt));
316 break;
317
318 default:
319 if (errorcount == 0)
320 {
321 error ("Unrecognized GIMPLE statement during RTL expansion");
322 print_gimple_stmt (stderr, stmt, 4, 0);
323 gcc_unreachable ();
324 }
325 else
326 {
327 /* Ignore any bad gimple codes if we're going to die anyhow,
328 so we can at least set TREE_ASM_WRITTEN and have the rest
329 of compilation advance without sudden ICE death. */
330 t = build1 (NOP_EXPR, void_type_node, size_zero_node);
331 break;
332 }
333 }
334
335 /* If STMT is inside an exception region, record it in the generated
336 expression. */
337 rn = lookup_stmt_eh_region (stmt);
338 if (rn >= 0)
339 {
340 tree call = get_call_expr_in (t);
341
342 ann = get_tree_common_ann (t);
343 ann->rn = rn;
344
345 /* For a CALL_EXPR on the RHS of an assignment, calls.c looks up
346 the CALL_EXPR, not the assignment statement, for the EH region number. */
347 if (call && call != t)
348 {
349 ann = get_tree_common_ann (call);
350 ann->rn = rn;
351 }
352 }
353
354 /* Set EXPR_LOCATION in all the embedded expressions. */
355 loc = gimple_location (stmt);
356 walk_tree (&t, set_expr_location_r, (void *) &loc, NULL);
357
358 TREE_BLOCK (t) = gimple_block (stmt);
359
360 return t;
361 }
362
363
364 /* Release back to GC memory allocated by gimple_to_tree. */
365
366 static void
367 release_stmt_tree (gimple stmt, tree stmt_tree)
368 {
369 tree_ann_common_t ann;
370
371 switch (gimple_code (stmt))
372 {
373 case GIMPLE_ASSIGN:
374 if (get_gimple_rhs_class (gimple_expr_code (stmt)) != GIMPLE_SINGLE_RHS)
375 ggc_free (TREE_OPERAND (stmt_tree, 1));
376 break;
377 case GIMPLE_COND:
378 ggc_free (COND_EXPR_COND (stmt_tree));
379 break;
380 case GIMPLE_RETURN:
381 if (TREE_OPERAND (stmt_tree, 0)
382 && TREE_CODE (TREE_OPERAND (stmt_tree, 0)) == MODIFY_EXPR)
383 ggc_free (TREE_OPERAND (stmt_tree, 0));
384 break;
385 case GIMPLE_CALL:
386 if (gimple_call_lhs (stmt))
387 {
388 ann = tree_common_ann (TREE_OPERAND (stmt_tree, 1));
389 if (ann)
390 ggc_free (ann);
391 ggc_free (TREE_OPERAND (stmt_tree, 1));
392 }
393 break;
394 default:
395 break;
396 }
397 ann = tree_common_ann (stmt_tree);
398 if (ann)
399 ggc_free (ann);
400 ggc_free (stmt_tree);
401 }
402
403
404 /* Verify that there is exactly one jump instruction emitted since LAST and
405 attach a REG_BR_PROB note specifying the branch probability.
406 ??? We really ought to pass the probability down to RTL expanders and let it
407 re-distribute it when the conditional expands into multiple conditionals.
408 This is however difficult to do. */
409 void
410 add_reg_br_prob_note (rtx last, int probability)
411 {
412 if (profile_status == PROFILE_ABSENT)
413 return;
414 for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
415 if (JUMP_P (last))
416 {
417 /* It is common to emit a condjump-around-jump sequence when we don't know
418 how to reverse the conditional. Special case this. */
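/* The accepted shape is: LAST is a conditional jump, immediately followed
   by an unconditional simple jump, a barrier, and a label, with nothing
   emitted after the label.  */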
419 if (!any_condjump_p (last)
420 || !JUMP_P (NEXT_INSN (last))
421 || !simplejump_p (NEXT_INSN (last))
422 || !NEXT_INSN (NEXT_INSN (last))
423 || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
424 || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
425 || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
426 || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
427 goto failed;
428 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
429 add_reg_note (last, REG_BR_PROB,
430 GEN_INT (REG_BR_PROB_BASE - probability));
431 return;
432 }
433 if (!last || !JUMP_P (last) || !any_condjump_p (last))
434 goto failed;
435 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
436 add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
437 return;
438 failed:
439 if (dump_file)
440 fprintf (dump_file, "Failed to add probability note\n");
441 }
442
443
444 #ifndef STACK_ALIGNMENT_NEEDED
445 #define STACK_ALIGNMENT_NEEDED 1
446 #endif
447
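/* SSAVAR (X) is the underlying variable of an SSA_NAME X, or X itself
   when X is already a plain decl.  */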
448 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
449
450 /* Associate declaration T with storage space X. If T is not an
451 SSA name this is exactly SET_DECL_RTL; otherwise make the
452 partition of T associated with X. */
453 static inline void
454 set_rtl (tree t, rtx x)
455 {
456 if (TREE_CODE (t) == SSA_NAME)
457 {
458 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
459 if (x && !MEM_P (x))
460 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
461 /* For the benefit of debug information at -O0 (where vartracking
462 doesn't run) record the place also in the base DECL if it's
463 a normal variable (not a parameter). */
464 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
465 {
466 tree var = SSA_NAME_VAR (t);
467 /* If we don't yet have something recorded, just record it now. */
468 if (!DECL_RTL_SET_P (var))
469 SET_DECL_RTL (var, x);
470 /* If we already have it set to "multiple places", don't
471 change this. */
472 else if (DECL_RTL (var) == pc_rtx)
473 ;
474 /* If we have something recorded and it's not the same place
475 as we want to record now, we have multiple partitions for the
476 same base variable, with different places. We can't just
477 randomly choose one, hence we have to say that we don't know.
478 This only happens with optimization, and there var-tracking
479 will figure out the right thing. */
480 else if (DECL_RTL (var) != x)
481 SET_DECL_RTL (var, pc_rtx);
482 }
483 }
484 else
485 SET_DECL_RTL (t, x);
486 }
487
488 /* This structure holds data relevant to one variable that will be
489 placed in a stack slot. */
490 struct stack_var
491 {
492 /* The Variable. */
493 tree decl;
494
495 /* The offset of the variable. During partitioning, this is the
496 offset relative to the partition. After partitioning, this
497 is relative to the stack frame. */
498 HOST_WIDE_INT offset;
499
500 /* Initially, the size of the variable. Later, the size of the partition,
501 if this variable becomes its partition's representative. */
502 HOST_WIDE_INT size;
503
504 /* The *byte* alignment required for this variable. Or, as with the
505 size, the alignment for this partition. */
506 unsigned int alignb;
507
508 /* The partition representative. */
509 size_t representative;
510
511 /* The next stack variable in the partition, or EOC. */
512 size_t next;
513 };
514
515 #define EOC ((size_t)-1)
516
517 /* We have an array of such objects while deciding allocation. */
518 static struct stack_var *stack_vars;
519 static size_t stack_vars_alloc;
520 static size_t stack_vars_num;
521
522 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
523 is non-decreasing. */
524 static size_t *stack_vars_sorted;
525
526 /* We have an interference graph between such objects. This graph
527 is lower triangular. */
528 static bool *stack_vars_conflict;
529 static size_t stack_vars_conflict_alloc;
530
531 /* The phase of the stack frame. This is the known misalignment of
532 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
533 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
534 static int frame_phase;
535
536 /* Used during expand_used_vars to remember if we saw any decls for
537 which we'd like to enable stack smashing protection. */
538 static bool has_protected_decls;
539
540 /* Used during expand_used_vars. Remember if we saw a character buffer
541 smaller than our cutoff threshold. Used for -Wstack-protector. */
542 static bool has_short_buffer;
543
544 /* Discover the byte alignment to use for DECL. Ignore alignment
545 we can't satisfy with the expected alignment of the stack boundary. */
546
547 static unsigned int
548 get_decl_align_unit (tree decl)
549 {
550 unsigned int align;
551
552 align = LOCAL_DECL_ALIGNMENT (decl);
553
554 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
555 align = MAX_SUPPORTED_STACK_ALIGNMENT;
556
557 if (SUPPORTS_STACK_ALIGNMENT)
558 {
559 if (crtl->stack_alignment_estimated < align)
560 {
561 gcc_assert(!crtl->stack_realign_processed);
562 crtl->stack_alignment_estimated = align;
563 }
564 }
565
566 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
567 So here we only make sure stack_alignment_needed >= align. */
568 if (crtl->stack_alignment_needed < align)
569 crtl->stack_alignment_needed = align;
570 if (crtl->max_used_stack_slot_alignment < align)
571 crtl->max_used_stack_slot_alignment = align;
572
573 return align / BITS_PER_UNIT;
574 }
575
576 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
577 Return the frame offset. */
578
579 static HOST_WIDE_INT
580 alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
581 {
582 HOST_WIDE_INT offset, new_frame_offset;
583
584 new_frame_offset = frame_offset;
585 if (FRAME_GROWS_DOWNWARD)
586 {
587 new_frame_offset -= size + frame_phase;
588 new_frame_offset &= -align;
589 new_frame_offset += frame_phase;
590 offset = new_frame_offset;
591 }
592 else
593 {
594 new_frame_offset -= frame_phase;
595 new_frame_offset += align - 1;
596 new_frame_offset &= -align;
597 new_frame_offset += frame_phase;
598 offset = new_frame_offset;
599 new_frame_offset += size;
600 }
601 frame_offset = new_frame_offset;
602
603 if (frame_offset_overflow (frame_offset, cfun->decl))
604 frame_offset = offset = 0;
605
606 return offset;
607 }
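
/* For example, with FRAME_GROWS_DOWNWARD, frame_phase == 0 and
   frame_offset == -12, a request for SIZE == 8 and ALIGN == 8 computes
   (-12 - 8) & -8 == -24, so the new slot occupies offsets [-24, -16)
   and the function returns -24.  */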
608
609 /* Accumulate DECL into STACK_VARS. */
610
611 static void
612 add_stack_var (tree decl)
613 {
614 if (stack_vars_num >= stack_vars_alloc)
615 {
616 if (stack_vars_alloc)
617 stack_vars_alloc = stack_vars_alloc * 3 / 2;
618 else
619 stack_vars_alloc = 32;
620 stack_vars
621 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
622 }
623 stack_vars[stack_vars_num].decl = decl;
624 stack_vars[stack_vars_num].offset = 0;
625 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
626 stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));
627
628 /* All variables are initially in their own partition. */
629 stack_vars[stack_vars_num].representative = stack_vars_num;
630 stack_vars[stack_vars_num].next = EOC;
631
632 /* Ensure that this decl doesn't get put onto the list twice. */
633 set_rtl (decl, pc_rtx);
634
635 stack_vars_num++;
636 }
637
638 /* Compute the linear index of a lower-triangular coordinate (I, J). */
639
640 static size_t
641 triangular_index (size_t i, size_t j)
642 {
643 if (i < j)
644 {
645 size_t t;
646 t = i, i = j, j = t;
647 }
648 return (i * (i + 1)) / 2 + j;
649 }
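
/* For example, (0,0) maps to 0, (1,0) to 1, (1,1) to 2 and (2,0) to 3,
   so N objects need triangular_index (N-1, N-1) + 1 == N*(N+1)/2 entries,
   which is exactly what resize_stack_vars_conflict allocates below.  */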
650
651 /* Ensure that STACK_VARS_CONFLICT is large enough for N objects. */
652
653 static void
654 resize_stack_vars_conflict (size_t n)
655 {
656 size_t size = triangular_index (n-1, n-1) + 1;
657
658 if (size <= stack_vars_conflict_alloc)
659 return;
660
661 stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
662 memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
663 (size - stack_vars_conflict_alloc) * sizeof (bool));
664 stack_vars_conflict_alloc = size;
665 }
666
667 /* Make the decls associated with luids X and Y conflict. */
668
669 static void
670 add_stack_var_conflict (size_t x, size_t y)
671 {
672 size_t index = triangular_index (x, y);
673 gcc_assert (index < stack_vars_conflict_alloc);
674 stack_vars_conflict[index] = true;
675 }
676
677 /* Check whether the decls associated with luids X and Y conflict. */
678
679 static bool
680 stack_var_conflict_p (size_t x, size_t y)
681 {
682 size_t index = triangular_index (x, y);
683 gcc_assert (index < stack_vars_conflict_alloc);
684 return stack_vars_conflict[index];
685 }
686
687 /* Returns true if TYPE is or contains a union type. */
688
689 static bool
690 aggregate_contains_union_type (tree type)
691 {
692 tree field;
693
694 if (TREE_CODE (type) == UNION_TYPE
695 || TREE_CODE (type) == QUAL_UNION_TYPE)
696 return true;
697 if (TREE_CODE (type) == ARRAY_TYPE)
698 return aggregate_contains_union_type (TREE_TYPE (type));
699 if (TREE_CODE (type) != RECORD_TYPE)
700 return false;
701
702 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
703 if (TREE_CODE (field) == FIELD_DECL)
704 if (aggregate_contains_union_type (TREE_TYPE (field)))
705 return true;
706
707 return false;
708 }
709
710 /* A subroutine of expand_used_vars. If two variables X and Y have alias
711 sets that do not conflict, then do add a conflict for these variables
712 in the interference graph. We also need to make sure to add conflicts
713 for union-containing structures. Otherwise RTL alias analysis comes along
714 and, due to type-based aliasing rules, decides that for two overlapping
715 union temporaries { short s; int i; } accesses to the same mem through
716 different types may not alias, and happily reorders stores across
717 lifetime boundaries of the temporaries (see PR25654).
718 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
719
720 static void
721 add_alias_set_conflicts (void)
722 {
723 size_t i, j, n = stack_vars_num;
724
725 for (i = 0; i < n; ++i)
726 {
727 tree type_i = TREE_TYPE (stack_vars[i].decl);
728 bool aggr_i = AGGREGATE_TYPE_P (type_i);
729 bool contains_union;
730
731 contains_union = aggregate_contains_union_type (type_i);
732 for (j = 0; j < i; ++j)
733 {
734 tree type_j = TREE_TYPE (stack_vars[j].decl);
735 bool aggr_j = AGGREGATE_TYPE_P (type_j);
736 if (aggr_i != aggr_j
737 /* Either the objects conflict by means of type based
738 aliasing rules, or we need to add a conflict. */
739 || !objects_must_conflict_p (type_i, type_j)
740 /* In case the types do not conflict ensure that access
741 to elements will conflict. In case of unions we have
742 to be careful as type based aliasing rules may say
743 access to the same memory does not conflict. So play
744 safe and add a conflict in this case. */
745 || contains_union)
746 add_stack_var_conflict (i, j);
747 }
748 }
749 }
750
751 /* A subroutine of partition_stack_vars. A comparison function for qsort,
752 sorting an array of indices by the size and type of the object. */
753
754 static int
755 stack_var_size_cmp (const void *a, const void *b)
756 {
757 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
758 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
759 tree decla, declb;
760 unsigned int uida, uidb;
761
762 if (sa < sb)
763 return -1;
764 if (sa > sb)
765 return 1;
766 decla = stack_vars[*(const size_t *)a].decl;
767 declb = stack_vars[*(const size_t *)b].decl;
768 /* For stack variables of the same size, use an id of the decls
769 to make the sort stable. Two SSA names are compared by their
770 version, SSA names come before non-SSA names, and two normal
771 decls are compared by their DECL_UID. */
772 if (TREE_CODE (decla) == SSA_NAME)
773 {
774 if (TREE_CODE (declb) == SSA_NAME)
775 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
776 else
777 return -1;
778 }
779 else if (TREE_CODE (declb) == SSA_NAME)
780 return 1;
781 else
782 uida = DECL_UID (decla), uidb = DECL_UID (declb);
783 if (uida < uidb)
784 return -1;
785 if (uida > uidb)
786 return 1;
787 return 0;
788 }
789
790
791 /* If the points-to solution *PT points to variables that are in a partition
792 together with other variables, add all partition members to the pointed-to
793 variables bitmap. */
794
795 static void
796 add_partitioned_vars_to_ptset (struct pt_solution *pt,
797 struct pointer_map_t *decls_to_partitions,
798 struct pointer_set_t *visited, bitmap temp)
799 {
800 bitmap_iterator bi;
801 unsigned i;
802 bitmap *part;
803
804 if (pt->anything
805 || pt->vars == NULL
806 /* The pointed-to vars bitmap is shared, it is enough to
807 visit it once. */
808 || pointer_set_insert(visited, pt->vars))
809 return;
810
811 bitmap_clear (temp);
812
813 /* By using a temporary bitmap to store all members of the partitions
814 we have to add, we make sure to visit each of the partitions only
815 once. */
816 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
817 if ((!temp
818 || !bitmap_bit_p (temp, i))
819 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
820 (void *)(size_t) i)))
821 bitmap_ior_into (temp, *part);
822 if (!bitmap_empty_p (temp))
823 bitmap_ior_into (pt->vars, temp);
824 }
825
826 /* Update points-to sets based on partition info, so we can use them on RTL.
827 The bitmaps representing stack partitions will be saved until expand,
828 where partitioned decls used as bases in memory expressions will be
829 rewritten. */
830
831 static void
832 update_alias_info_with_stack_vars (void)
833 {
834 struct pointer_map_t *decls_to_partitions = NULL;
835 size_t i, j;
836 tree var = NULL_TREE;
837
838 for (i = 0; i < stack_vars_num; i++)
839 {
840 bitmap part = NULL;
841 tree name;
842 struct ptr_info_def *pi;
843
844 /* Not interested in partitions with a single variable. */
845 if (stack_vars[i].representative != i
846 || stack_vars[i].next == EOC)
847 continue;
848
849 if (!decls_to_partitions)
850 {
851 decls_to_partitions = pointer_map_create ();
852 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
853 }
854
855 /* Create an SSA_NAME that points to the partition for use
856 as base during alias-oracle queries on RTL for bases that
857 have been partitioned. */
858 if (var == NULL_TREE)
859 var = create_tmp_var (ptr_type_node, NULL);
860 name = make_ssa_name (var, NULL);
861
862 /* Create bitmaps representing partitions. They will be used for
863 points-to sets later, so use GGC alloc. */
864 part = BITMAP_GGC_ALLOC ();
865 for (j = i; j != EOC; j = stack_vars[j].next)
866 {
867 tree decl = stack_vars[j].decl;
868 unsigned int uid = DECL_UID (decl);
869 /* We should never end up partitioning SSA names (though they
870 may end up on the stack). Neither should we allocate stack
871 space to something that is unused and thus unreferenced. */
872 gcc_assert (DECL_P (decl)
873 && referenced_var_lookup (uid));
874 bitmap_set_bit (part, uid);
875 *((bitmap *) pointer_map_insert (decls_to_partitions,
876 (void *)(size_t) uid)) = part;
877 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
878 decl)) = name;
879 }
880
881 /* Make the SSA name point to all partition members. */
882 pi = get_ptr_info (name);
883 pt_solution_set (&pi->pt, part);
884 }
885
886 /* Make all points-to sets that contain one member of a partition
887 contain all members of the partition. */
888 if (decls_to_partitions)
889 {
890 unsigned i;
891 struct pointer_set_t *visited = pointer_set_create ();
892 bitmap temp = BITMAP_ALLOC (NULL);
893
894 for (i = 1; i < num_ssa_names; i++)
895 {
896 tree name = ssa_name (i);
897 struct ptr_info_def *pi;
898
899 if (name
900 && POINTER_TYPE_P (TREE_TYPE (name))
901 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
902 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
903 visited, temp);
904 }
905
906 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
907 decls_to_partitions, visited, temp);
908 add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
909 decls_to_partitions, visited, temp);
910
911 pointer_set_destroy (visited);
912 pointer_map_destroy (decls_to_partitions);
913 BITMAP_FREE (temp);
914 }
915 }
916
917 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
918 partitioning algorithm. Partitions A and B are known to be non-conflicting.
919 Merge them into a single partition A.
920
921 At the same time, add OFFSET to all variables in partition B. At the end
922 of the partitioning process we'll have a nice block that is easy to lay out within
923 the stack frame. */
924
925 static void
926 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
927 {
928 size_t i, last;
929
930 /* Update each element of partition B with the given offset,
931 and merge them into partition A. */
932 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
933 {
934 stack_vars[i].offset += offset;
935 stack_vars[i].representative = a;
936 }
937 stack_vars[last].next = stack_vars[a].next;
938 stack_vars[a].next = b;
939
940 /* Update the required alignment of partition A to account for B. */
941 if (stack_vars[a].alignb < stack_vars[b].alignb)
942 stack_vars[a].alignb = stack_vars[b].alignb;
943
944 /* Update the interference graph and merge the conflicts. */
945 for (last = stack_vars_num, i = 0; i < last; ++i)
946 if (stack_var_conflict_p (b, i))
947 add_stack_var_conflict (a, i);
948 }
949
950 /* A subroutine of expand_used_vars. Binpack the variables into
951 partitions constrained by the interference graph. The overall
952 algorithm used is as follows:
953
954 Sort the objects by size.
955 For each object A {
956 S = size(A)
957 O = 0
958 loop {
959 Look for the largest non-conflicting object B with size <= S.
960 UNION (A, B)
961 offset(B) = O
962 O += size(B)
963 S -= size(B)
964 }
965 }
966 */
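
/* For example, given three non-conflicting variables of sizes 32, 8 and 4
   (sorted to 4, 8, 32), processing the 8-byte variable absorbs the 4-byte
   one at offset 0, and processing the 32-byte variable then absorbs that
   whole partition, again at offset 0.  All three end up sharing a single
   32-byte slot instead of needing 44 bytes.  */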
967
968 static void
969 partition_stack_vars (void)
970 {
971 size_t si, sj, n = stack_vars_num;
972
973 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
974 for (si = 0; si < n; ++si)
975 stack_vars_sorted[si] = si;
976
977 if (n == 1)
978 return;
979
980 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
981
982 /* Special case: detect when all variables conflict, and thus we can't
983 do anything during the partitioning loop. It isn't uncommon (with
984 C code at least) to declare all variables at the top of the function,
985 and if we're not inlining, then all variables will be in the same scope.
986 Take advantage of very fast libc routines for this scan. */
987 gcc_assert (sizeof(bool) == sizeof(char));
988 if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
989 return;
990
991 for (si = 0; si < n; ++si)
992 {
993 size_t i = stack_vars_sorted[si];
994 HOST_WIDE_INT isize = stack_vars[i].size;
995 HOST_WIDE_INT offset = 0;
996
997 for (sj = si; sj-- > 0; )
998 {
999 size_t j = stack_vars_sorted[sj];
1000 HOST_WIDE_INT jsize = stack_vars[j].size;
1001 unsigned int jalign = stack_vars[j].alignb;
1002
1003 /* Ignore objects that aren't partition representatives. */
1004 if (stack_vars[j].representative != j)
1005 continue;
1006
1007 /* Ignore objects too large for the remaining space. */
1008 if (isize < jsize)
1009 continue;
1010
1011 /* Ignore conflicting objects. */
1012 if (stack_var_conflict_p (i, j))
1013 continue;
1014
1015 /* Refine the remaining space check to include alignment. */
1016 if (offset & (jalign - 1))
1017 {
1018 HOST_WIDE_INT toff = offset;
1019 toff += jalign - 1;
1020 toff &= -(HOST_WIDE_INT)jalign;
1021 if (isize - (toff - offset) < jsize)
1022 continue;
1023
1024 isize -= toff - offset;
1025 offset = toff;
1026 }
1027
1028 /* UNION the objects, placing J at OFFSET. */
1029 union_stack_vars (i, j, offset);
1030
1031 isize -= jsize;
1032 if (isize == 0)
1033 break;
1034 }
1035 }
1036
1037 if (optimize)
1038 update_alias_info_with_stack_vars ();
1039 }
1040
1041 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
1042
1043 static void
1044 dump_stack_var_partition (void)
1045 {
1046 size_t si, i, j, n = stack_vars_num;
1047
1048 for (si = 0; si < n; ++si)
1049 {
1050 i = stack_vars_sorted[si];
1051
1052 /* Skip variables that aren't partition representatives, for now. */
1053 if (stack_vars[i].representative != i)
1054 continue;
1055
1056 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
1057 " align %u\n", (unsigned long) i, stack_vars[i].size,
1058 stack_vars[i].alignb);
1059
1060 for (j = i; j != EOC; j = stack_vars[j].next)
1061 {
1062 fputc ('\t', dump_file);
1063 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
1064 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
1065 stack_vars[j].offset);
1066 }
1067 }
1068 }
1069
1070 /* Assign rtl to DECL at frame offset OFFSET. */
1071
1072 static void
1073 expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
1074 {
1075 /* Alignment is unsigned. */
1076 unsigned HOST_WIDE_INT align;
1077 rtx x;
1078
1079 /* If this fails, we've overflowed the stack frame. Error nicely? */
1080 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
1081
1082 x = plus_constant (virtual_stack_vars_rtx, offset);
1083 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
1084
1085 if (TREE_CODE (decl) != SSA_NAME)
1086 {
1087 /* Set alignment we actually gave this decl if it isn't an SSA name.
1088 If it is, we generate stack slots only accidentally, so it isn't as
1089 important; we'll simply use the alignment that is already set. */
1090 offset -= frame_phase;
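/* offset & -offset isolates the lowest set bit, i.e. the largest power of
   two dividing OFFSET; e.g. an offset of 24 yields 8, so that slot is
   known to be 8-byte aligned (before scaling by BITS_PER_UNIT).  */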
1091 align = offset & -offset;
1092 align *= BITS_PER_UNIT;
1093 if (align == 0)
1094 align = STACK_BOUNDARY;
1095 else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1096 align = MAX_SUPPORTED_STACK_ALIGNMENT;
1097
1098 DECL_ALIGN (decl) = align;
1099 DECL_USER_ALIGN (decl) = 0;
1100 }
1101
1102 set_mem_attributes (x, SSAVAR (decl), true);
1103 set_rtl (decl, x);
1104 }
1105
1106 /* A subroutine of expand_used_vars. Give each partition representative
1107 a unique location within the stack frame. Update each partition member
1108 with that location. */
1109
1110 static void
1111 expand_stack_vars (bool (*pred) (tree))
1112 {
1113 size_t si, i, j, n = stack_vars_num;
1114
1115 for (si = 0; si < n; ++si)
1116 {
1117 HOST_WIDE_INT offset;
1118
1119 i = stack_vars_sorted[si];
1120
1121 /* Skip variables that aren't partition representatives, for now. */
1122 if (stack_vars[i].representative != i)
1123 continue;
1124
1125 /* Skip variables that have already had rtl assigned. See also
1126 add_stack_var where we perpetrate this pc_rtx hack. */
1127 if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
1128 ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
1129 : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
1130 continue;
1131
1132 /* Check the predicate to see whether this variable should be
1133 allocated in this pass. */
1134 if (pred && !pred (stack_vars[i].decl))
1135 continue;
1136
1137 offset = alloc_stack_frame_space (stack_vars[i].size,
1138 stack_vars[i].alignb);
1139
1140 /* Create rtl for each variable based on their location within the
1141 partition. */
1142 for (j = i; j != EOC; j = stack_vars[j].next)
1143 {
1144 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
1145 expand_one_stack_var_at (stack_vars[j].decl,
1146 stack_vars[j].offset + offset);
1147 }
1148 }
1149 }
1150
1151 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1152 static HOST_WIDE_INT
1153 account_stack_vars (void)
1154 {
1155 size_t si, j, i, n = stack_vars_num;
1156 HOST_WIDE_INT size = 0;
1157
1158 for (si = 0; si < n; ++si)
1159 {
1160 i = stack_vars_sorted[si];
1161
1162 /* Skip variables that aren't partition representatives, for now. */
1163 if (stack_vars[i].representative != i)
1164 continue;
1165
1166 size += stack_vars[i].size;
1167 for (j = i; j != EOC; j = stack_vars[j].next)
1168 set_rtl (stack_vars[j].decl, NULL);
1169 }
1170 return size;
1171 }
1172
1173 /* A subroutine of expand_one_var. Called to immediately assign rtl
1174 to a variable to be allocated in the stack frame. */
1175
1176 static void
1177 expand_one_stack_var (tree var)
1178 {
1179 HOST_WIDE_INT size, offset, align;
1180
1181 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1182 align = get_decl_align_unit (SSAVAR (var));
1183 offset = alloc_stack_frame_space (size, align);
1184
1185 expand_one_stack_var_at (var, offset);
1186 }
1187
1188 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1189 that will reside in a hard register. */
1190
1191 static void
1192 expand_one_hard_reg_var (tree var)
1193 {
1194 rest_of_decl_compilation (var, 0, 0);
1195 }
1196
1197 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1198 that will reside in a pseudo register. */
1199
1200 static void
1201 expand_one_register_var (tree var)
1202 {
1203 tree decl = SSAVAR (var);
1204 tree type = TREE_TYPE (decl);
1205 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1206 rtx x = gen_reg_rtx (reg_mode);
1207
1208 set_rtl (var, x);
1209
1210 /* Note if the object is a user variable. */
1211 if (!DECL_ARTIFICIAL (decl))
1212 mark_user_reg (x);
1213
1214 if (POINTER_TYPE_P (type))
1215 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
1216 }
1217
1218 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1219 has some associated error, e.g. its type is error-mark. We just need
1220 to pick something that won't crash the rest of the compiler. */
1221
1222 static void
1223 expand_one_error_var (tree var)
1224 {
1225 enum machine_mode mode = DECL_MODE (var);
1226 rtx x;
1227
1228 if (mode == BLKmode)
1229 x = gen_rtx_MEM (BLKmode, const0_rtx);
1230 else if (mode == VOIDmode)
1231 x = const0_rtx;
1232 else
1233 x = gen_reg_rtx (mode);
1234
1235 SET_DECL_RTL (var, x);
1236 }
1237
1238 /* A subroutine of expand_one_var. VAR is a variable that will be
1239 allocated to the local stack frame. Return true if we wish to
1240 add VAR to STACK_VARS so that it will be coalesced with other
1241 variables. Return false to allocate VAR immediately.
1242
1243 This function is used to reduce the number of variables considered
1244 for coalescing, which reduces the size of the quadratic problem. */
1245
1246 static bool
1247 defer_stack_allocation (tree var, bool toplevel)
1248 {
1249 /* If stack protection is enabled, *all* stack variables must be deferred,
1250 so that we can re-order the strings to the top of the frame. */
1251 if (flag_stack_protect)
1252 return true;
1253
1254 /* Variables in the outermost scope automatically conflict with
1255 every other variable. The only reason to want to defer them
1256 at all is that, after sorting, we can more efficiently pack
1257 small variables in the stack frame. Continue to defer at -O2. */
1258 if (toplevel && optimize < 2)
1259 return false;
1260
1261 /* Without optimization, *most* variables are allocated from the
1262 stack, which makes the quadratic problem large exactly when we
1263 want compilation to proceed as quickly as possible. On the
1264 other hand, we don't want the function's stack frame size to
1265 get completely out of hand. So we avoid adding scalars and
1266 "small" aggregates to the list at all. */
1267 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1268 return false;
1269
1270 return true;
1271 }
1272
1273 /* A subroutine of expand_used_vars. Expand one variable according to
1274 its flavor. Variables to be placed on the stack are not actually
1275 expanded yet, merely recorded.
1276 When REALLY_EXPAND is false, only add stack values to be allocated.
1277 Return the amount of stack space this variable is supposed to take.
1278 */
1279
1280 static HOST_WIDE_INT
1281 expand_one_var (tree var, bool toplevel, bool really_expand)
1282 {
1283 tree origvar = var;
1284 var = SSAVAR (var);
1285
1286 if (SUPPORTS_STACK_ALIGNMENT
1287 && TREE_TYPE (var) != error_mark_node
1288 && TREE_CODE (var) == VAR_DECL)
1289 {
1290 unsigned int align;
1291
1292 /* Because we don't know whether VAR will end up in a register or on the
1293 stack, we conservatively assume it will be on the stack even if VAR is
1294 eventually put into a register after the RA pass. For non-automatic
1295 variables, which won't be on the stack, we collect the alignment of the
1296 type and ignore any user-specified alignment. */
1297 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1298 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1299 TYPE_MODE (TREE_TYPE (var)),
1300 TYPE_ALIGN (TREE_TYPE (var)));
1301 else
1302 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1303
1304 if (crtl->stack_alignment_estimated < align)
1305 {
1306 /* stack_alignment_estimated shouldn't change after the stack
1307 realign decision has been made. */
1308 gcc_assert(!crtl->stack_realign_processed);
1309 crtl->stack_alignment_estimated = align;
1310 }
1311 }
1312
1313 if (TREE_CODE (origvar) == SSA_NAME)
1314 {
1315 gcc_assert (TREE_CODE (var) != VAR_DECL
1316 || (!DECL_EXTERNAL (var)
1317 && !DECL_HAS_VALUE_EXPR_P (var)
1318 && !TREE_STATIC (var)
1319 && TREE_TYPE (var) != error_mark_node
1320 && !DECL_HARD_REGISTER (var)
1321 && really_expand));
1322 }
1323 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1324 ;
1325 else if (DECL_EXTERNAL (var))
1326 ;
1327 else if (DECL_HAS_VALUE_EXPR_P (var))
1328 ;
1329 else if (TREE_STATIC (var))
1330 ;
1331 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1332 ;
1333 else if (TREE_TYPE (var) == error_mark_node)
1334 {
1335 if (really_expand)
1336 expand_one_error_var (var);
1337 }
1338 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1339 {
1340 if (really_expand)
1341 expand_one_hard_reg_var (var);
1342 }
1343 else if (use_register_for_decl (var))
1344 {
1345 if (really_expand)
1346 expand_one_register_var (origvar);
1347 }
1348 else if (defer_stack_allocation (var, toplevel))
1349 add_stack_var (origvar);
1350 else
1351 {
1352 if (really_expand)
1353 expand_one_stack_var (origvar);
1354 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1355 }
1356 return 0;
1357 }
1358
1359 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1360 expanding variables. Those variables that can be put into registers
1361 are allocated pseudos; those that can't are put on the stack.
1362
1363 TOPLEVEL is true if this is the outermost BLOCK. */
1364
1365 static void
1366 expand_used_vars_for_block (tree block, bool toplevel)
1367 {
1368 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1369 tree t;
1370
1371 old_sv_num = toplevel ? 0 : stack_vars_num;
1372
1373 /* Expand all variables at this level. */
1374 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1375 if (TREE_USED (t))
1376 expand_one_var (t, toplevel, true);
1377
1378 this_sv_num = stack_vars_num;
1379
1380 /* Expand all variables at contained (nested) levels. */
1381 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1382 expand_used_vars_for_block (t, false);
1383
1384 /* Since we do not track exact variable lifetimes (which is not even
1385 possible for variables whose address escapes), we mirror the block
1386 tree in the interference graph. Here we cause all variables at this
1387 level, and all sublevels, to conflict. Do make certain that a
1388 variable conflicts with itself. */
1389 if (old_sv_num < this_sv_num)
1390 {
1391 new_sv_num = stack_vars_num;
1392 resize_stack_vars_conflict (new_sv_num);
1393
1394 for (i = old_sv_num; i < new_sv_num; ++i)
1395 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1396 add_stack_var_conflict (i, j);
1397 }
1398 }
1399
1400 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1401 and clear TREE_USED on all local variables. */
1402
1403 static void
1404 clear_tree_used (tree block)
1405 {
1406 tree t;
1407
1408 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1409 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1410 TREE_USED (t) = 0;
1411
1412 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1413 clear_tree_used (t);
1414 }
1415
1416 /* Examine TYPE and determine a bit mask of the following features. */
1417
1418 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1419 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1420 #define SPCT_HAS_ARRAY 4
1421 #define SPCT_HAS_AGGREGATE 8
1422
1423 static unsigned int
1424 stack_protect_classify_type (tree type)
1425 {
1426 unsigned int ret = 0;
1427 tree t;
1428
1429 switch (TREE_CODE (type))
1430 {
1431 case ARRAY_TYPE:
1432 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1433 if (t == char_type_node
1434 || t == signed_char_type_node
1435 || t == unsigned_char_type_node)
1436 {
1437 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1438 unsigned HOST_WIDE_INT len;
1439
1440 if (!TYPE_SIZE_UNIT (type)
1441 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1442 len = max;
1443 else
1444 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1445
1446 if (len < max)
1447 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1448 else
1449 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1450 }
1451 else
1452 ret = SPCT_HAS_ARRAY;
1453 break;
1454
1455 case UNION_TYPE:
1456 case QUAL_UNION_TYPE:
1457 case RECORD_TYPE:
1458 ret = SPCT_HAS_AGGREGATE;
1459 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1460 if (TREE_CODE (t) == FIELD_DECL)
1461 ret |= stack_protect_classify_type (TREE_TYPE (t));
1462 break;
1463
1464 default:
1465 break;
1466 }
1467
1468 return ret;
1469 }
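
/* For example, assuming the default --param ssp-buffer-size=8, a char[4]
   classifies as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, a char[64]
   classifies as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, and a struct
   or union wrapping either additionally sets SPCT_HAS_AGGREGATE.  */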
1470
1471 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1472 part of the local stack frame. Remember if we ever return nonzero for
1473 any variable in this function. The return value is the phase number in
1474 which the variable should be allocated. */
1475
1476 static int
1477 stack_protect_decl_phase (tree decl)
1478 {
1479 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1480 int ret = 0;
1481
1482 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1483 has_short_buffer = true;
1484
1485 if (flag_stack_protect == 2)
1486 {
1487 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1488 && !(bits & SPCT_HAS_AGGREGATE))
1489 ret = 1;
1490 else if (bits & SPCT_HAS_ARRAY)
1491 ret = 2;
1492 }
1493 else
1494 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1495
1496 if (ret)
1497 has_protected_decls = true;
1498
1499 return ret;
1500 }
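
/* In other words: with -fstack-protector-all (flag_stack_protect == 2),
   plain character arrays get phase 1 and all other arrays phase 2, while
   with plain -fstack-protector only decls containing a large character
   array are segregated, in phase 1.  */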
1501
1502 /* Two helper routines that check for phase 1 and phase 2. These are used
1503 as callbacks for expand_stack_vars. */
1504
1505 static bool
1506 stack_protect_decl_phase_1 (tree decl)
1507 {
1508 return stack_protect_decl_phase (decl) == 1;
1509 }
1510
1511 static bool
1512 stack_protect_decl_phase_2 (tree decl)
1513 {
1514 return stack_protect_decl_phase (decl) == 2;
1515 }
1516
1517 /* Ensure that variables in different stack protection phases conflict
1518 so that they do not get merged and share the same stack slot. */
1519
1520 static void
1521 add_stack_protection_conflicts (void)
1522 {
1523 size_t i, j, n = stack_vars_num;
1524 unsigned char *phase;
1525
1526 phase = XNEWVEC (unsigned char, n);
1527 for (i = 0; i < n; ++i)
1528 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1529
1530 for (i = 0; i < n; ++i)
1531 {
1532 unsigned char ph_i = phase[i];
1533 for (j = 0; j < i; ++j)
1534 if (ph_i != phase[j])
1535 add_stack_var_conflict (i, j);
1536 }
1537
1538 XDELETEVEC (phase);
1539 }
1540
1541 /* Create a decl for the guard at the top of the stack frame. */
1542
1543 static void
1544 create_stack_guard (void)
1545 {
1546 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1547 VAR_DECL, NULL, ptr_type_node);
1548 TREE_THIS_VOLATILE (guard) = 1;
1549 TREE_USED (guard) = 1;
1550 expand_one_stack_var (guard);
1551 crtl->stack_protect_guard = guard;
1552 }
1553
1554 /* A subroutine of estimated_stack_frame_size. Walk down through the BLOCK
1555 tree and account for the stack space the used variables would need,
1556 without actually expanding them.
1557
1558 TOPLEVEL is true if this is the outermost BLOCK. */
1559
1560 static HOST_WIDE_INT
1561 account_used_vars_for_block (tree block, bool toplevel)
1562 {
1563 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1564 tree t;
1565 HOST_WIDE_INT size = 0;
1566
1567 old_sv_num = toplevel ? 0 : stack_vars_num;
1568
1569 /* Account for all variables at this level. */
1570 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1571 if (TREE_USED (t))
1572 size += expand_one_var (t, toplevel, false);
1573
1574 this_sv_num = stack_vars_num;
1575
1576 /* Account for all variables at contained (nested) levels. */
1577 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1578 size += account_used_vars_for_block (t, false);
1579
1580 /* Since we do not track exact variable lifetimes (which is not even
1581 possible for variables whose address escapes), we mirror the block
1582 tree in the interference graph. Here we cause all variables at this
1583 level, and all sublevels, to conflict. Do make certain that a
1584 variable conflicts with itself. */
1585 if (old_sv_num < this_sv_num)
1586 {
1587 new_sv_num = stack_vars_num;
1588 resize_stack_vars_conflict (new_sv_num);
1589
1590 for (i = old_sv_num; i < new_sv_num; ++i)
1591 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1592 add_stack_var_conflict (i, j);
1593 }
1594 return size;
1595 }
1596
1597 /* Prepare for expanding variables. */
1598 static void
1599 init_vars_expansion (void)
1600 {
1601 tree t;
1602 /* Set TREE_USED on all variables in the local_decls. */
1603 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1604 TREE_USED (TREE_VALUE (t)) = 1;
1605
1606 /* Clear TREE_USED on all variables associated with a block scope. */
1607 clear_tree_used (DECL_INITIAL (current_function_decl));
1608
1609 /* Initialize local stack smashing state. */
1610 has_protected_decls = false;
1611 has_short_buffer = false;
1612 }
1613
1614 /* Free up stack variable graph data. */
1615 static void
1616 fini_vars_expansion (void)
1617 {
1618 XDELETEVEC (stack_vars);
1619 XDELETEVEC (stack_vars_sorted);
1620 XDELETEVEC (stack_vars_conflict);
1621 stack_vars = NULL;
1622 stack_vars_alloc = stack_vars_num = 0;
1623 stack_vars_conflict = NULL;
1624 stack_vars_conflict_alloc = 0;
1625 }
1626
1627 /* Make a fair guess for the size of the stack frame of the current
1628 function. This doesn't have to be exact; the result is only used
1629 in the inline heuristics. So we don't want to run the full stack
1630 var packing algorithm (which is quadratic in the number of stack
1631 vars). Instead, we calculate the total size of all stack vars.
1632 This turns out to be a pretty fair estimate -- packing of stack
1633 vars doesn't happen very often. */
1634
1635 HOST_WIDE_INT
1636 estimated_stack_frame_size (void)
1637 {
1638 HOST_WIDE_INT size = 0;
1639 size_t i;
1640 tree t, outer_block = DECL_INITIAL (current_function_decl);
1641
1642 init_vars_expansion ();
1643
1644 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1645 {
1646 tree var = TREE_VALUE (t);
1647
1648 if (TREE_USED (var))
1649 size += expand_one_var (var, true, false);
1650 TREE_USED (var) = 1;
1651 }
1652 size += account_used_vars_for_block (outer_block, true);
1653
1654 if (stack_vars_num > 0)
1655 {
1656 /* Fake sorting the stack vars for account_stack_vars (). */
1657 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1658 for (i = 0; i < stack_vars_num; ++i)
1659 stack_vars_sorted[i] = i;
1660 size += account_stack_vars ();
1661 fini_vars_expansion ();
1662 }
1663
1664 return size;
1665 }
1666
1667 /* Expand all variables used in the function. */
1668
1669 static void
1670 expand_used_vars (void)
1671 {
1672 tree t, next, outer_block = DECL_INITIAL (current_function_decl);
1673 unsigned i;
1674
1675 /* Compute the phase of the stack frame for this function. */
1676 {
1677 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1678 int off = STARTING_FRAME_OFFSET % align;
1679 frame_phase = off ? align - off : 0;
1680 }
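  /* For example, on a hypothetical target where STARTING_FRAME_OFFSET is 8
     and the preferred stack boundary is 128 bits (ALIGN == 16), OFF is 8
     and frame_phase becomes 8, keeping frame_offset + frame_phase a
     multiple of 16 as documented at the frame_phase declaration.  */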
1681
1682 init_vars_expansion ();
1683
1684 for (i = 0; i < SA.map->num_partitions; i++)
1685 {
1686 tree var = partition_to_var (SA.map, i);
1687
1688 gcc_assert (is_gimple_reg (var));
1689 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1690 expand_one_var (var, true, true);
1691 else
1692 {
1693 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1694 contain the default def (representing the parm or result itself)
1695 we don't do anything here. But for those which don't contain the
1696 default def (representing a temporary based on the parm/result)
1697 we need to allocate space just like for normal VAR_DECLs. */
1698 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1699 {
1700 expand_one_var (var, true, true);
1701 gcc_assert (SA.partition_to_pseudo[i]);
1702 }
1703 }
1704 }
1705
1706 /* At this point all variables on the local_decls with TREE_USED
1707 set are not associated with any block scope. Lay them out. */
1708 t = cfun->local_decls;
1709 cfun->local_decls = NULL_TREE;
1710 for (; t; t = next)
1711 {
1712 tree var = TREE_VALUE (t);
1713 bool expand_now = false;
1714
1715 next = TREE_CHAIN (t);
1716
1717 /* Expanded above already. */
1718 if (is_gimple_reg (var))
1719 {
1720 TREE_USED (var) = 0;
1721 ggc_free (t);
1722 continue;
1723 }
1724 /* We didn't set a block for static or extern because it's hard
1725 to tell the difference between a global variable (re)declared
1726 in a local scope, and one that's really declared there to
1727 begin with. And it doesn't really matter much, since we're
1728 not giving them stack space. Expand them now. */
1729 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1730 expand_now = true;
1731
1732 /* If the variable is not associated with any block, then it
1733 was created by the optimizers, and could be live anywhere
1734 in the function. */
1735 else if (TREE_USED (var))
1736 expand_now = true;
1737
1738 /* Finally, mark all variables on the list as used. We'll use
1739 this in a moment when we expand those associated with scopes. */
1740 TREE_USED (var) = 1;
1741
1742 if (expand_now)
1743 {
1744 expand_one_var (var, true, true);
1745 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1746 {
1747 rtx rtl = DECL_RTL_IF_SET (var);
1748
1749 /* Keep artificial non-ignored vars in cfun->local_decls
1750 chain until instantiate_decls. */
1751 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1752 {
1753 TREE_CHAIN (t) = cfun->local_decls;
1754 cfun->local_decls = t;
1755 continue;
1756 }
1757 }
1758 }
1759
1760 ggc_free (t);
1761 }
1762
1763 /* At this point, all variables within the block tree with TREE_USED
1764 set are actually used by the optimized function. Lay them out. */
1765 expand_used_vars_for_block (outer_block, true);
1766
1767 if (stack_vars_num > 0)
1768 {
1769 /* Due to the way alias sets work, no variables with non-conflicting
1770 alias sets may be assigned the same address. Add conflicts to
1771 reflect this. */
1772 add_alias_set_conflicts ();
1773
1774 /* If stack protection is enabled, we don't share space between
1775 vulnerable data and non-vulnerable data. */
1776 if (flag_stack_protect)
1777 add_stack_protection_conflicts ();
1778
1779 /* Now that we have collected all stack variables, and have computed a
1780 minimal interference graph, attempt to save some stack space. */
1781 partition_stack_vars ();
1782 if (dump_file)
1783 dump_stack_var_partition ();
1784 }
1785
1786 /* There are several conditions under which we should create a
1787 stack guard: protect-all, alloca used, protected decls present. */
1788 if (flag_stack_protect == 2
1789 || (flag_stack_protect
1790 && (cfun->calls_alloca || has_protected_decls)))
1791 create_stack_guard ();
1792
1793 /* Assign rtl to each variable based on these partitions. */
1794 if (stack_vars_num > 0)
1795 {
1796 /* Reorder decls to be protected by iterating over the variables
1797 array multiple times, and allocating out of each phase in turn. */
1798 /* ??? We could probably integrate this into the qsort we did
1799 earlier, such that we naturally see these variables first,
1800 and thus naturally allocate things in the right order. */
1801 if (has_protected_decls)
1802 {
1803 /* Phase 1 contains only character arrays. */
1804 expand_stack_vars (stack_protect_decl_phase_1);
1805
1806 /* Phase 2 contains other kinds of arrays. */
1807 if (flag_stack_protect == 2)
1808 expand_stack_vars (stack_protect_decl_phase_2);
1809 }
1810
1811 expand_stack_vars (NULL);
1812
1813 fini_vars_expansion ();
1814 }
1815
1816 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1817 if (STACK_ALIGNMENT_NEEDED)
1818 {
1819 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1820 if (!FRAME_GROWS_DOWNWARD)
1821 frame_offset += align - 1;
1822 frame_offset &= -align;
1823 }
1824 }
1825
1826
1827 /* If we need to produce a detailed dump, print the tree representation
1828 for STMT to the dump file. SINCE is the last RTX after which the RTL
1829 generated for STMT should have been appended. */
1830
1831 static void
1832 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1833 {
1834 if (dump_file && (dump_flags & TDF_DETAILS))
1835 {
1836 fprintf (dump_file, "\n;; ");
1837 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1838 fprintf (dump_file, "\n");
1839
1840 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1841 }
1842 }
1843
1844 /* Maps the blocks that do not contain tree labels to rtx labels. */
1845
1846 static struct pointer_map_t *lab_rtx_for_bb;
1847
1848 /* Returns the label_rtx expression for a label starting basic block BB. */
1849
1850 static rtx
1851 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1852 {
1853 gimple_stmt_iterator gsi;
1854 tree lab;
1855 gimple lab_stmt;
1856 void **elt;
1857
1858 if (bb->flags & BB_RTL)
1859 return block_label (bb);
1860
1861 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1862 if (elt)
1863 return (rtx) *elt;
1864
1865 /* Find the tree label if it is present. */
1866
1867 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1868 {
1869 lab_stmt = gsi_stmt (gsi);
1870 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1871 break;
1872
1873 lab = gimple_label_label (lab_stmt);
1874 if (DECL_NONLOCAL (lab))
1875 break;
1876
1877 return label_rtx (lab);
1878 }
1879
1880 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1881 *elt = gen_label_rtx ();
1882 return (rtx) *elt;
1883 }
1884
1885
1886 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1887 of a basic block where we just expanded the conditional at the end,
1888 possibly clean up the CFG and instruction sequence. */
1889
1890 static void
1891 maybe_cleanup_end_of_block (edge e)
1892 {
1893 /* Special case: when jumpif decides that the condition is
1894 trivial it emits an unconditional jump (and the necessary
1895 barrier). But we still have two edges; the fallthru one is
1896 wrong. purge_dead_edges would clean this up later. Unfortunately
1897 we have to insert insns (and split edges) before
1898 find_many_sub_basic_blocks and hence before purge_dead_edges.
1899 But splitting edges might create new blocks which depend on the
1900 fact that if there are two edges there's no barrier. So the
1901 barrier would get lost and verify_flow_info would ICE. Instead
1902 of auditing all edge splitters to care for the barrier (which
1903 normally isn't there in a cleaned CFG), fix it here. */
1904 if (BARRIER_P (get_last_insn ()))
1905 {
1906 basic_block bb = e->src;
1907 rtx insn;
1908 remove_edge (e);
1909 /* Now we have a single successor block. If we have insns to
1910 insert on the remaining edge, we will potentially insert
1911 them at the end of this block (if the dest block isn't feasible)
1912 in order to avoid splitting the edge. This insertion will take
1913 place in front of the last jump. But we might have emitted
1914 multiple jumps (conditional and one unconditional) to the
1915 same destination. Inserting in front of the last one then
1916 is a problem. See PR 40021. We fix this by deleting all
1917 jumps except the last unconditional one. */
1918 insn = PREV_INSN (get_last_insn ());
1919 /* Make sure we have an unconditional jump. Otherwise we're
1920 confused. */
1921 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1922 for (insn = PREV_INSN (insn); insn != BB_HEAD (bb);)
1923 {
1924 insn = PREV_INSN (insn);
1925 if (JUMP_P (NEXT_INSN (insn)))
1926 delete_insn (NEXT_INSN (insn));
1927 }
1928 }
1929 }
1930
1931
1932 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1933 Returns a new basic block if we've terminated the current basic
1934 block and created a new one. */
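/* For example (schematic, not exact dump output), a block ending in

     if (a_1 < b_2) goto <bb 3>; else goto <bb 4>;

   expands to a single conditional jump when one successor is the
   physical next block (that edge simply becomes a fallthru), and
   otherwise to a conditional jump plus an unconditional jump placed in
   a new basic block that carries the redirected false edge. */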
1935
1936 static basic_block
1937 expand_gimple_cond (basic_block bb, gimple stmt)
1938 {
1939 basic_block new_bb, dest;
1940 edge new_edge;
1941 edge true_edge;
1942 edge false_edge;
1943 tree pred = gimple_cond_pred_to_tree (stmt);
1944 rtx last2, last;
1945
1946 last2 = last = get_last_insn ();
1947
1948 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1949 if (gimple_has_location (stmt))
1950 {
1951 set_curr_insn_source_location (gimple_location (stmt));
1952 set_curr_insn_block (gimple_block (stmt));
1953 }
1954
1955 /* These flags have no purpose in RTL land. */
1956 true_edge->flags &= ~EDGE_TRUE_VALUE;
1957 false_edge->flags &= ~EDGE_FALSE_VALUE;
1958
1959 /* We can either have a pure conditional jump with one fallthru edge or
1960 a two-way jump that needs to be decomposed into two basic blocks. */
1961 if (false_edge->dest == bb->next_bb)
1962 {
1963 jumpif (pred, label_rtx_for_bb (true_edge->dest));
1964 add_reg_br_prob_note (last, true_edge->probability);
1965 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1966 if (true_edge->goto_locus)
1967 {
1968 set_curr_insn_source_location (true_edge->goto_locus);
1969 set_curr_insn_block (true_edge->goto_block);
1970 true_edge->goto_locus = curr_insn_locator ();
1971 }
1972 true_edge->goto_block = NULL;
1973 false_edge->flags |= EDGE_FALLTHRU;
1974 ggc_free (pred);
1975 maybe_cleanup_end_of_block (false_edge);
1976 return NULL;
1977 }
1978 if (true_edge->dest == bb->next_bb)
1979 {
1980 jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
1981 add_reg_br_prob_note (last, false_edge->probability);
1982 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1983 if (false_edge->goto_locus)
1984 {
1985 set_curr_insn_source_location (false_edge->goto_locus);
1986 set_curr_insn_block (false_edge->goto_block);
1987 false_edge->goto_locus = curr_insn_locator ();
1988 }
1989 false_edge->goto_block = NULL;
1990 true_edge->flags |= EDGE_FALLTHRU;
1991 ggc_free (pred);
1992 maybe_cleanup_end_of_block (true_edge);
1993 return NULL;
1994 }
1995
1996 jumpif (pred, label_rtx_for_bb (true_edge->dest));
1997 add_reg_br_prob_note (last, true_edge->probability);
1998 last = get_last_insn ();
1999 if (false_edge->goto_locus)
2000 {
2001 set_curr_insn_source_location (false_edge->goto_locus);
2002 set_curr_insn_block (false_edge->goto_block);
2003 false_edge->goto_locus = curr_insn_locator ();
2004 }
2005 false_edge->goto_block = NULL;
2006 emit_jump (label_rtx_for_bb (false_edge->dest));
2007
2008 BB_END (bb) = last;
2009 if (BARRIER_P (BB_END (bb)))
2010 BB_END (bb) = PREV_INSN (BB_END (bb));
2011 update_bb_for_insn (bb);
2012
2013 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2014 dest = false_edge->dest;
2015 redirect_edge_succ (false_edge, new_bb);
2016 false_edge->flags |= EDGE_FALLTHRU;
2017 new_bb->count = false_edge->count;
2018 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2019 new_edge = make_edge (new_bb, dest, 0);
2020 new_edge->probability = REG_BR_PROB_BASE;
2021 new_edge->count = new_bb->count;
2022 if (BARRIER_P (BB_END (new_bb)))
2023 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2024 update_bb_for_insn (new_bb);
2025
2026 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2027
2028 if (true_edge->goto_locus)
2029 {
2030 set_curr_insn_source_location (true_edge->goto_locus);
2031 set_curr_insn_block (true_edge->goto_block);
2032 true_edge->goto_locus = curr_insn_locator ();
2033 }
2034 true_edge->goto_block = NULL;
2035
2036 ggc_free (pred);
2037 return new_bb;
2038 }
2039
2040 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2041 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2042 generated a tail call (something that might be denied by the ABI
2043 rules governing the call; see calls.c).
2044
2045 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2046 can still reach the rest of BB. The case here is __builtin_sqrt,
2047 where the NaN result goes through the external function (with a
2048 tailcall) and the normal result happens via a sqrt instruction. */
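/* A hedged sketch of the __builtin_sqrt case mentioned above: given

     double f (double x) { return __builtin_sqrt (x); }

   a target with a hardware sqrt instruction may expand the common case
   inline and emit a (sibling) call to sqrt only for operands that need
   the library semantics, so the block both contains a tail call and can
   fall through to its remaining statements. */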
2049
2050 static basic_block
2051 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2052 {
2053 rtx last2, last;
2054 edge e;
2055 edge_iterator ei;
2056 int probability;
2057 gcov_type count;
2058 tree stmt_tree = gimple_to_tree (stmt);
2059
2060 last2 = last = get_last_insn ();
2061
2062 expand_expr_stmt (stmt_tree);
2063
2064 release_stmt_tree (stmt, stmt_tree);
2065
2066 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2067 if (CALL_P (last) && SIBLING_CALL_P (last))
2068 goto found;
2069
2070 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2071
2072 *can_fallthru = true;
2073 return NULL;
2074
2075 found:
2076 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2077 Any instructions emitted here are about to be deleted. */
2078 do_pending_stack_adjust ();
2079
2080 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2081 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2082 EH or abnormal edges, we shouldn't have created a tail call in
2083 the first place. So it seems to me we should just be removing
2084 all edges here, or redirecting the existing fallthru edge to
2085 the exit block. */
2086
2087 probability = 0;
2088 count = 0;
2089
2090 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2091 {
2092 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2093 {
2094 if (e->dest != EXIT_BLOCK_PTR)
2095 {
2096 e->dest->count -= e->count;
2097 e->dest->frequency -= EDGE_FREQUENCY (e);
2098 if (e->dest->count < 0)
2099 e->dest->count = 0;
2100 if (e->dest->frequency < 0)
2101 e->dest->frequency = 0;
2102 }
2103 count += e->count;
2104 probability += e->probability;
2105 remove_edge (e);
2106 }
2107 else
2108 ei_next (&ei);
2109 }
2110
2111 /* This is somewhat ugly: the call_expr expander often emits instructions
2112 after the sibcall (to perform the function return). These confuse the
2113 find_many_sub_basic_blocks code, so we need to get rid of them. */
2114 last = NEXT_INSN (last);
2115 gcc_assert (BARRIER_P (last));
2116
2117 *can_fallthru = false;
2118 while (NEXT_INSN (last))
2119 {
2120 /* For instance, the sqrt builtin expander expands an if with a
2121 sibcall in the then-branch and a label for the else-branch. */
2122 if (LABEL_P (NEXT_INSN (last)))
2123 {
2124 *can_fallthru = true;
2125 break;
2126 }
2127 delete_insn (NEXT_INSN (last));
2128 }
2129
2130 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2131 e->probability += probability;
2132 e->count += count;
2133 BB_END (bb) = last;
2134 update_bb_for_insn (bb);
2135
2136 if (NEXT_INSN (last))
2137 {
2138 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2139
2140 last = BB_END (bb);
2141 if (BARRIER_P (last))
2142 BB_END (bb) = PREV_INSN (last);
2143 }
2144
2145 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2146
2147 return bb;
2148 }
2149
2150 /* Expand basic block BB from GIMPLE trees to RTL. */
2151
2152 static basic_block
2153 expand_gimple_basic_block (basic_block bb)
2154 {
2155 gimple_stmt_iterator gsi;
2156 gimple_seq stmts;
2157 gimple stmt = NULL;
2158 rtx note, last;
2159 edge e;
2160 edge_iterator ei;
2161 void **elt;
2162
2163 if (dump_file)
2164 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
2165 bb->index);
2166
2167 /* Note that since we are now transitioning from GIMPLE to RTL, we
2168 cannot use the gsi_*_bb() routines because they expect the basic
2169 block to be in GIMPLE, instead of RTL. Therefore, we need to
2170 access the BB sequence directly. */
2171 stmts = bb_seq (bb);
2172 bb->il.gimple = NULL;
2173 rtl_profile_for_bb (bb);
2174 init_rtl_bb_info (bb);
2175 bb->flags |= BB_RTL;
2176
2177 /* Remove the RETURN_EXPR if we may fall through to the exit
2178 instead. */
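/* E.g. (illustrative) a block ending in a bare "return;" immediately
   before the exit block needs no explicit RTL jump; the statement is
   dropped and its edge simply becomes a fallthru. */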
2179 gsi = gsi_last (stmts);
2180 if (!gsi_end_p (gsi)
2181 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2182 {
2183 gimple ret_stmt = gsi_stmt (gsi);
2184
2185 gcc_assert (single_succ_p (bb));
2186 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2187
2188 if (bb->next_bb == EXIT_BLOCK_PTR
2189 && !gimple_return_retval (ret_stmt))
2190 {
2191 gsi_remove (&gsi, false);
2192 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2193 }
2194 }
2195
2196 gsi = gsi_start (stmts);
2197 if (!gsi_end_p (gsi))
2198 {
2199 stmt = gsi_stmt (gsi);
2200 if (gimple_code (stmt) != GIMPLE_LABEL)
2201 stmt = NULL;
2202 }
2203
2204 elt = pointer_map_contains (lab_rtx_for_bb, bb);
2205
2206 if (stmt || elt)
2207 {
2208 last = get_last_insn ();
2209
2210 if (stmt)
2211 {
2212 tree stmt_tree = gimple_to_tree (stmt);
2213 expand_expr_stmt (stmt_tree);
2214 release_stmt_tree (stmt, stmt_tree);
2215 gsi_next (&gsi);
2216 }
2217
2218 if (elt)
2219 emit_label ((rtx) *elt);
2220
2221 /* Java emits line number notes at the top of labels.
2222 ??? Make this go away once line number notes are obsoleted. */
2223 BB_HEAD (bb) = NEXT_INSN (last);
2224 if (NOTE_P (BB_HEAD (bb)))
2225 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
2226 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
2227
2228 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2229 }
2230 else
2231 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
2232
2233 NOTE_BASIC_BLOCK (note) = bb;
2234
2235 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2236 {
2237 gimple stmt = gsi_stmt (gsi);
2238 basic_block new_bb;
2239
2240 /* Expand this statement, then evaluate the resulting RTL and
2241 fixup the CFG accordingly. */
2242 if (gimple_code (stmt) == GIMPLE_COND)
2243 {
2244 new_bb = expand_gimple_cond (bb, stmt);
2245 if (new_bb)
2246 return new_bb;
2247 }
2248 else
2249 {
2250 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
2251 {
2252 bool can_fallthru;
2253 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
2254 if (new_bb)
2255 {
2256 if (can_fallthru)
2257 bb = new_bb;
2258 else
2259 return new_bb;
2260 }
2261 }
2262 else
2263 {
2264 def_operand_p def_p;
2265 tree stmt_tree;
2266 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
2267
2268 if (def_p != NULL)
2269 {
2270 /* Ignore this stmt if it is in the list of
2271 replaceable expressions. */
2272 if (SA.values
2273 && bitmap_bit_p (SA.values,
2274 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
2275 continue;
2276 }
2277 stmt_tree = gimple_to_tree (stmt);
2278 last = get_last_insn ();
2279 expand_expr_stmt (stmt_tree);
2280 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2281 release_stmt_tree (stmt, stmt_tree);
2282 }
2283 }
2284 }
2285
2286 /* Expand implicit goto and convert goto_locus. */
2287 FOR_EACH_EDGE (e, ei, bb->succs)
2288 {
2289 if (e->goto_locus && e->goto_block)
2290 {
2291 set_curr_insn_source_location (e->goto_locus);
2292 set_curr_insn_block (e->goto_block);
2293 e->goto_locus = curr_insn_locator ();
2294 }
2295 e->goto_block = NULL;
2296 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
2297 {
2298 emit_jump (label_rtx_for_bb (e->dest));
2299 e->flags &= ~EDGE_FALLTHRU;
2300 }
2301 }
2302
2303 do_pending_stack_adjust ();
2304
2305 /* Find the block tail. The last insn in the block is the insn
2306 before a barrier and/or table jump insn. */
2307 last = get_last_insn ();
2308 if (BARRIER_P (last))
2309 last = PREV_INSN (last);
2310 if (JUMP_TABLE_DATA_P (last))
2311 last = PREV_INSN (PREV_INSN (last));
2312 BB_END (bb) = last;
2313
2314 update_bb_for_insn (bb);
2315
2316 return bb;
2317 }
2318
2319
2320 /* Create a basic block for initialization code. */
2321
2322 static basic_block
2323 construct_init_block (void)
2324 {
2325 basic_block init_block, first_block;
2326 edge e = NULL;
2327 int flags;
2328
2329 /* Multiple entry points not supported yet. */
2330 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
2331 init_rtl_bb_info (ENTRY_BLOCK_PTR);
2332 init_rtl_bb_info (EXIT_BLOCK_PTR);
2333 ENTRY_BLOCK_PTR->flags |= BB_RTL;
2334 EXIT_BLOCK_PTR->flags |= BB_RTL;
2335
2336 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
2337
2338 /* When the entry edge points to the first basic block, we don't need
2339 a jump; otherwise we have to jump to the proper target. */
2340 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
2341 {
2342 tree label = gimple_block_label (e->dest);
2343
2344 emit_jump (label_rtx (label));
2345 flags = 0;
2346 }
2347 else
2348 flags = EDGE_FALLTHRU;
2349
2350 init_block = create_basic_block (NEXT_INSN (get_insns ()),
2351 get_last_insn (),
2352 ENTRY_BLOCK_PTR);
2353 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
2354 init_block->count = ENTRY_BLOCK_PTR->count;
2355 if (e)
2356 {
2357 first_block = e->dest;
2358 redirect_edge_succ (e, init_block);
2359 e = make_edge (init_block, first_block, flags);
2360 }
2361 else
2362 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
2363 e->probability = REG_BR_PROB_BASE;
2364 e->count = ENTRY_BLOCK_PTR->count;
2365
2366 update_bb_for_insn (init_block);
2367 return init_block;
2368 }
2369
2370 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
2371 found in the block tree. */
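/* For instance (illustrative), given nested scopes

     void f (void) { int a; { int b; { int c; } } }

   the outermost BLOCK passed in gets number 0, the block holding B gets
   number 1, and the block holding C gets number 2. */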
2372
2373 static void
2374 set_block_levels (tree block, int level)
2375 {
2376 while (block)
2377 {
2378 BLOCK_NUMBER (block) = level;
2379 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
2380 block = BLOCK_CHAIN (block);
2381 }
2382 }
2383
2384 /* Create a block containing landing pads and similar stuff. */
2385
2386 static void
2387 construct_exit_block (void)
2388 {
2389 rtx head = get_last_insn ();
2390 rtx end;
2391 basic_block exit_block;
2392 edge e, e2;
2393 unsigned ix;
2394 edge_iterator ei;
2395 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
2396
2397 rtl_profile_for_bb (EXIT_BLOCK_PTR);
2398
2399 /* Make sure the locus is set to the end of the function, so that
2400 epilogue line numbers and warnings are set properly. */
2401 if (cfun->function_end_locus != UNKNOWN_LOCATION)
2402 input_location = cfun->function_end_locus;
2403
2404 /* The following insns belong to the top scope. */
2405 set_curr_insn_block (DECL_INITIAL (current_function_decl));
2406
2407 /* Generate rtl for function exit. */
2408 expand_function_end ();
2409
2410 end = get_last_insn ();
2411 if (head == end)
2412 return;
2413 /* While emitting the function end we could have moved the end of the
2414 last basic block; restore it. */
2415 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
2416 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
2417 head = NEXT_INSN (head);
2418 exit_block = create_basic_block (NEXT_INSN (head), end,
2419 EXIT_BLOCK_PTR->prev_bb);
2420 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
2421 exit_block->count = EXIT_BLOCK_PTR->count;
2422
2423 ix = 0;
2424 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
2425 {
2426 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
2427 if (!(e->flags & EDGE_ABNORMAL))
2428 redirect_edge_succ (e, exit_block);
2429 else
2430 ix++;
2431 }
2432
2433 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
2434 e->probability = REG_BR_PROB_BASE;
2435 e->count = EXIT_BLOCK_PTR->count;
2436 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
2437 if (e2 != e)
2438 {
2439 e->count -= e2->count;
2440 exit_block->count -= e2->count;
2441 exit_block->frequency -= EDGE_FREQUENCY (e2);
2442 }
2443 if (e->count < 0)
2444 e->count = 0;
2445 if (exit_block->count < 0)
2446 exit_block->count = 0;
2447 if (exit_block->frequency < 0)
2448 exit_block->frequency = 0;
2449 update_bb_for_insn (exit_block);
2450 }
2451
2452 /* Helper function for discover_nonconstant_array_refs.
2453 Look for ARRAY_REF nodes with non-constant indexes and mark them
2454 addressable. */
2455
2456 static tree
2457 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
2458 void *data ATTRIBUTE_UNUSED)
2459 {
2460 tree t = *tp;
2461
2462 if (IS_TYPE_OR_DECL_P (t))
2463 *walk_subtrees = 0;
2464 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2465 {
2466 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2467 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
2468 && (!TREE_OPERAND (t, 2)
2469 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
2470 || (TREE_CODE (t) == COMPONENT_REF
2471 && (!TREE_OPERAND (t,2)
2472 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
2473 || TREE_CODE (t) == BIT_FIELD_REF
2474 || TREE_CODE (t) == REALPART_EXPR
2475 || TREE_CODE (t) == IMAGPART_EXPR
2476 || TREE_CODE (t) == VIEW_CONVERT_EXPR
2477 || CONVERT_EXPR_P (t))
2478 t = TREE_OPERAND (t, 0);
2479
2480 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2481 {
2482 t = get_base_address (t);
2483 if (t && DECL_P (t)
2484 && DECL_MODE (t) != BLKmode)
2485 TREE_ADDRESSABLE (t) = 1;
2486 }
2487
2488 *walk_subtrees = 0;
2489 }
2490
2491 return NULL_TREE;
2492 }
2493
2494 /* RTL expansion cannot compile array references with variable
2495 offsets for arrays stored in a single register. Discover such
2496 expressions and mark the variables addressable to avoid this
2497 scenario. */
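/* For example (illustrative):

     char buf[4];
     ...
     c = buf[i];

   BUF is small enough that it may be given a non-BLK mode and live in a
   single register; when I is not a compile-time constant we mark BUF
   addressable so the reference can be expanded through memory. */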
2498
2499 static void
2500 discover_nonconstant_array_refs (void)
2501 {
2502 basic_block bb;
2503 gimple_stmt_iterator gsi;
2504
2505 FOR_EACH_BB (bb)
2506 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2507 {
2508 gimple stmt = gsi_stmt (gsi);
2509 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
2510 }
2511 }
2512
2513 /* This function sets crtl->args.internal_arg_pointer to a virtual
2514 register if DRAP is needed. The local register allocator will replace
2515 virtual_incoming_args_rtx with the virtual register. */
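/* A hedged example: on a target such as x86, a function that must
   dynamically realign its stack (say for a local requiring stronger
   alignment than the ABI guarantees) can no longer address its incoming
   arguments off the realigned stack pointer; the DRAP register obtained
   below from targetm.calls.get_drap_rtx stands in for the incoming
   argument pointer instead. */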
2516
2517 static void
2518 expand_stack_alignment (void)
2519 {
2520 rtx drap_rtx;
2521 unsigned int preferred_stack_boundary;
2522
2523 if (! SUPPORTS_STACK_ALIGNMENT)
2524 return;
2525
2526 if (cfun->calls_alloca
2527 || cfun->has_nonlocal_label
2528 || crtl->has_nonlocal_goto)
2529 crtl->need_drap = true;
2530
2531 gcc_assert (crtl->stack_alignment_needed
2532 <= crtl->stack_alignment_estimated);
2533
2534 /* Update crtl->stack_alignment_estimated and use it later to align
2535 the stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
2536 exceptions since callgraph doesn't collect incoming stack alignment
2537 in this case. */
2538 if (flag_non_call_exceptions
2539 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
2540 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2541 else
2542 preferred_stack_boundary = crtl->preferred_stack_boundary;
2543 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
2544 crtl->stack_alignment_estimated = preferred_stack_boundary;
2545 if (preferred_stack_boundary > crtl->stack_alignment_needed)
2546 crtl->stack_alignment_needed = preferred_stack_boundary;
2547
2548 crtl->stack_realign_needed
2549 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
2550 crtl->stack_realign_tried = crtl->stack_realign_needed;
2551
2552 crtl->stack_realign_processed = true;
2553
2554 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
2555 alignment. */
2556 gcc_assert (targetm.calls.get_drap_rtx != NULL);
2557 drap_rtx = targetm.calls.get_drap_rtx ();
2558
2559 /* stack_realign_drap and drap_rtx must match. */
2560 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
2561
2562 /* Do nothing if NULL is returned, which means DRAP is not needed. */
2563 if (NULL != drap_rtx)
2564 {
2565 crtl->args.internal_arg_pointer = drap_rtx;
2566
2567 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
2568 needed. */
2569 fixup_tail_calls ();
2570 }
2571 }
2572
2573 /* Translate the intermediate representation contained in the CFG
2574 from GIMPLE trees to RTL.
2575
2576 We do conversion per basic block and preserve/update the tree CFG.
2577 This implies we have to do some magic as the CFG can simultaneously
2578 consist of basic blocks containing RTL and GIMPLE trees. This can
2579 confuse the CFG hooks, so be careful not to manipulate the CFG during
2580 expansion. */
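/* As a rough illustration, for a function as small as

     int f (int x) { return x + 1; }

   the single GIMPLE block is rewritten into RTL insns for the addition
   and the return-value copy, bracketed by the blocks built in
   construct_init_block and construct_exit_block below. */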
2581
2582 static unsigned int
2583 gimple_expand_cfg (void)
2584 {
2585 basic_block bb, init_block;
2586 sbitmap blocks;
2587 edge_iterator ei;
2588 edge e;
2589 unsigned i;
2590
2591 rewrite_out_of_ssa (&SA);
2592 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
2593 sizeof (rtx));
2594
2595 /* Some backends want to know that we are expanding to RTL. */
2596 currently_expanding_to_rtl = 1;
2597
2598 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
2599
2600 insn_locators_alloc ();
2601 if (!DECL_IS_BUILTIN (current_function_decl))
2602 {
2603 /* Eventually, all FEs should explicitly set function_start_locus. */
2604 if (cfun->function_start_locus == UNKNOWN_LOCATION)
2605 set_curr_insn_source_location
2606 (DECL_SOURCE_LOCATION (current_function_decl));
2607 else
2608 set_curr_insn_source_location (cfun->function_start_locus);
2609 }
2610 set_curr_insn_block (DECL_INITIAL (current_function_decl));
2611 prologue_locator = curr_insn_locator ();
2612
2613 /* Make sure first insn is a note even if we don't want linenums.
2614 This makes sure the first insn will never be deleted.
2615 Also, final expects a note to appear there. */
2616 emit_note (NOTE_INSN_DELETED);
2617
2618 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
2619 discover_nonconstant_array_refs ();
2620
2621 targetm.expand_to_rtl_hook ();
2622 crtl->stack_alignment_needed = STACK_BOUNDARY;
2623 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
2624 crtl->stack_alignment_estimated = STACK_BOUNDARY;
2625 crtl->preferred_stack_boundary = STACK_BOUNDARY;
2626 cfun->cfg->max_jumptable_ents = 0;
2627
2628
2629 /* Expand the variables recorded during gimple lowering. */
2630 expand_used_vars ();
2631
2632 /* Honor stack protection warnings. */
2633 if (warn_stack_protect)
2634 {
2635 if (cfun->calls_alloca)
2636 warning (OPT_Wstack_protector,
2637 "not protecting local variables: variable length buffer");
2638 if (has_short_buffer && !crtl->stack_protect_guard)
2639 warning (OPT_Wstack_protector,
2640 "not protecting function: no buffer at least %d bytes long",
2641 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
2642 }
2643
2644 /* Set up parameters and prepare for return, for the function. */
2645 expand_function_start (current_function_decl);
2646
2647 /* Now that we also have the parameter RTXs, copy them over to our
2648 partitions. */
2649 for (i = 0; i < SA.map->num_partitions; i++)
2650 {
2651 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
2652
2653 if (TREE_CODE (var) != VAR_DECL
2654 && !SA.partition_to_pseudo[i])
2655 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
2656 gcc_assert (SA.partition_to_pseudo[i]);
2657
2658 /* If this decl was marked as living in multiple places, reset
2659 this now to NULL. */
2660 if (DECL_RTL_IF_SET (var) == pc_rtx)
2661 SET_DECL_RTL (var, NULL);
2662
2663 /* Some RTL parts really want to look at DECL_RTL(x) when x
2664 was a decl marked in REG_ATTR or MEM_ATTR. We could use
2665 SET_DECL_RTL here to make this available, but that would mean
2666 selecting one of the potentially many RTLs for one DECL. Instead
2667 of doing that we simply reset the MEM_EXPR of the RTL in question,
2668 so that nobody can get at it and hence nobody can call DECL_RTL on it. */
2669 if (!DECL_RTL_SET_P (var))
2670 {
2671 if (MEM_P (SA.partition_to_pseudo[i]))
2672 set_mem_expr (SA.partition_to_pseudo[i], NULL);
2673 }
2674 }
2675
2676 /* If this function is `main', emit a call to `__main'
2677 to run global initializers, etc. */
2678 if (DECL_NAME (current_function_decl)
2679 && MAIN_NAME_P (DECL_NAME (current_function_decl))
2680 && DECL_FILE_SCOPE_P (current_function_decl))
2681 expand_main_function ();
2682
2683 /* Initialize the stack_protect_guard field. This must happen after the
2684 call to __main (if any) so that the external decl is initialized. */
2685 if (crtl->stack_protect_guard)
2686 stack_protect_prologue ();
2687
2688 /* Update stack boundary if needed. */
2689 if (SUPPORTS_STACK_ALIGNMENT)
2690 {
2691 /* Call update_stack_boundary here to update incoming stack
2692 boundary before TARGET_FUNCTION_OK_FOR_SIBCALL is called.
2693 TARGET_FUNCTION_OK_FOR_SIBCALL needs to know the accurate
2694 incoming stack alignment to check if it is OK to perform
2695 sibcall optimization since sibcall optimization will only
2696 align the outgoing stack to incoming stack boundary. */
2697 if (targetm.calls.update_stack_boundary)
2698 targetm.calls.update_stack_boundary ();
2699
2700 /* The incoming stack frame has to be aligned at least at
2701 parm_stack_boundary. */
2702 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2703 }
2704
2705 expand_phi_nodes (&SA);
2706
2707 /* Register rtl specific functions for cfg. */
2708 rtl_register_cfg_hooks ();
2709
2710 init_block = construct_init_block ();
2711
2712 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
2713 remaining edges later. */
2714 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2715 e->flags &= ~EDGE_EXECUTABLE;
2716
2717 lab_rtx_for_bb = pointer_map_create ();
2718 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
2719 bb = expand_gimple_basic_block (bb);
2720
2721 execute_free_datastructures ();
2722 finish_out_of_ssa (&SA);
2723
2724 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
2725 conservatively to true until they are all profile aware. */
2726 pointer_map_destroy (lab_rtx_for_bb);
2727 free_histograms ();
2728
2729 construct_exit_block ();
2730 set_curr_insn_block (DECL_INITIAL (current_function_decl));
2731 insn_locators_finalize ();
2732
2733 /* Convert tree EH labels to RTL EH labels and zap the tree EH table. */
2734 convert_from_eh_region_ranges ();
2735 set_eh_throw_stmt_table (cfun, NULL);
2736
2737 rebuild_jump_labels (get_insns ());
2738 find_exception_handler_labels ();
2739
2740 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
2741 {
2742 edge e;
2743 edge_iterator ei;
2744 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2745 {
2746 if (e->insns.r)
2747 commit_one_edge_insertion (e);
2748 else
2749 ei_next (&ei);
2750 }
2751 }
2752
2753 /* We're done expanding trees to RTL. */
2754 currently_expanding_to_rtl = 0;
2755
2756 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
2757 {
2758 edge e;
2759 edge_iterator ei;
2760 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2761 {
2762 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
2763 e->flags &= ~EDGE_EXECUTABLE;
2764
2765 /* At the moment not all abnormal edges match the RTL
2766 representation. It is safe to remove them here as
2767 find_many_sub_basic_blocks will rediscover them.
2768 In the future we should get this fixed properly. */
2769 if ((e->flags & EDGE_ABNORMAL)
2770 && !(e->flags & EDGE_SIBCALL))
2771 remove_edge (e);
2772 else
2773 ei_next (&ei);
2774 }
2775 }
2776
2777 blocks = sbitmap_alloc (last_basic_block);
2778 sbitmap_ones (blocks);
2779 find_many_sub_basic_blocks (blocks);
2780 sbitmap_free (blocks);
2781 purge_all_dead_edges ();
2782
2783 compact_blocks ();
2784
2785 expand_stack_alignment ();
2786
2787 #ifdef ENABLE_CHECKING
2788 verify_flow_info ();
2789 #endif
2790
2791 /* There's no need to defer outputting this function any more; we
2792 know we want to output it. */
2793 DECL_DEFER_OUTPUT (current_function_decl) = 0;
2794
2795 /* Now that we're done expanding trees to RTL, we shouldn't have any
2796 more CONCATs anywhere. */
2797 generating_concat_p = 0;
2798
2799 if (dump_file)
2800 {
2801 fprintf (dump_file,
2802 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
2803 /* And the pass manager will dump RTL for us. */
2804 }
2805
2806 /* If we're emitting a nested function, make sure its parent gets
2807 emitted as well. Doing otherwise confuses debug info. */
2808 {
2809 tree parent;
2810 for (parent = DECL_CONTEXT (current_function_decl);
2811 parent != NULL_TREE;
2812 parent = get_containing_scope (parent))
2813 if (TREE_CODE (parent) == FUNCTION_DECL)
2814 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
2815 }
2816
2817 /* We are now committed to emitting code for this function. Do any
2818 preparation, such as emitting abstract debug info for the inline
2819 function before it gets mangled by optimization. */
2820 if (cgraph_function_possibly_inlined_p (current_function_decl))
2821 (*debug_hooks->outlining_inline_function) (current_function_decl);
2822
2823 TREE_ASM_WRITTEN (current_function_decl) = 1;
2824
2825 /* After expanding, the return labels are no longer needed. */
2826 return_label = NULL;
2827 naked_return_label = NULL;
2828 /* Tag the blocks with a depth number so that change_scope can find
2829 the common parent easily. */
2830 set_block_levels (DECL_INITIAL (cfun->decl), 0);
2831 default_rtl_profile ();
2832 return 0;
2833 }
2834
2835 struct rtl_opt_pass pass_expand =
2836 {
2837 {
2838 RTL_PASS,
2839 "expand", /* name */
2840 NULL, /* gate */
2841 gimple_expand_cfg, /* execute */
2842 NULL, /* sub */
2843 NULL, /* next */
2844 0, /* static_pass_number */
2845 TV_EXPAND, /* tv_id */
2846 PROP_ssa | PROP_gimple_leh | PROP_cfg,/* properties_required */
2847 PROP_rtl, /* properties_provided */
2848 PROP_ssa | PROP_trees, /* properties_destroyed */
2849 TODO_verify_ssa | TODO_verify_flow
2850 | TODO_verify_stmts, /* todo_flags_start */
2851 TODO_dump_func
2852 | TODO_ggc_collect /* todo_flags_finish */
2853 }
2854 };