ggc.h (GGC_RESIZEVAR): New, reorder macros.
[gcc.git] / gcc / tree-nested.c
1 /* Nested function decomposition for trees.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-inline.h"
30 #include "tree-gimple.h"
31 #include "tree-iterator.h"
32 #include "tree-flow.h"
33 #include "cgraph.h"
34 #include "expr.h"
35 #include "langhooks.h"
36 #include "pointer-set.h"
37 #include "ggc.h"
38
39
40 /* The object of this pass is to lower the representation of a set of nested
41 functions in order to expose all of the gory details of the various
42 nonlocal references. We want to do this sooner rather than later, in
43 order to give us more freedom in emitting all of the functions in question.
44
45 Back in olden times, when gcc was young, we developed an insanely
46 complicated scheme whereby variables which were referenced nonlocally
47 were forced to live in the stack of the declaring function, and then
48 the nested functions magically discovered where these variables were
49 placed. In order for this scheme to function properly, it required
50 that the outer function be partially expanded, then we switch to
51 compiling the inner function, and once done with those we switch back
52 to compiling the outer function. Such delicate ordering requirements
53 makes it difficult to do whole translation unit optimizations
54 involving such functions.
55
56 The implementation here is much more direct. Everything that can be
57 referenced by an inner function is a member of an explicitly created
58 structure herein called the "nonlocal frame struct". The incoming
59 static chain for a nested function is a pointer to this struct in
60 the parent. In this way, we settle on known offsets from a known
61 base, and so are decoupled from the logic that places objects in the
62 function's stack frame. More importantly, we don't have to wait for
63 that to happen -- since the compilation of the inner function is no
64 longer tied to a real stack frame, the nonlocal frame struct can be
65 allocated anywhere. Which means that the outer function is now
66 inlinable.
67
68 Theory of operation here is very simple. Iterate over all the
69 statements in all the functions (depth first) several times,
70 allocating structures and fields on demand. In general we want to
71 examine inner functions first, so that we can avoid making changes
72 to outer functions which are unnecessary.
73
74 The order of the passes matters a bit, in that later passes will be
75 skipped if it is discovered that the functions don't actually interact
76 at all. That is, they're nested in the lexical sense but could have
77 been written as independent functions without change. */
78
79
/* State kept for one function while lowering the nesting tree.  */

struct nesting_info
{
  /* Links forming the nesting tree: enclosing function, first nested
     function, and next sibling at the same nesting depth.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Map from non-locally referenced DECL to its FIELD_DECL in the
     frame struct (see lookup_field_for_decl).  */
  struct pointer_map_t *field_map;
  /* Map from DECL to its local replacement: trampoline fields
     (lookup_tramp_for_decl) and debug decls (get_nonlocal_debug_decl).  */
  struct pointer_map_t *var_map;
  /* DECL_UIDs for which frame expansion is suppressed
     (tested in convert_nonlocal_reference).  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this record describes.  */
  tree context;
  /* Chain of temporaries created during lowering, to be registered in
     the function afterward.  */
  tree new_local_var_chain;
  /* Chain of replacement decls created for debug info / OpenMP.  */
  tree debug_var_chain;
  /* The non-local frame RECORD_TYPE and the VAR_DECL instantiating it
     in this function (see get_frame_type).  */
  tree frame_type;
  tree frame_decl;
  /* Frame field holding the enclosing function's static chain, and the
     PARM_DECL receiving our own incoming static chain.  */
  tree chain_field;
  tree chain_decl;
  /* Frame field holding the non-local goto jmp_buf, if needed.  */
  tree nl_goto_field;

  /* True if any PARM_DECL was remapped into the frame struct.  */
  bool any_parm_remapped;
  /* True if a trampoline field was created for a nested function.  */
  bool any_tramp_created;
  /* NOTE(review): flag recording static-chain additions; exact bit
     semantics are defined by code outside this chunk — confirm there.  */
  char static_chain_added;
};
103
104
/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;


/* We're working in so many different function contexts simultaneously,
   that create_tmp_var is dangerous.  Prevent mishap by poisoning the
   name; use create_tmp_var_for below instead.  */
#define create_tmp_var cant_use_create_tmp_var_here_dummy
112
113 /* Like create_tmp_var, except record the variable for registration at
114 the given nesting level. */
115
116 static tree
117 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
118 {
119 tree tmp_var;
120
121 /* If the type is of variable size or a type which must be created by the
122 frontend, something is wrong. Note that we explicitly allow
123 incomplete types here, since we create them ourselves here. */
124 gcc_assert (!TREE_ADDRESSABLE (type));
125 gcc_assert (!TYPE_SIZE_UNIT (type)
126 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
127
128 tmp_var = create_tmp_var_raw (type, prefix);
129 DECL_CONTEXT (tmp_var) = info->context;
130 TREE_CHAIN (tmp_var) = info->new_local_var_chain;
131 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
132 if (TREE_CODE (type) == COMPLEX_TYPE
133 || TREE_CODE (type) == VECTOR_TYPE)
134 DECL_GIMPLE_REG_P (tmp_var) = 1;
135
136 info->new_local_var_chain = tmp_var;
137
138 return tmp_var;
139 }
140
141 /* Take the address of EXP to be used within function CONTEXT.
142 Mark it for addressability as necessary. */
143
144 tree
145 build_addr (tree exp, tree context)
146 {
147 tree base = exp;
148 tree save_context;
149 tree retval;
150
151 while (handled_component_p (base))
152 base = TREE_OPERAND (base, 0);
153
154 if (DECL_P (base))
155 TREE_ADDRESSABLE (base) = 1;
156
157 /* Building the ADDR_EXPR will compute a set of properties for
158 that ADDR_EXPR. Those properties are unfortunately context
159 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
160
161 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
162 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
163 way the properties are for the ADDR_EXPR are computed properly. */
164 save_context = current_function_decl;
165 current_function_decl = context;
166 retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
167 current_function_decl = save_context;
168 return retval;
169 }
170
171 /* Insert FIELD into TYPE, sorted by alignment requirements. */
172
173 void
174 insert_field_into_struct (tree type, tree field)
175 {
176 tree *p;
177
178 DECL_CONTEXT (field) = type;
179
180 for (p = &TYPE_FIELDS (type); *p ; p = &TREE_CHAIN (*p))
181 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
182 break;
183
184 TREE_CHAIN (field) = *p;
185 *p = field;
186
187 /* Set correct alignment for frame struct type. */
188 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
189 TYPE_ALIGN (type) = DECL_ALIGN (field);
190 }
191
192 /* Build or return the RECORD_TYPE that describes the frame state that is
193 shared between INFO->CONTEXT and its nested functions. This record will
194 not be complete until finalize_nesting_tree; up until that point we'll
195 be adding fields as necessary.
196
197 We also build the DECL that represents this frame in the function. */
198
static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name the record "FRAME.<function-name>" for dumps/debugging.  */
      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      /* Cache the type and create the variable instantiating it
	 within INFO->CONTEXT.  */
      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}
228
229 /* Return true if DECL should be referenced by pointer in the non-local
230 frame structure. */
231
232 static bool
233 use_pointer_in_frame (tree decl)
234 {
235 if (TREE_CODE (decl) == PARM_DECL)
236 {
237 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
238 sized decls, and inefficient to copy large aggregates. Don't bother
239 moving anything but scalar variables. */
240 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
241 }
242 else
243 {
244 /* Variable sized types make things "interesting" in the frame. */
245 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
246 }
247 }
248
249 /* Given DECL, a non-locally accessed variable, find or create a field
250 in the non-local frame structure for the given nesting context. */
251
static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  void **slot;

  /* With NO_INSERT, just report what is already there, if anything.  */
  if (insert == NO_INSERT)
    {
      slot = pointer_map_contains (info->field_map, decl);
      return slot ? (tree) *slot : NULL_TREE;
    }

  slot = pointer_map_insert (info->field_map, decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* Only a pointer to DECL lives in the frame.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* DECL itself lives in the frame; mirror its type,
	     alignment, addressability and volatility on the field.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  DECL_ALIGN (field) = DECL_ALIGN (decl);
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      /* A remapped parameter means its incoming value must be copied
	 into the frame at function entry.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return (tree) *slot;
}
296
297 /* Build or return the variable that holds the static chain within
298 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
299
static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;
  if (!decl)
    {
      tree type;

      /* The static chain is a pointer to the enclosing frame struct.  */
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;
    }
  return decl;
}
332
333 /* Build or return the field within the non-local frame state that holds
334 the static chain for INFO->CONTEXT. This is the way to walk back up
335 multiple nesting levels. */
336
337 static tree
338 get_chain_field (struct nesting_info *info)
339 {
340 tree field = info->chain_field;
341 if (!field)
342 {
343 tree type = build_pointer_type (get_frame_type (info->outer));
344
345 field = make_node (FIELD_DECL);
346 DECL_NAME (field) = get_identifier ("__chain");
347 TREE_TYPE (field) = type;
348 DECL_ALIGN (field) = TYPE_ALIGN (type);
349 DECL_NONADDRESSABLE_P (field) = 1;
350
351 insert_field_into_struct (get_frame_type (info), field);
352
353 info->chain_field = field;
354 }
355 return field;
356 }
357
358 /* Copy EXP into a temporary. Allocate the temporary in the context of
359 INFO and insert the initialization statement before TSI. */
360
361 static tree
362 init_tmp_var (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
363 {
364 tree t, stmt;
365
366 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
367 stmt = build_gimple_modify_stmt (t, exp);
368 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
369 tsi_link_before (tsi, stmt, TSI_SAME_STMT);
370
371 return t;
372 }
373
374 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
375
376 static tree
377 tsi_gimplify_val (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
378 {
379 if (is_gimple_val (exp))
380 return exp;
381 else
382 return init_tmp_var (info, exp, tsi);
383 }
384
385 /* Similarly, but copy from the temporary and insert the statement
386 after the iterator. */
387
388 static tree
389 save_tmp_var (struct nesting_info *info, tree exp,
390 tree_stmt_iterator *tsi)
391 {
392 tree t, stmt;
393
394 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
395 stmt = build_gimple_modify_stmt (exp, t);
396 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
397 tsi_link_after (tsi, stmt, TSI_SAME_STMT);
398
399 return t;
400 }
401
/* Build or return the type used to represent a nested function trampoline.  */

/* Cached singleton; GTY so it survives garbage collection.  */
static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (void)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* The single field, "__data": a char array covering the trampoline
     bytes, aligned as the target requires.  */
  t = build_index_type (build_int_cst (NULL_TREE, size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
440
441 /* Given DECL, a nested function, find or create a field in the non-local
442 frame structure for a trampoline for this function. */
443
444 static tree
445 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
446 enum insert_option insert)
447 {
448 void **slot;
449
450 if (insert == NO_INSERT)
451 {
452 slot = pointer_map_contains (info->var_map, decl);
453 return slot ? (tree) *slot : NULL_TREE;
454 }
455
456 slot = pointer_map_insert (info->var_map, decl);
457 if (!*slot)
458 {
459 tree field = make_node (FIELD_DECL);
460 DECL_NAME (field) = DECL_NAME (decl);
461 TREE_TYPE (field) = get_trampoline_type ();
462 TREE_ADDRESSABLE (field) = 1;
463
464 insert_field_into_struct (get_frame_type (info), field);
465 *slot = field;
466
467 info->any_tramp_created = true;
468 }
469
470 return (tree) *slot;
471 }
472
473 /* Build or return the field within the non-local frame state that holds
474 the non-local goto "jmp_buf". The buffer itself is maintained by the
475 rtl middle-end as dynamic stack space is allocated. */
476
static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Words in the save area, plus one for the frame pointer.  */
      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (build_int_cst (NULL_TREE, size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      /* The rtl middle-end fills the buffer in through its address.  */
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
515 \f
516 /* Helper function for walk_stmts. Walk output operands of an ASM_EXPR. */
517
static void
walk_asm_expr (struct walk_stmt_info *wi, tree stmt)
{
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  /* Outputs are lvalues.  Whether the callback sees a value-only
     context depends on each operand's constraint.  */
  wi->is_lhs = true;
  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* A register-capable operand needs only a value; a memory-only
	 operand needs an addressable lvalue.  */
      wi->val_only = (allows_reg || !allows_mem);
      walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      wi->val_only = (allows_reg || !allows_mem);
      /* Although input "m" is not really a LHS, we need a lvalue.  */
      wi->is_lhs = !wi->val_only;
      walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
    }

  /* Restore the default walk state for subsequent statements.  */
  wi->is_lhs = false;
  wi->val_only = true;
}
556
557 /* Iterate over all sub-statements of *TP calling walk_tree with
558 WI->CALLBACK for every sub-expression in each statement found. */
559
void
walk_stmts (struct walk_stmt_info *wi, tree *tp)
{
  tree t = *tp;
  int walk_subtrees;

  if (!t)
    return;

  /* Track locations if requested so later diagnostics and inserted
     statements refer to the statement being walked.  */
  if (wi->want_locations && EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  switch (TREE_CODE (t))
    {
    case STATEMENT_LIST:
      {
	/* Record the iterator in WI so callbacks can insert or replace
	   statements relative to the current one.  */
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  {
	    wi->tsi = i;
	    walk_stmts (wi, tsi_stmt_ptr (i));
	  }
      }
      break;

    case COND_EXPR:
      walk_tree (&COND_EXPR_COND (t), wi->callback, wi, NULL);
      walk_stmts (wi, &COND_EXPR_THEN (t));
      walk_stmts (wi, &COND_EXPR_ELSE (t));
      break;
    case CATCH_EXPR:
      walk_stmts (wi, &CATCH_BODY (t));
      break;
    case EH_FILTER_EXPR:
      walk_stmts (wi, &EH_FILTER_FAILURE (t));
      break;
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
      walk_stmts (wi, &TREE_OPERAND (t, 0));
      walk_stmts (wi, &TREE_OPERAND (t, 1));
      break;

    case BIND_EXPR:
      /* Optionally let the callback see the BIND_EXPR itself; it may
	 clear WALK_SUBTREES to suppress walking into the body.  */
      if (wi->want_bind_expr)
	{
	  walk_subtrees = 1;
	  wi->callback (tp, &walk_subtrees, wi);
	  if (!walk_subtrees)
	    break;
	}
      walk_stmts (wi, &BIND_EXPR_BODY (t));
      break;

    case RETURN_EXPR:
      /* Same optional pre-visit for RETURN_EXPR.  */
      if (wi->want_return_expr)
	{
	  walk_subtrees = 1;
	  wi->callback (tp, &walk_subtrees, wi);
	  if (!walk_subtrees)
	    break;
	}
      walk_stmts (wi, &TREE_OPERAND (t, 0));
      break;

    case GIMPLE_MODIFY_STMT:
      /* A formal temporary lhs may use a COMPONENT_REF rhs.  */
      wi->val_only = !is_gimple_formal_tmp_var (GIMPLE_STMT_OPERAND (t, 0));
      walk_tree (&GIMPLE_STMT_OPERAND (t, 1), wi->callback, wi, NULL);

      /* If the rhs is appropriate for a memory, we may use a
	 COMPONENT_REF on the lhs.  */
      wi->val_only = !is_gimple_mem_rhs (GIMPLE_STMT_OPERAND (t, 1));
      wi->is_lhs = true;
      walk_tree (&GIMPLE_STMT_OPERAND (t, 0), wi->callback, wi, NULL);

      /* Restore the default value/rhs context.  */
      wi->val_only = true;
      wi->is_lhs = false;
      break;

    case ASM_EXPR:
      walk_asm_expr (wi, *tp);
      break;

    default:
      /* Anything else is treated as a plain expression.  */
      wi->val_only = true;
      walk_tree (tp, wi->callback, wi, NULL);
      break;
    }
}
649
650 /* Invoke CALLBACK on all statements of *STMT_P. */
651
652 static void
653 walk_body (walk_tree_fn callback, struct nesting_info *info, tree *stmt_p)
654 {
655 struct walk_stmt_info wi;
656
657 memset (&wi, 0, sizeof (wi));
658 wi.callback = callback;
659 wi.info = info;
660 wi.val_only = true;
661
662 walk_stmts (&wi, stmt_p);
663 }
664
665 /* Invoke CALLBACK on all statements of INFO->CONTEXT. */
666
static inline void
walk_function (walk_tree_fn callback, struct nesting_info *info)
{
  /* At this stage the function body still lives in DECL_SAVED_TREE.  */
  walk_body (callback, info, &DECL_SAVED_TREE (info->context));
}
672
673 /* Invoke CALLBACK on OMP_FOR init, cond, incr and pre-body. */
674
static void
walk_omp_for (walk_tree_fn callback, struct nesting_info *info, tree for_stmt)
{
  struct walk_stmt_info wi;
  tree t, list = NULL, empty;
  int i;

  walk_body (callback, info, &OMP_FOR_PRE_BODY (for_stmt));

  /* Append an empty statement to LIST so WI.TSI has a valid insertion
     point for any temporaries the callback creates.  */
  empty = build_empty_stmt ();
  append_to_statement_list_force (empty, &list);
  memset (&wi, 0, sizeof (wi));
  wi.callback = callback;
  wi.info = info;
  wi.tsi = tsi_last (list);

  /* Walk the lhs/rhs of each element of the init, cond and incr
     vectors, setting the proper val_only/is_lhs context for each
     operand before walking it.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
      SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
      wi.val_only = false;
      walk_tree (&GIMPLE_STMT_OPERAND (t, 0), callback, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&GIMPLE_STMT_OPERAND (t, 1), callback, &wi, NULL);

      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback, &wi, NULL);

      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
      SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
      wi.val_only = false;
      walk_tree (&GIMPLE_STMT_OPERAND (t, 0), callback, &wi, NULL);
      /* The increment rhs is a binary expression; walk both operands.  */
      t = GIMPLE_STMT_OPERAND (t, 1);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback, &wi, NULL);
    }

  /* Remove empty statement added above from the end of statement list.  */
  tsi_delink (&wi.tsi);
  append_to_statement_list (list, &OMP_FOR_PRE_BODY (for_stmt));
}
729
730 /* Similarly for ROOT and all functions nested underneath, depth first. */
731
732 static void
733 walk_all_functions (walk_tree_fn callback, struct nesting_info *root)
734 {
735 do
736 {
737 if (root->inner)
738 walk_all_functions (callback, root->inner);
739 walk_function (callback, root);
740 root = root->next;
741 }
742 while (root);
743 }
744 \f
745 /* We have to check for a fairly pathological case. The operands of a
746 nested function are to be interpreted in the context of the enclosing
747 function. So if any are variably-sized, they will get remapped when the
748 enclosing function is inlined. But that remapping would also have to be
749 done in the types of the PARM_DECLs of the nested function, meaning the
750 argument types of that function will disagree with the arguments in the
751 calls to that function. So we'd either have to make a copy of the nested
752 function corresponding to each time the enclosing function was inlined or
753 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
754 function. The former is not practical. The latter would still require
755 detecting this case to know when to add the conversions. So, for now at
756 least, we don't inline such an enclosing function.
757
758 We have to do that check recursively, so here return indicating whether
759 FNDECL has such a nested function. ORIG_FN is the function we were
760 trying to inline to use for checking whether any argument is variably
761 modified by anything in it.
762
763 It would be better to do this in tree-inline.c so that we could give
764 the appropriate warning for why a function can't be inlined, but that's
765 too late since the nesting structure has already been flattened and
766 adding a flag just to record this fact seems a waste of a flag. */
767
768 static bool
769 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
770 {
771 struct cgraph_node *cgn = cgraph_node (fndecl);
772 tree arg;
773
774 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
775 {
776 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg))
777 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
778 return true;
779
780 if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
781 return true;
782 }
783
784 return false;
785 }
786
787 /* Construct our local datastructure describing the function nesting
788 tree rooted by CGN. */
789
static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = pointer_map_create ();
  info->var_map = pointer_map_create ();
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  /* Recursively build subtrees for the nested functions, linking each
     new child at the head of INFO->INNER.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See check_for_nested_with_variably_modified for a discussion of
     why this has to be done here rather than in tree-inline.c.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
814
815 /* Return an expression computing the static chain for TARGET_CONTEXT
816 from INFO->CONTEXT. Insert any necessary computations before TSI. */
817
818 static tree
819 get_static_chain (struct nesting_info *info, tree target_context,
820 tree_stmt_iterator *tsi)
821 {
822 struct nesting_info *i;
823 tree x;
824
825 if (info->context == target_context)
826 {
827 x = build_addr (info->frame_decl, target_context);
828 }
829 else
830 {
831 x = get_chain_decl (info);
832
833 for (i = info->outer; i->context != target_context; i = i->outer)
834 {
835 tree field = get_chain_field (i);
836
837 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
838 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
839 x = init_tmp_var (info, x, tsi);
840 }
841 }
842
843 return x;
844 }
845
846 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
847 frame as seen from INFO->CONTEXT. Insert any necessary computations
848 before TSI. */
849
850 static tree
851 get_frame_field (struct nesting_info *info, tree target_context,
852 tree field, tree_stmt_iterator *tsi)
853 {
854 struct nesting_info *i;
855 tree x;
856
857 if (info->context == target_context)
858 {
859 /* Make sure frame_decl gets created. */
860 (void) get_frame_type (info);
861 x = info->frame_decl;
862 }
863 else
864 {
865 x = get_chain_decl (info);
866
867 for (i = info->outer; i->context != target_context; i = i->outer)
868 {
869 tree field = get_chain_field (i);
870
871 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
872 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
873 x = init_tmp_var (info, x, tsi);
874 }
875
876 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
877 }
878
879 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
880 return x;
881 }
882
883 /* A subroutine of convert_nonlocal_reference. Create a local variable
884 in the nested function with DECL_VALUE_EXPR set to reference the true
885 variable in the parent function. This is used both for debug info
886 and in OpenMP lowering. */
887
static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;
  void **slot;

  slot = pointer_map_insert (info->var_map, decl);

  /* Reuse a previously created replacement decl, if any.  */
  if (*slot)
    return (tree) *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      /* Follow the __chain fields up to the frame declaring DECL.  */
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;

  /* The new decl evaluates to the frame access expression built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  TREE_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  return new_decl;
}
949
950 /* Called via walk_function+walk_tree, rewrite all references to VAR
951 and PARM_DECLs that belong to outer functions.
952
953 The rewrite will involve some number of structure accesses back up
954 the static chain. E.g. for a variable FOO up one nesting level it'll
955 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
956 indirections apply to decls for which use_pointer_in_frame is true. */
957
958 static bool convert_nonlocal_omp_clauses (tree *, struct walk_stmt_info *);
959
static tree
convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;
  tree save_local_var_chain;
  bitmap save_suppress;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  /* Always map to the debug decl first; only expand into an
	     explicit frame access when the decl is not suppressed
	     (an enclosing OMP clause may have remapped it already).  */
	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      /* Walk up the nesting chain to the level that declares T.  */
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->tsi);
	      /* Decls stored in the frame by pointer need one extra
		 dereference to reach the object.  */
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->tsi);
		  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
		}
	    }

	  /* In a value-only context, reduce the reference to a temporary,
	     saving vs. initializing depending on lhs/rhs position.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->tsi);
	      else
		x = init_tmp_var (info, x, &wi->tsi);
	    }

	  *tp = x;
	}
      break;

    case GOTO_EXPR:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL)
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
        FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* Walk the operand in address (non-value) context.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference, wi, NULL);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = tsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->tsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  /* Only the variable operands (offsets, indices, sizes) of each
	     component are walked here; the base is handled after the loop.  */
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference, wi,
			 NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
			 NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If any clause needed the static chain, make the chain decl
	 firstprivate in the parallel/task region.  */
      if (convert_nonlocal_omp_clauses (&OMP_TASKREG_CLAUSES (t), wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = OMP_TASKREG_CLAUSES (t);
	  OMP_TASKREG_CLAUSES (t) = c;
	}

      /* Collect new locals created while walking the body so they can
	 be declared inside the region, not in the enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference, info, &OMP_TASKREG_BODY (t));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain, OMP_TASKREG_BODY (t), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (&OMP_FOR_CLAUSES (t), wi);
      walk_omp_for (convert_nonlocal_reference, info, t);
      walk_body (convert_nonlocal_reference, info, &OMP_FOR_BODY (t));
      info->suppress_expansion = save_suppress;
      break;

    case OMP_SECTIONS:
    case OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (&OMP_CLAUSES (t), wi);
      walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
      info->suppress_expansion = save_suppress;
      break;

    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
      /* These constructs carry no clauses; just walk their bodies.  */
      walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1165
/* Helper for convert_nonlocal_reference.  Convert all of the OMP clauses
   in *PCLAUSES: decls declared in outer functions are remapped onto their
   non-local debug decls and recorded in INFO->suppress_expansion so that
   the subsequent body walk does not expand them into frame accesses.
   Returns true if any clause required access through the static chain.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy so the caller can restore the old bitmap afterwards.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* Reductions with a placeholder carry init/merge statement
	     sequences that are walked in the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_STMT (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables are never processed.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These clauses hold a scalar expression operand; convert it
	     as an rvalue.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	  /* Nothing to convert for these clauses.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the statement sequences attached to reduction
     and lastprivate clauses, noted above.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      /* Temporarily reparent the placeholder so decl-context
		 checks during the walk see the current function.  */
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_nonlocal_reference, info,
			 &OMP_CLAUSE_REDUCTION_INIT (clause));
	      walk_body (convert_nonlocal_reference, info,
			 &OMP_CLAUSE_REDUCTION_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference, info,
		     &OMP_CLAUSE_LASTPRIVATE_STMT (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
1267
1268 /* A subroutine of convert_local_reference. Create a local variable
1269 in the parent function with DECL_VALUE_EXPR set to reference the
1270 field in FRAME. This is used both for debug info and in OpenMP
1271 lowering. */
1272
static tree
get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
{
  tree x, new_decl;
  void **slot;

  /* Reuse a previously created mapping for DECL if there is one.  */
  slot = pointer_map_insert (info->var_map, decl);
  if (*slot)
    return (tree) *slot;

  /* Make sure frame_decl gets created.  */
  (void) get_frame_type (info);
  x = info->frame_decl;
  /* Build FRAME.FIELD, the expression the new decl will stand for.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);

  /* Create the replacement decl, copying the attributes of the
     original so debug info and qualifiers are preserved.  */
  new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;

  /* The new decl evaluates to the frame field.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
  *slot = new_decl;

  TREE_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Do not emit debug info twice.  */
  DECL_IGNORED_P (decl) = 1;

  return new_decl;
}
1311
1312 /* Called via walk_function+walk_tree, rewrite all references to VAR
1313 and PARM_DECLs that were referenced by inner nested functions.
1314 The rewrite will be a structure reference to the local frame variable. */
1315
1316 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1317
static tree
convert_local_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;
  tree save_local_var_chain;
  bitmap save_suppress;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Use the debug decl unless expansion is not suppressed, in
	     which case reference the frame field directly.  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->tsi);

	  /* In a value-only context, reduce the reference to a temporary.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->tsi);
	      else
		x = init_tmp_var (info, x, &wi->tsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      /* Walk the operand in address (non-value) context.  */
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;
	    
	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = tsi_gimplify_val ((struct nesting_info *)wi->info, t, &wi->tsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  /* Only the variable operands (offsets, indices, sizes) of each
	     component are walked here; the base is handled after the loop.  */
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference, wi,
			 NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
			 NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_local_reference, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If any clause referenced the frame, share the frame decl with
	 the parallel/task region.  */
      if (convert_local_omp_clauses (&OMP_TASKREG_CLAUSES (t), wi))
	{
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (OMP_CLAUSE_SHARED);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = OMP_TASKREG_CLAUSES (t);
	  OMP_TASKREG_CLAUSES (t) = c;
	}

      /* Collect new locals created while walking the body so they can
	 be declared inside the region, not in the enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference, info, &OMP_TASKREG_BODY (t));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain, OMP_TASKREG_BODY (t), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (&OMP_FOR_CLAUSES (t), wi);
      walk_omp_for (convert_local_reference, info, t);
      walk_body (convert_local_reference, info, &OMP_FOR_BODY (t));
      info->suppress_expansion = save_suppress;
      break;

    case OMP_SECTIONS:
    case OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (&OMP_CLAUSES (t), wi);
      walk_body (convert_local_reference, info, &OMP_BODY (t));
      info->suppress_expansion = save_suppress;
      break;

    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
      /* These constructs carry no clauses; just walk their bodies.  */
      walk_body (convert_local_reference, info, &OMP_BODY (t));
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1501
/* Helper for convert_local_reference.  Convert all of the OMP clauses
   in *PCLAUSES: decls that live in this function's frame are remapped
   onto their local debug decls and recorded in INFO->suppress_expansion
   so the subsequent body walk does not expand them into frame accesses.
   Returns true if the frame decl must be shared with the OMP region.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy so the caller can restore the old bitmap afterwards.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* Reductions with a placeholder carry init/merge statement
	     sequences that are walked in the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_STMT (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables are never processed.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These clauses hold a scalar expression operand; convert it
	     as an rvalue.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy, wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	  /* Nothing to convert for these clauses.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the statement sequences attached to reduction
     and lastprivate clauses, noted above.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      /* Temporarily reparent the placeholder so decl-context
		 checks during the walk see the current function.  */
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_local_reference, info,
			 &OMP_CLAUSE_REDUCTION_INIT (clause));
	      walk_body (convert_local_reference, info,
			 &OMP_CLAUSE_REDUCTION_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference, info,
		     &OMP_CLAUSE_LASTPRIVATE_STMT (clause));
	  break;

	default:
	  break;
	}

  return need_frame;
}
1608
1609 /* Called via walk_function+walk_tree, rewrite all GOTO_EXPRs that
1610 reference labels from outer functions. The rewrite will be a
1611 call to __builtin_nonlocal_goto. */
1612
static tree
convert_nl_goto_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, label, new_label, target_context, x, field;
  void **slot;

  *walk_subtrees = 0;
  /* Only GOTO_EXPRs whose destination is a LABEL_DECL declared in some
     enclosing function are rewritten; everything else is left alone.  */
  if (TREE_CODE (t) != GOTO_EXPR)
    return NULL_TREE;
  label = GOTO_DESTINATION (t);
  if (TREE_CODE (label) != LABEL_DECL)
    return NULL_TREE;
  target_context = decl_function_context (label);
  if (target_context == info->context)
    return NULL_TREE;

  /* Walk up the nesting chain to the function that declares the label.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  slot = pointer_map_insert (i->var_map, label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label ();
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = (tree) *slot;
  
  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, &wi->tsi);
  x = build_addr (x, target_context);
  x = tsi_gimplify_val (info, x, &wi->tsi);
  x = build_call_expr (implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO], 2,
		       build_addr (new_label, target_context), x);

  /* Replace the goto statement itself with the builtin call, keeping
     its source location.  */
  SET_EXPR_LOCUS (x, EXPR_LOCUS (tsi_stmt (wi->tsi)));
  *tsi_stmt_ptr (wi->tsi) = x;

  return NULL_TREE;
}
1664
1665 /* Called via walk_function+walk_tree, rewrite all LABEL_EXPRs that
1666 are referenced via nonlocal goto from a nested function. The rewrite
1667 will involve installing a newly generated DECL_NONLOCAL label, and
1668 (potentially) a branch around the rtl gunk that is assumed to be
1669 attached to such a label. */
1670
static tree
convert_nl_goto_receiver (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, label, new_label, x;
  tree_stmt_iterator tmp_tsi;
  void **slot;

  *walk_subtrees = 0;
  if (TREE_CODE (t) != LABEL_EXPR)
    return NULL_TREE;
  label = LABEL_EXPR_LABEL (t);

  /* Only labels that convert_nl_goto_reference registered in var_map
     need a non-local receiver.  */
  slot = pointer_map_contains (info->var_map, label);
  if (!slot)
    return NULL_TREE;

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_tsi = wi->tsi;
  tsi_prev (&tmp_tsi);
  if (tsi_end_p (tmp_tsi) || block_may_fallthru (tsi_stmt (tmp_tsi)))
    {
      x = build1 (GOTO_EXPR, void_type_node, label);
      tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);
    }

  /* Emit the DECL_NONLOCAL label just before the original one.  */
  new_label = (tree) *slot;
  x = build1 (LABEL_EXPR, void_type_node, new_label);
  tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);

  return NULL_TREE;
}
1705
1706 /* Called via walk_function+walk_tree, rewrite all references to addresses
1707 of nested functions that require the use of trampolines. The rewrite
1708 will involve a reference a trampoline generated for the occasion. */
1709
static tree
convert_tramp_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (DECL_NO_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->tsi);
      x = build_addr (x, target_context);
      x = tsi_gimplify_val (info, x, &wi->tsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      x = build_call_expr (implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE],
			   1, x);
      x = init_tmp_var (info, x, &wi->tsi);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->tsi);

      *tp = x;
      break;

    case CALL_EXPR:
      /* Only walk call arguments, lest we generate trampolines for
	 direct calls.  */
      {
	int nargs = call_expr_nargs (t);
	int i;
	for (i = 0; i < nargs; i++)
	  walk_tree (&CALL_EXPR_ARG (t, i), convert_tramp_reference, wi, NULL);
      }
      break;

    default:
      /* Descend into everything that is not a type or a decl.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
1788
1789 /* Called via walk_function+walk_tree, rewrite all CALL_EXPRs that
1790 reference nested functions to make sure that the static chain is
1791 set up properly for the call. */
1792
static tree
convert_call_expr (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, decl, target_context;
  char save_static_chain_added;
  int i;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case CALL_EXPR:
      /* Direct calls to nested functions get the appropriate static
	 chain value attached; indirect calls are left alone.  */
      decl = get_callee_fndecl (t);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && !DECL_NO_STATIC_CHAIN (decl))
	{
	  CALL_EXPR_STATIC_CHAIN (t)
	    = get_static_chain (info, target_context, &wi->tsi);
	  /* Bit 0: chain to this function; bit 1: chain to an outer one.  */
	  info->static_chain_added
	    |= (1 << (info->context != target_context));
	}
      break;

    case RETURN_EXPR:
    case GIMPLE_MODIFY_STMT:
    case WITH_SIZE_EXPR:
      /* Only return modify and with_size_expr may contain calls.  */
      *walk_subtrees = 1;
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
      /* Track which chain decls the body references, then make sure
	 they are passed into the region via OMP clauses.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_call_expr, info, &OMP_TASKREG_BODY (t));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = OMP_TASKREG_CLAUSES (t); c; c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_TASKREG_CLAUSES (t);
	      OMP_TASKREG_CLAUSES (t) = c;
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case OMP_FOR:
      walk_body (convert_call_expr, info, &OMP_FOR_PRE_BODY (t));
      /* FALLTHRU */
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      walk_body (convert_call_expr, info, &OMP_BODY (t));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
1873
1874 /* Walk the nesting tree starting with ROOT, depth first. Convert all
1875 trampolines and call expressions. On the way back up, determine if
1876 a nested function actually uses its static chain; if not, remember that. */
1877
1878 static void
1879 convert_all_function_calls (struct nesting_info *root)
1880 {
1881 do
1882 {
1883 if (root->inner)
1884 convert_all_function_calls (root->inner);
1885
1886 walk_function (convert_tramp_reference, root);
1887 walk_function (convert_call_expr, root);
1888
1889 /* If the function does not use a static chain, then remember that. */
1890 if (root->outer && !root->chain_decl && !root->chain_field)
1891 DECL_NO_STATIC_CHAIN (root->context) = 1;
1892 else
1893 gcc_assert (!DECL_NO_STATIC_CHAIN (root->context));
1894
1895 root = root->next;
1896 }
1897 while (root);
1898 }
1899
1900 /* Do "everything else" to clean up or complete state collected by the
1901 various walking passes -- lay out the types and decls, generate code
1902 to initialize the frame decl, store critical expressions in the
1903 struct function for rtl to find. */
1904
static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  tree stmt_list = NULL;
  tree context = root->context;
  struct function *sf;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);
    }

  /* If any parameters were referenced non-locally, then we need to 
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = TREE_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Store either the parameter's address or its value into the
	     corresponding frame field.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  x = build_gimple_modify_stmt (y, x);
	  append_to_statement_list (x, &stmt_list);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      x = build_gimple_modify_stmt (x, get_chain_decl (root));
      append_to_statement_list (x, &stmt_list);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  /* arg3 is the static chain the trampoline passes along; it is
	     null when the nested function never uses its chain.  */
	  if (DECL_NO_STATIC_CHAIN (i->context))
	    arg3 = null_pointer_node;
	  else
	    arg3 = build_addr (root->frame_decl, context);

	  /* arg2 is the address of the nested function itself.  */
	  arg2 = build_addr (i->context, context);

	  /* arg1 is the address of the trampoline field in the frame.  */
	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
	  x = build_call_expr (x, 3, arg1, arg2, arg3);
	  append_to_statement_list (x, &stmt_list);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      annotate_all_with_locus (&stmt_list,
			       DECL_SOURCE_LOCATION (context));
      /* Prepend the initializations to the existing function body.  */
      append_to_statement_list (BIND_EXPR_BODY (DECL_SAVED_TREE (context)),
				&stmt_list);
      BIND_EXPR_BODY (DECL_SAVED_TREE (context)) = stmt_list;
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain, DECL_SAVED_TREE (root->context),
		  false);
  if (root->debug_var_chain)
    declare_vars (root->debug_var_chain, DECL_SAVED_TREE (root->context),
		  true);

  /* Dump the translated tree function.  */
  dump_function (TDI_nested, root->context);
}
2026
2027 static void
2028 finalize_nesting_tree (struct nesting_info *root)
2029 {
2030 do
2031 {
2032 if (root->inner)
2033 finalize_nesting_tree (root->inner);
2034 finalize_nesting_tree_1 (root);
2035 root = root->next;
2036 }
2037 while (root);
2038 }
2039
2040 /* Unnest the nodes and pass them to cgraph. */
2041
2042 static void
2043 unnest_nesting_tree_1 (struct nesting_info *root)
2044 {
2045 struct cgraph_node *node = cgraph_node (root->context);
2046
2047 /* For nested functions update the cgraph to reflect unnesting.
2048 We also delay finalizing of these functions up to this point. */
2049 if (node->origin)
2050 {
2051 cgraph_unnest_node (cgraph_node (root->context));
2052 cgraph_finalize_function (root->context, true);
2053 }
2054 }
2055
2056 static void
2057 unnest_nesting_tree (struct nesting_info *root)
2058 {
2059 do
2060 {
2061 if (root->inner)
2062 unnest_nesting_tree (root->inner);
2063 unnest_nesting_tree_1 (root);
2064 root = root->next;
2065 }
2066 while (root);
2067 }
2068
2069 /* Free the data structures allocated during this pass. */
2070
2071 static void
2072 free_nesting_tree (struct nesting_info *root)
2073 {
2074 struct nesting_info *next;
2075 do
2076 {
2077 if (root->inner)
2078 free_nesting_tree (root->inner);
2079 pointer_map_destroy (root->var_map);
2080 pointer_map_destroy (root->field_map);
2081 next = root->next;
2082 free (root);
2083 root = next;
2084 }
2085 while (root);
2086 }
2087
2088 /* Main entry point for this pass. Process FNDECL and all of its nested
2089 subroutines and turn them into something less tightly bound. */
2090
void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node (fndecl);
  if (!cgn->nested)
    return;

  /* Scratch bitmaps used during the pass live on this obstack; it is
     released below after the nesting_info structures are freed.  */
  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* Rewrite references in each function in the nesting tree: first
     nonlocal references, then local ones, then nonlocal gotos and
     their receivers, and finally the calls between the functions.
     NOTE(review): the relative order of these walks appears to be
     significant — confirm before reordering.  */
  walk_all_functions (convert_nonlocal_reference, root);
  walk_all_functions (convert_local_reference, root);
  walk_all_functions (convert_nl_goto_reference, root);
  walk_all_functions (convert_nl_goto_receiver, root);
  convert_all_function_calls (root);

  /* Emit the per-function initialization code (static chain,
     trampolines, nonlocal-goto save area), hand the now-unnested
     functions to cgraph, and release all pass-local memory.  */
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);
  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);
}
2114
2115 #include "gt-tree-nested.h"