/* gcc/tree-nested.c */
1 /* Nested function decomposition for trees.
2 Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "function.h"
29 #include "tree-dump.h"
30 #include "tree-inline.h"
31 #include "tree-gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-flow.h"
34 #include "cgraph.h"
35 #include "expr.h"
36 #include "langhooks.h"
37 #include "pointer-set.h"
38 #include "ggc.h"
39
40
41 /* The object of this pass is to lower the representation of a set of nested
42 functions in order to expose all of the gory details of the various
43 nonlocal references. We want to do this sooner rather than later, in
44 order to give us more freedom in emitting all of the functions in question.
45
46 Back in olden times, when gcc was young, we developed an insanely
47 complicated scheme whereby variables which were referenced nonlocally
48 were forced to live in the stack of the declaring function, and then
49 the nested functions magically discovered where these variables were
50 placed. In order for this scheme to function properly, it required
51 that the outer function be partially expanded, then we switch to
52 compiling the inner function, and once done with those we switch back
53 to compiling the outer function. Such delicate ordering requirements
54 makes it difficult to do whole translation unit optimizations
55 involving such functions.
56
57 The implementation here is much more direct. Everything that can be
58 referenced by an inner function is a member of an explicitly created
59 structure herein called the "nonlocal frame struct". The incoming
60 static chain for a nested function is a pointer to this struct in
61 the parent. In this way, we settle on known offsets from a known
62 base, and so are decoupled from the logic that places objects in the
63 function's stack frame. More importantly, we don't have to wait for
64 that to happen -- since the compilation of the inner function is no
65 longer tied to a real stack frame, the nonlocal frame struct can be
66 allocated anywhere. Which means that the outer function is now
67 inlinable.
68
69 Theory of operation here is very simple. Iterate over all the
70 statements in all the functions (depth first) several times,
71 allocating structures and fields on demand. In general we want to
72 examine inner functions first, so that we can avoid making changes
73 to outer functions which are unnecessary.
74
75 The order of the passes matters a bit, in that later passes will be
76 skipped if it is discovered that the functions don't actually interact
77 at all. That is, they're nested in the lexical sense but could have
78 been written as independent functions without change. */
79
80
/* Per-function record describing one node of the nesting tree.  */
struct nesting_info
{
  /* Tree links: the enclosing function's info, the first directly
     nested function's info, and the next sibling at this level.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* DECL -> FIELD_DECL map for variables moved into the frame struct,
     and DECL -> replacement-decl map (trampolines, debug decls).  */
  struct pointer_map_t *field_map;
  struct pointer_map_t *var_map;
  /* DECL_UIDs for which frame-field expansion is suppressed (saved and
     restored around the OMP_* cases of the conversion walkers).  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this record describes.  */
  tree context;
  /* Chain of temporaries created by create_tmp_var_for, to be declared
     later; chain of debug-only VAR_DECLs with DECL_VALUE_EXPR set.  */
  tree new_local_var_chain;
  tree debug_var_chain;
  /* The non-local frame RECORD_TYPE and the local VAR_DECL holding an
     instance of it (see get_frame_type).  */
  tree frame_type;
  tree frame_decl;
  /* FIELD_DECL in the frame holding the parent's static chain, and the
     PARM_DECL representing this function's incoming static chain.  */
  tree chain_field;
  tree chain_decl;
  /* FIELD_DECL holding the non-local goto save area (jmp_buf-like).  */
  tree nl_goto_field;

  /* True if any PARM_DECL was given a frame field.  */
  bool any_parm_remapped;
  /* True if any trampoline field was created.  */
  bool any_tramp_created;
  /* NOTE(review): not referenced in this chunk; presumably flags how a
     static chain was added for this context -- confirm in later code.  */
  char static_chain_added;
};
104
105
106 /* Obstack used for the bitmaps in the struct above. */
107 static struct bitmap_obstack nesting_info_bitmap_obstack;
108
109
110 /* We're working in so many different function contexts simultaneously,
111 that create_tmp_var is dangerous. Prevent mishap. */
112 #define create_tmp_var cant_use_create_tmp_var_here_dummy
113
114 /* Like create_tmp_var, except record the variable for registration at
115 the given nesting level. */
116
117 static tree
118 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
119 {
120 tree tmp_var;
121
122 /* If the type is of variable size or a type which must be created by the
123 frontend, something is wrong. Note that we explicitly allow
124 incomplete types here, since we create them ourselves here. */
125 gcc_assert (!TREE_ADDRESSABLE (type));
126 gcc_assert (!TYPE_SIZE_UNIT (type)
127 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
128
129 tmp_var = create_tmp_var_raw (type, prefix);
130 DECL_CONTEXT (tmp_var) = info->context;
131 TREE_CHAIN (tmp_var) = info->new_local_var_chain;
132 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
133 if (TREE_CODE (type) == COMPLEX_TYPE
134 || TREE_CODE (type) == VECTOR_TYPE)
135 DECL_GIMPLE_REG_P (tmp_var) = 1;
136
137 info->new_local_var_chain = tmp_var;
138
139 return tmp_var;
140 }
141
142 /* Take the address of EXP to be used within function CONTEXT.
143 Mark it for addressability as necessary. */
144
145 tree
146 build_addr (tree exp, tree context)
147 {
148 tree base = exp;
149 tree save_context;
150 tree retval;
151
152 while (handled_component_p (base))
153 base = TREE_OPERAND (base, 0);
154
155 if (DECL_P (base))
156 TREE_ADDRESSABLE (base) = 1;
157
158 /* Building the ADDR_EXPR will compute a set of properties for
159 that ADDR_EXPR. Those properties are unfortunately context
160 specific. ie, they are dependent on CURRENT_FUNCTION_DECL.
161
162 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
163 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
164 way the properties are for the ADDR_EXPR are computed properly. */
165 save_context = current_function_decl;
166 current_function_decl = context;
167 retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
168 current_function_decl = save_context;
169 return retval;
170 }
171
172 /* Insert FIELD into TYPE, sorted by alignment requirements. */
173
174 void
175 insert_field_into_struct (tree type, tree field)
176 {
177 tree *p;
178
179 DECL_CONTEXT (field) = type;
180
181 for (p = &TYPE_FIELDS (type); *p ; p = &TREE_CHAIN (*p))
182 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
183 break;
184
185 TREE_CHAIN (field) = *p;
186 *p = field;
187 }
188
189 /* Build or return the RECORD_TYPE that describes the frame state that is
190 shared between INFO->CONTEXT and its nested functions. This record will
191 not be complete until finalize_nesting_tree; up until that point we'll
192 be adding fields as necessary.
193
194 We also build the DECL that represents this frame in the function. */
195
196 static tree
197 get_frame_type (struct nesting_info *info)
198 {
199 tree type = info->frame_type;
200 if (!type)
201 {
202 char *name;
203
204 type = make_node (RECORD_TYPE);
205
206 name = concat ("FRAME.",
207 IDENTIFIER_POINTER (DECL_NAME (info->context)),
208 NULL);
209 TYPE_NAME (type) = get_identifier (name);
210 free (name);
211
212 info->frame_type = type;
213 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
214
215 /* ??? Always make it addressable for now, since it is meant to
216 be pointed to by the static chain pointer. This pessimizes
217 when it turns out that no static chains are needed because
218 the nested functions referencing non-local variables are not
219 reachable, but the true pessimization is to create the non-
220 local frame structure in the first place. */
221 TREE_ADDRESSABLE (info->frame_decl) = 1;
222 }
223 return type;
224 }
225
226 /* Return true if DECL should be referenced by pointer in the non-local
227 frame structure. */
228
229 static bool
230 use_pointer_in_frame (tree decl)
231 {
232 if (TREE_CODE (decl) == PARM_DECL)
233 {
234 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
235 sized decls, and inefficient to copy large aggregates. Don't bother
236 moving anything but scalar variables. */
237 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
238 }
239 else
240 {
241 /* Variable sized types make things "interesting" in the frame. */
242 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
243 }
244 }
245
246 /* Given DECL, a non-locally accessed variable, find or create a field
247 in the non-local frame structure for the given nesting context. */
248
249 static tree
250 lookup_field_for_decl (struct nesting_info *info, tree decl,
251 enum insert_option insert)
252 {
253 void **slot;
254
255 if (insert == NO_INSERT)
256 {
257 slot = pointer_map_contains (info->field_map, decl);
258 return slot ? *slot : NULL;
259 }
260
261 slot = pointer_map_insert (info->field_map, decl);
262 if (!*slot)
263 {
264 tree field = make_node (FIELD_DECL);
265 DECL_NAME (field) = DECL_NAME (decl);
266
267 if (use_pointer_in_frame (decl))
268 {
269 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
270 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
271 DECL_NONADDRESSABLE_P (field) = 1;
272 }
273 else
274 {
275 TREE_TYPE (field) = TREE_TYPE (decl);
276 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
277 DECL_ALIGN (field) = DECL_ALIGN (decl);
278 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
279 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
280 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
281 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
282 }
283
284 insert_field_into_struct (get_frame_type (info), field);
285 *slot = field;
286
287 if (TREE_CODE (decl) == PARM_DECL)
288 info->any_parm_remapped = true;
289 }
290
291 return *slot;
292 }
293
294 /* Build or return the variable that holds the static chain within
295 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
296
297 static tree
298 get_chain_decl (struct nesting_info *info)
299 {
300 tree decl = info->chain_decl;
301 if (!decl)
302 {
303 tree type;
304
305 type = get_frame_type (info->outer);
306 type = build_pointer_type (type);
307
308 /* Note that this variable is *not* entered into any BIND_EXPR;
309 the construction of this variable is handled specially in
310 expand_function_start and initialize_inlined_parameters.
311 Note also that it's represented as a parameter. This is more
312 close to the truth, since the initial value does come from
313 the caller. */
314 decl = build_decl (PARM_DECL, create_tmp_var_name ("CHAIN"), type);
315 DECL_ARTIFICIAL (decl) = 1;
316 DECL_IGNORED_P (decl) = 1;
317 TREE_USED (decl) = 1;
318 DECL_CONTEXT (decl) = info->context;
319 DECL_ARG_TYPE (decl) = type;
320
321 /* Tell tree-inline.c that we never write to this variable, so
322 it can copy-prop the replacement value immediately. */
323 TREE_READONLY (decl) = 1;
324
325 info->chain_decl = decl;
326 }
327 return decl;
328 }
329
330 /* Build or return the field within the non-local frame state that holds
331 the static chain for INFO->CONTEXT. This is the way to walk back up
332 multiple nesting levels. */
333
334 static tree
335 get_chain_field (struct nesting_info *info)
336 {
337 tree field = info->chain_field;
338 if (!field)
339 {
340 tree type = build_pointer_type (get_frame_type (info->outer));
341
342 field = make_node (FIELD_DECL);
343 DECL_NAME (field) = get_identifier ("__chain");
344 TREE_TYPE (field) = type;
345 DECL_ALIGN (field) = TYPE_ALIGN (type);
346 DECL_NONADDRESSABLE_P (field) = 1;
347
348 insert_field_into_struct (get_frame_type (info), field);
349
350 info->chain_field = field;
351 }
352 return field;
353 }
354
355 /* Copy EXP into a temporary. Allocate the temporary in the context of
356 INFO and insert the initialization statement before TSI. */
357
358 static tree
359 init_tmp_var (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
360 {
361 tree t, stmt;
362
363 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
364 stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (t), t, exp);
365 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
366 tsi_link_before (tsi, stmt, TSI_SAME_STMT);
367
368 return t;
369 }
370
371 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
372
373 static tree
374 tsi_gimplify_val (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
375 {
376 if (is_gimple_val (exp))
377 return exp;
378 else
379 return init_tmp_var (info, exp, tsi);
380 }
381
382 /* Similarly, but copy from the temporary and insert the statement
383 after the iterator. */
384
385 static tree
386 save_tmp_var (struct nesting_info *info, tree exp,
387 tree_stmt_iterator *tsi)
388 {
389 tree t, stmt;
390
391 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
392 stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (t), exp, t);
393 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
394 tsi_link_after (tsi, stmt, TSI_SAME_STMT);
395
396 return t;
397 }
398
399 /* Build or return the type used to represent a nested function trampoline. */
400
401 static GTY(()) tree trampoline_type;
402
403 static tree
404 get_trampoline_type (void)
405 {
406 tree record, t;
407 unsigned align, size;
408
409 if (trampoline_type)
410 return trampoline_type;
411
412 align = TRAMPOLINE_ALIGNMENT;
413 size = TRAMPOLINE_SIZE;
414
415 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
416 then allocate extra space so that we can do dynamic alignment. */
417 if (align > STACK_BOUNDARY)
418 {
419 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
420 align = STACK_BOUNDARY;
421 }
422
423 t = build_index_type (build_int_cst (NULL_TREE, size - 1));
424 t = build_array_type (char_type_node, t);
425 t = build_decl (FIELD_DECL, get_identifier ("__data"), t);
426 DECL_ALIGN (t) = align;
427 DECL_USER_ALIGN (t) = 1;
428
429 record = make_node (RECORD_TYPE);
430 TYPE_NAME (record) = get_identifier ("__builtin_trampoline");
431 TYPE_FIELDS (record) = t;
432 layout_type (record);
433
434 return record;
435 }
436
437 /* Given DECL, a nested function, find or create a field in the non-local
438 frame structure for a trampoline for this function. */
439
440 static tree
441 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
442 enum insert_option insert)
443 {
444 void **slot;
445
446 if (insert == NO_INSERT)
447 {
448 slot = pointer_map_contains (info->var_map, decl);
449 return slot ? *slot : NULL;
450 }
451
452 slot = pointer_map_insert (info->var_map, decl);
453 if (!*slot)
454 {
455 tree field = make_node (FIELD_DECL);
456 DECL_NAME (field) = DECL_NAME (decl);
457 TREE_TYPE (field) = get_trampoline_type ();
458 TREE_ADDRESSABLE (field) = 1;
459
460 insert_field_into_struct (get_frame_type (info), field);
461 *slot = field;
462
463 info->any_tramp_created = true;
464 }
465
466 return *slot;
467 }
468
469 /* Build or return the field within the non-local frame state that holds
470 the non-local goto "jmp_buf". The buffer itself is maintained by the
471 rtl middle-end as dynamic stack space is allocated. */
472
473 static tree
474 get_nl_goto_field (struct nesting_info *info)
475 {
476 tree field = info->nl_goto_field;
477 if (!field)
478 {
479 unsigned size;
480 tree type;
481
482 /* For __builtin_nonlocal_goto, we need N words. The first is the
483 frame pointer, the rest is for the target's stack pointer save
484 area. The number of words is controlled by STACK_SAVEAREA_MODE;
485 not the best interface, but it'll do for now. */
486 if (Pmode == ptr_mode)
487 type = ptr_type_node;
488 else
489 type = lang_hooks.types.type_for_mode (Pmode, 1);
490
491 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
492 size = size / GET_MODE_SIZE (Pmode);
493 size = size + 1;
494
495 type = build_array_type
496 (type, build_index_type (build_int_cst (NULL_TREE, size)));
497
498 field = make_node (FIELD_DECL);
499 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
500 TREE_TYPE (field) = type;
501 DECL_ALIGN (field) = TYPE_ALIGN (type);
502 TREE_ADDRESSABLE (field) = 1;
503
504 insert_field_into_struct (get_frame_type (info), field);
505
506 info->nl_goto_field = field;
507 }
508
509 return field;
510 }
511 \f
512 /* Helper function for walk_stmts. Walk output operands of an ASM_EXPR. */
513
514 static void
515 walk_asm_expr (struct walk_stmt_info *wi, tree stmt)
516 {
517 int noutputs = list_length (ASM_OUTPUTS (stmt));
518 const char **oconstraints
519 = (const char **) alloca ((noutputs) * sizeof (const char *));
520 int i;
521 tree link;
522 const char *constraint;
523 bool allows_mem, allows_reg, is_inout;
524
525 wi->is_lhs = true;
526 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
527 {
528 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
529 oconstraints[i] = constraint;
530 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
531 &allows_reg, &is_inout);
532
533 wi->val_only = (allows_reg || !allows_mem);
534 walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
535 }
536
537 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
538 {
539 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
540 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
541 oconstraints, &allows_mem, &allows_reg);
542
543 wi->val_only = (allows_reg || !allows_mem);
544 /* Although input "m" is not really a LHS, we need a lvalue. */
545 wi->is_lhs = !wi->val_only;
546 walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
547 }
548
549 wi->is_lhs = false;
550 wi->val_only = true;
551 }
552
/* Iterate over all sub-statements of *TP calling walk_tree with
   WI->CALLBACK for every sub-expression in each statement found.
   The val_only/is_lhs flags in WI are threaded through the cases in a
   specific order; keep that ordering intact when modifying.  */

void
walk_stmts (struct walk_stmt_info *wi, tree *tp)
{
  tree t = *tp;
  int walk_subtrees;

  if (!t)
    return;

  /* Track source location for diagnostics when requested.  */
  if (wi->want_locations && EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  switch (TREE_CODE (t))
    {
    case STATEMENT_LIST:
      {
	/* Recurse into each statement, remembering the iterator so
	   callbacks can insert statements around the current one.  */
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  {
	    wi->tsi = i;
	    walk_stmts (wi, tsi_stmt_ptr (i));
	  }
      }
      break;

    case COND_EXPR:
      /* The condition is an expression; the arms are statements.  */
      walk_tree (&COND_EXPR_COND (t), wi->callback, wi, NULL);
      walk_stmts (wi, &COND_EXPR_THEN (t));
      walk_stmts (wi, &COND_EXPR_ELSE (t));
      break;
    case CATCH_EXPR:
      walk_stmts (wi, &CATCH_BODY (t));
      break;
    case EH_FILTER_EXPR:
      walk_stmts (wi, &EH_FILTER_FAILURE (t));
      break;
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
      walk_stmts (wi, &TREE_OPERAND (t, 0));
      walk_stmts (wi, &TREE_OPERAND (t, 1));
      break;

    case BIND_EXPR:
      /* Optionally let the callback see the BIND_EXPR itself; it may
	 suppress the recursion by clearing walk_subtrees.  */
      if (wi->want_bind_expr)
	{
	  walk_subtrees = 1;
	  wi->callback (tp, &walk_subtrees, wi);
	  if (!walk_subtrees)
	    break;
	}
      walk_stmts (wi, &BIND_EXPR_BODY (t));
      break;

    case RETURN_EXPR:
      /* Likewise for RETURN_EXPR when the caller wants it.  */
      if (wi->want_return_expr)
	{
	  walk_subtrees = 1;
	  wi->callback (tp, &walk_subtrees, wi);
	  if (!walk_subtrees)
	    break;
	}
      walk_stmts (wi, &TREE_OPERAND (t, 0));
      break;

    case GIMPLE_MODIFY_STMT:
      /* A formal temporary lhs may use a COMPONENT_REF rhs.  */
      wi->val_only = !is_gimple_formal_tmp_var (GIMPLE_STMT_OPERAND (t, 0));
      walk_tree (&GIMPLE_STMT_OPERAND (t, 1), wi->callback, wi, NULL);

      /* If the rhs is appropriate for a memory, we may use a
	 COMPONENT_REF on the lhs.  */
      wi->val_only = !is_gimple_mem_rhs (GIMPLE_STMT_OPERAND (t, 1));
      wi->is_lhs = true;
      walk_tree (&GIMPLE_STMT_OPERAND (t, 0), wi->callback, wi, NULL);

      /* Restore the default walking state.  */
      wi->val_only = true;
      wi->is_lhs = false;
      break;

    case ASM_EXPR:
      walk_asm_expr (wi, *tp);
      break;

    default:
      /* Any other node is a plain expression.  */
      wi->val_only = true;
      walk_tree (tp, wi->callback, wi, NULL);
      break;
    }
}
645
646 /* Invoke CALLBACK on all statements of *STMT_P. */
647
648 static void
649 walk_body (walk_tree_fn callback, struct nesting_info *info, tree *stmt_p)
650 {
651 struct walk_stmt_info wi;
652
653 memset (&wi, 0, sizeof (wi));
654 wi.callback = callback;
655 wi.info = info;
656 wi.val_only = true;
657
658 walk_stmts (&wi, stmt_p);
659 }
660
661 /* Invoke CALLBACK on all statements of INFO->CONTEXT. */
662
663 static inline void
664 walk_function (walk_tree_fn callback, struct nesting_info *info)
665 {
666 walk_body (callback, info, &DECL_SAVED_TREE (info->context));
667 }
668
669 /* Similarly for ROOT and all functions nested underneath, depth first. */
670
671 static void
672 walk_all_functions (walk_tree_fn callback, struct nesting_info *root)
673 {
674 do
675 {
676 if (root->inner)
677 walk_all_functions (callback, root->inner);
678 walk_function (callback, root);
679 root = root->next;
680 }
681 while (root);
682 }
683 \f
684 /* We have to check for a fairly pathological case. The operands of function
685 nested function are to be interpreted in the context of the enclosing
686 function. So if any are variably-sized, they will get remapped when the
687 enclosing function is inlined. But that remapping would also have to be
688 done in the types of the PARM_DECLs of the nested function, meaning the
689 argument types of that function will disagree with the arguments in the
690 calls to that function. So we'd either have to make a copy of the nested
691 function corresponding to each time the enclosing function was inlined or
692 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
693 function. The former is not practical. The latter would still require
694 detecting this case to know when to add the conversions. So, for now at
695 least, we don't inline such an enclosing function.
696
697 We have to do that check recursively, so here return indicating whether
698 FNDECL has such a nested function. ORIG_FN is the function we were
699 trying to inline to use for checking whether any argument is variably
700 modified by anything in it.
701
702 It would be better to do this in tree-inline.c so that we could give
703 the appropriate warning for why a function can't be inlined, but that's
704 too late since the nesting structure has already been flattened and
705 adding a flag just to record this fact seems a waste of a flag. */
706
707 static bool
708 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
709 {
710 struct cgraph_node *cgn = cgraph_node (fndecl);
711 tree arg;
712
713 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
714 {
715 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg))
716 if (variably_modified_type_p (TREE_TYPE (arg), 0), orig_fndecl)
717 return true;
718
719 if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
720 return true;
721 }
722
723 return false;
724 }
725
726 /* Construct our local datastructure describing the function nesting
727 tree rooted by CGN. */
728
729 static struct nesting_info *
730 create_nesting_tree (struct cgraph_node *cgn)
731 {
732 struct nesting_info *info = XCNEW (struct nesting_info);
733 info->field_map = pointer_map_create ();
734 info->var_map = pointer_map_create ();
735 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
736 info->context = cgn->decl;
737
738 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
739 {
740 struct nesting_info *sub = create_nesting_tree (cgn);
741 sub->outer = info;
742 sub->next = info->inner;
743 info->inner = sub;
744 }
745
746 /* See discussion at check_for_nested_with_variably_modified for a
747 discussion of why this has to be here. */
748 if (check_for_nested_with_variably_modified (info->context, info->context))
749 DECL_UNINLINABLE (info->context) = true;
750
751 return info;
752 }
753
754 /* Return an expression computing the static chain for TARGET_CONTEXT
755 from INFO->CONTEXT. Insert any necessary computations before TSI. */
756
757 static tree
758 get_static_chain (struct nesting_info *info, tree target_context,
759 tree_stmt_iterator *tsi)
760 {
761 struct nesting_info *i;
762 tree x;
763
764 if (info->context == target_context)
765 {
766 x = build_addr (info->frame_decl, target_context);
767 }
768 else
769 {
770 x = get_chain_decl (info);
771
772 for (i = info->outer; i->context != target_context; i = i->outer)
773 {
774 tree field = get_chain_field (i);
775
776 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
777 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
778 x = init_tmp_var (info, x, tsi);
779 }
780 }
781
782 return x;
783 }
784
785 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
786 frame as seen from INFO->CONTEXT. Insert any necessary computations
787 before TSI. */
788
789 static tree
790 get_frame_field (struct nesting_info *info, tree target_context,
791 tree field, tree_stmt_iterator *tsi)
792 {
793 struct nesting_info *i;
794 tree x;
795
796 if (info->context == target_context)
797 {
798 /* Make sure frame_decl gets created. */
799 (void) get_frame_type (info);
800 x = info->frame_decl;
801 }
802 else
803 {
804 x = get_chain_decl (info);
805
806 for (i = info->outer; i->context != target_context; i = i->outer)
807 {
808 tree field = get_chain_field (i);
809
810 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
811 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
812 x = init_tmp_var (info, x, tsi);
813 }
814
815 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
816 }
817
818 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
819 return x;
820 }
821
/* A subroutine of convert_nonlocal_reference.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OpenMP lowering.  Results are cached in INFO->var_map.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;
  void **slot;

  /* Return the cached replacement if one was already made.  */
  slot = pointer_map_insert (info->var_map, decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries
     (the resulting expression is for a DECL_VALUE_EXPR, not for
     insertion into the statement stream).  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      /* Walk the static chain up to the target context.  */
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
    }

  /* Select the frame field for DECL, adding an extra indirection when
     the frame holds a pointer to the object rather than the object.  */
  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;

  /* The new decl evaluates to the frame-field expression built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  /* Cache the replacement and chain it for later declaration.  */
  *slot = new_decl;
  TREE_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  return new_decl;
}
888
889 /* Called via walk_function+walk_tree, rewrite all references to VAR
890 and PARM_DECLs that belong to outer functions.
891
892 The rewrite will involve some number of structure accesses back up
893 the static chain. E.g. for a variable FOO up one nesting level it'll
894 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
895 indirections apply to decls for which use_pointer_in_frame is true. */
896
897 static bool convert_nonlocal_omp_clauses (tree *, struct walk_stmt_info *);
898
899 static tree
900 convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
901 {
902 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
903 struct nesting_info *info = wi->info;
904 tree t = *tp;
905 tree save_local_var_chain;
906 bitmap save_suppress;
907
908 *walk_subtrees = 0;
909 switch (TREE_CODE (t))
910 {
911 case VAR_DECL:
912 /* Non-automatic variables are never processed. */
913 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
914 break;
915 /* FALLTHRU */
916
917 case PARM_DECL:
918 if (decl_function_context (t) != info->context)
919 {
920 tree x;
921 wi->changed = true;
922
923 x = get_nonlocal_debug_decl (info, t);
924 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
925 {
926 tree target_context = decl_function_context (t);
927 struct nesting_info *i;
928 for (i = info->outer; i->context != target_context; i = i->outer)
929 continue;
930 x = lookup_field_for_decl (i, t, INSERT);
931 x = get_frame_field (info, target_context, x, &wi->tsi);
932 if (use_pointer_in_frame (t))
933 {
934 x = init_tmp_var (info, x, &wi->tsi);
935 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
936 }
937 }
938
939 if (wi->val_only)
940 {
941 if (wi->is_lhs)
942 x = save_tmp_var (info, x, &wi->tsi);
943 else
944 x = init_tmp_var (info, x, &wi->tsi);
945 }
946
947 *tp = x;
948 }
949 break;
950
951 case GOTO_EXPR:
952 /* Don't walk non-local gotos for now. */
953 if (TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL)
954 {
955 *walk_subtrees = 1;
956 wi->val_only = true;
957 wi->is_lhs = false;
958 }
959 break;
960
961 case LABEL_DECL:
962 /* We're taking the address of a label from a parent function, but
963 this is not itself a non-local goto. Mark the label such that it
964 will not be deleted, much as we would with a label address in
965 static storage. */
966 if (decl_function_context (t) != info->context)
967 FORCED_LABEL (t) = 1;
968 break;
969
970 case ADDR_EXPR:
971 {
972 bool save_val_only = wi->val_only;
973
974 wi->val_only = false;
975 wi->is_lhs = false;
976 wi->changed = false;
977 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference, wi, NULL);
978 wi->val_only = true;
979
980 if (wi->changed)
981 {
982 tree save_context;
983
984 /* If we changed anything, then TREE_INVARIANT is be wrong,
985 since we're no longer directly referencing a decl. */
986 save_context = current_function_decl;
987 current_function_decl = info->context;
988 recompute_tree_invariant_for_addr_expr (t);
989 current_function_decl = save_context;
990
991 /* If the callback converted the address argument in a context
992 where we only accept variables (and min_invariant, presumably),
993 then compute the address into a temporary. */
994 if (save_val_only)
995 *tp = tsi_gimplify_val (wi->info, t, &wi->tsi);
996 }
997 }
998 break;
999
1000 case REALPART_EXPR:
1001 case IMAGPART_EXPR:
1002 case COMPONENT_REF:
1003 case ARRAY_REF:
1004 case ARRAY_RANGE_REF:
1005 case BIT_FIELD_REF:
1006 /* Go down this entire nest and just look at the final prefix and
1007 anything that describes the references. Otherwise, we lose track
1008 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1009 wi->val_only = true;
1010 wi->is_lhs = false;
1011 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1012 {
1013 if (TREE_CODE (t) == COMPONENT_REF)
1014 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
1015 NULL);
1016 else if (TREE_CODE (t) == ARRAY_REF
1017 || TREE_CODE (t) == ARRAY_RANGE_REF)
1018 {
1019 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
1020 NULL);
1021 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
1022 NULL);
1023 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference, wi,
1024 NULL);
1025 }
1026 else if (TREE_CODE (t) == BIT_FIELD_REF)
1027 {
1028 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
1029 NULL);
1030 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
1031 NULL);
1032 }
1033 }
1034 wi->val_only = false;
1035 walk_tree (tp, convert_nonlocal_reference, wi, NULL);
1036 break;
1037
1038 case OMP_PARALLEL:
1039 save_suppress = info->suppress_expansion;
1040 if (convert_nonlocal_omp_clauses (&OMP_PARALLEL_CLAUSES (t), wi))
1041 {
1042 tree c, decl;
1043 decl = get_chain_decl (info);
1044 c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
1045 OMP_CLAUSE_DECL (c) = decl;
1046 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
1047 OMP_PARALLEL_CLAUSES (t) = c;
1048 }
1049
1050 save_local_var_chain = info->new_local_var_chain;
1051 info->new_local_var_chain = NULL;
1052
1053 walk_body (convert_nonlocal_reference, info, &OMP_PARALLEL_BODY (t));
1054
1055 if (info->new_local_var_chain)
1056 declare_vars (info->new_local_var_chain, OMP_PARALLEL_BODY (t), false);
1057 info->new_local_var_chain = save_local_var_chain;
1058 info->suppress_expansion = save_suppress;
1059 break;
1060
1061 case OMP_FOR:
1062 case OMP_SECTIONS:
1063 case OMP_SINGLE:
1064 save_suppress = info->suppress_expansion;
1065 convert_nonlocal_omp_clauses (&OMP_CLAUSES (t), wi);
1066 walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
1067 info->suppress_expansion = save_suppress;
1068 break;
1069
1070 case OMP_SECTION:
1071 case OMP_MASTER:
1072 case OMP_ORDERED:
1073 walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
1074 break;
1075
1076 default:
1077 if (!IS_TYPE_OR_DECL_P (t))
1078 {
1079 *walk_subtrees = 1;
1080 wi->val_only = true;
1081 wi->is_lhs = false;
1082 }
1083 break;
1084 }
1085
1086 return NULL_TREE;
1087 }
1088
/* A subroutine of convert_nonlocal_reference.  Rewrite the decls named
   in the clause list *PCLAUSES that belong to an enclosing function so
   that they refer to the nonlocal debug decl instead, and suppress frame
   expansion for them inside the construct's body.  WI carries the
   current walk state.  Returns true if any clause required access to
   the static chain.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *info = wi->info;
  bool need_chain = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; it only becomes the
     active set (info->suppress_expansion) after all clauses are seen.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* A decl from an enclosing function: redirect the clause to
	     the debug decl and remember not to expand references to it
	     in the construct's body.  */
	  if (decl_function_context (decl) != info->context)
	    {
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These clauses carry a general expression operand; walk it as
	     an rvalue so nonlocal references inside it get converted.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	  /* No operands that could contain nonlocal references.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  return need_chain;
}
1147
/* A subroutine of convert_local_reference.  Create a local variable
   in the parent function with DECL_VALUE_EXPR set to reference the
   field in FRAME.  This is used both for debug info and in OpenMP
   lowering.  INFO describes the parent function, DECL the original
   variable, FIELD its slot in the frame struct.  */

static tree
get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
{
  tree x, new_decl;
  void **slot;

  /* Return the cached replacement for DECL, if one was already made.  */
  slot = pointer_map_insert (info->var_map, decl);
  if (*slot)
    return *slot;

  /* Make sure frame_decl gets created.  */
  (void) get_frame_type (info);
  x = info->frame_decl;
  /* X is the expression FRAME.field that the new decl will stand for.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);

  /* Build a VAR_DECL copying DECL's name, type, qualifiers and source
     location.  */
  new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;

  /* The new decl is not a real object; its value is FRAME.field.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
  *slot = new_decl;

  /* Chain it onto debug_var_chain so finalize_nesting_tree_1 declares
     it in the function's BIND_EXPR.  */
  TREE_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Do not emit debug info twice.  */
  DECL_IGNORED_P (decl) = 1;

  return new_decl;
}
1191
1192 /* Called via walk_function+walk_tree, rewrite all references to VAR
1193 and PARM_DECLs that were referenced by inner nested functions.
1194 The rewrite will be a structure reference to the local frame variable. */
1195
1196 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1197
static tree
convert_local_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info;
  tree t = *tp, field, x;
  bool save_val_only;
  tree save_local_var_chain;
  bitmap save_suppress;

  /* We control recursion explicitly per tree code below.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Use the debug decl when expansion of this decl is suppressed
	     (set by convert_local_omp_clauses); otherwise read the field
	     out of the frame object itself.  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->tsi);

	  /* In a context that wants a simple value, spill the frame
	     reference through a temporary.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->tsi);
	      else
		x = init_tmp_var (info, x, &wi->tsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      /* Walk the operand as an lvalue; wi->changed tells us whether
	 anything under the ADDR_EXPR was rewritten.  */
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  /* Recompute invariant-ness in the context of the parent
	     function, where the rewritten reference lives.  */
	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = tsi_gimplify_val (wi->info, t, &wi->tsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  /* Walk only the "descriptor" operands (offsets, indices,
	     sizes) at each level; the base object is handled after the
	     loop.  */
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference, wi,
			 NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
			 NULL);
	    }
	}
      /* Now walk the innermost base object as an lvalue.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case OMP_PARALLEL:
      save_suppress = info->suppress_expansion;
      /* If any clause needed the frame, share the whole frame object
	 with the outlined parallel body.  */
      if (convert_local_omp_clauses (&OMP_PARALLEL_CLAUSES (t), wi))
	{
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (OMP_CLAUSE_SHARED);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
	  OMP_PARALLEL_CLAUSES (t) = c;
	}

      /* Collect temporaries created while walking the parallel body so
	 they can be declared inside that body, not in the parent.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference, info, &OMP_PARALLEL_BODY (t));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain, OMP_PARALLEL_BODY (t), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case OMP_FOR:
    case OMP_SECTIONS:
    case OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (&OMP_CLAUSES (t), wi);
      walk_body (convert_local_reference, info, &OMP_BODY (t));
      info->suppress_expansion = save_suppress;
      break;

    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
      /* Clauseless constructs: just walk the body.  */
      walk_body (convert_local_reference, info, &OMP_BODY (t));
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  /* Generic expression: recurse, expecting rvalues.  */
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1366
/* A subroutine of convert_local_reference.  Rewrite decls in the clause
   list *PCLAUSES that were moved into this function's own frame so that
   they refer to the local debug decl instead, and suppress frame
   expansion for them inside the construct's body.  WI carries the
   current walk state.  Returns true if the frame object itself is
   needed inside the construct.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *info = wi->info;
  bool need_frame = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; it only becomes the
     active set (info->suppress_expansion) after all clauses are seen.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Only this function's own decls that live in the frame (and
	     are not referenced through a pointer) need rewriting.  */
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These clauses carry a general expression operand; walk it as
	     an rvalue so frame references inside it get rewritten.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy, wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	  /* No operands that could contain frame references.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  return need_frame;
}
1430
/* Called via walk_function+walk_tree, rewrite all GOTO_EXPRs that
   reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  */

static tree
convert_nl_goto_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info, *i;
  tree t = *tp, label, new_label, target_context, x, arg, field;
  void **slot;

  *walk_subtrees = 0;
  if (TREE_CODE (t) != GOTO_EXPR)
    return NULL_TREE;
  /* Only a goto whose destination is a LABEL_DECL declared in an
     enclosing function is a non-local goto; computed gotos and local
     gotos are left alone.  */
  label = GOTO_DESTINATION (t);
  if (TREE_CODE (label) != LABEL_DECL)
    return NULL_TREE;
  target_context = decl_function_context (label);
  if (target_context == info->context)
    return NULL_TREE;

  /* Find the nesting_info of the function that declares the label.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  slot = pointer_map_insert (i->var_map, label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label ();
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;
  
  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, &wi->tsi);
  x = build_addr (x, target_context);
  x = tsi_gimplify_val (info, x, &wi->tsi);
  arg = tree_cons (NULL, x, NULL);
  x = build_addr (new_label, target_context);
  arg = tree_cons (NULL, x, arg);
  x = implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO];
  x = build_function_call_expr (x, arg);

  /* Replace the whole goto statement with the builtin call, keeping
     the original source location.  */
  SET_EXPR_LOCUS (x, EXPR_LOCUS (tsi_stmt (wi->tsi)));
  *tsi_stmt_ptr (wi->tsi) = x;

  return NULL_TREE;
}
1489
/* Called via walk_function+walk_tree, rewrite all LABEL_EXPRs that
   are referenced via nonlocal goto from a nested function.  The rewrite
   will involve installing a newly generated DECL_NONLOCAL label, and
   (potentially) a branch around the rtl gunk that is assumed to be
   attached to such a label.  */

static tree
convert_nl_goto_receiver (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info;
  tree t = *tp, label, new_label, x;
  tree_stmt_iterator tmp_tsi;
  void **slot;

  *walk_subtrees = 0;
  if (TREE_CODE (t) != LABEL_EXPR)
    return NULL_TREE;
  label = LABEL_EXPR_LABEL (t);

  /* convert_nl_goto_reference recorded in var_map the labels targeted
     by a non-local goto; all other labels are left alone.  */
  slot = pointer_map_contains (info->var_map, label);
  if (!slot)
    return NULL_TREE;

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_tsi = wi->tsi;
  tsi_prev (&tmp_tsi);
  if (tsi_end_p (tmp_tsi) || block_may_fallthru (tsi_stmt (tmp_tsi)))
    {
      x = build1 (GOTO_EXPR, void_type_node, label);
      tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);
    }

  /* Install the artificial non-local label just before the user one.  */
  new_label = (tree) *slot;
  x = build1 (LABEL_EXPR, void_type_node, new_label);
  tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);

  return NULL_TREE;
}
1530
/* Called via walk_function+walk_tree, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */

static tree
convert_tramp_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info, *i;
  tree t = *tp, decl, target_context, x, arg;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
	 */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (DECL_NO_STATIC_CHAIN (decl))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->tsi);
      x = build_addr (x, target_context);
      x = tsi_gimplify_val (info, x, &wi->tsi);
      arg = tree_cons (NULL, x, NULL);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      x = implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE];
      x = build_function_call_expr (x, arg);
      x = init_tmp_var (info, x, &wi->tsi);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->tsi);

      /* Replace &nested_fn with the adjusted trampoline address.  */
      *tp = x;
      break;

    case CALL_EXPR:
      /* Only walk call arguments, lest we generate trampolines for
	 direct calls.  */
      walk_tree (&TREE_OPERAND (t, 1), convert_tramp_reference, wi, NULL);
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
1605
/* Called via walk_function+walk_tree, rewrite all CALL_EXPRs that
   reference nested functions to make sure that the static chain is
   set up properly for the call.  */

static tree
convert_call_expr (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info;
  tree t = *tp, decl, target_context;
  char save_static_chain_added;
  int i;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case CALL_EXPR:
      decl = get_callee_fndecl (t);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && !DECL_NO_STATIC_CHAIN (decl))
	{
	  /* Store the static chain as operand 2 of the CALL_EXPR, and
	     record which object it came from: bit 0 when the callee is
	     nested directly in this function (chain == our frame), bit 1
	     when the callee's parent is some other context (chain ==
	     our chain decl).  */
	  TREE_OPERAND (t, 2)
	    = get_static_chain (info, target_context, &wi->tsi);
	  info->static_chain_added
	    |= (1 << (info->context != target_context));
	}
      break;

    case RETURN_EXPR:
    case GIMPLE_MODIFY_STMT:
    case WITH_SIZE_EXPR:
      /* Only RETURN_EXPR, GIMPLE_MODIFY_STMT and WITH_SIZE_EXPR may
	 contain calls, so only these statements need a deeper walk.  */
      *walk_subtrees = 1;
      break;

    case OMP_PARALLEL:
      /* Walk the parallel body with a zeroed static_chain_added so we
	 can tell which chains the body itself required, then mirror
	 each required object as a clause on the OMP_PARALLEL so it is
	 passed to the outlined child function.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_call_expr, info, &OMP_PARALLEL_BODY (t));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  /* Bit 0 corresponds to FRAME, bit 1 to CHAIN; see above.  */
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = OMP_PARALLEL_CLAUSES (t); c; c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
	      OMP_PARALLEL_CLAUSES (t) = c;
	    }
	}
      /* Chains needed inside the parallel also count for the enclosing
	 context.  */
      info->static_chain_added |= save_static_chain_added;
      break;

    case OMP_FOR:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      walk_body (convert_call_expr, info, &OMP_BODY (t));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
1686
1687 /* Walk the nesting tree starting with ROOT, depth first. Convert all
1688 trampolines and call expressions. On the way back up, determine if
1689 a nested function actually uses its static chain; if not, remember that. */
1690
1691 static void
1692 convert_all_function_calls (struct nesting_info *root)
1693 {
1694 do
1695 {
1696 if (root->inner)
1697 convert_all_function_calls (root->inner);
1698
1699 walk_function (convert_tramp_reference, root);
1700 walk_function (convert_call_expr, root);
1701
1702 /* If the function does not use a static chain, then remember that. */
1703 if (root->outer && !root->chain_decl && !root->chain_field)
1704 DECL_NO_STATIC_CHAIN (root->context) = 1;
1705 else
1706 gcc_assert (!DECL_NO_STATIC_CHAIN (root->context));
1707
1708 root = root->next;
1709 }
1710 while (root);
1711 }
1712
/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  tree stmt_list = NULL;
  tree context = root->context;
  struct function *sf;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = TREE_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Emit FRAME.field = &p when the parameter lives in the frame
	     by pointer, otherwise FRAME.field = p (a copy).  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  x = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (field), y, x);
	  append_to_statement_list (x, &stmt_list);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      x = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (x), x, get_chain_decl (root));
      append_to_statement_list (x, &stmt_list);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  /* The argument list is built in reverse.  Third argument:
	     the static chain to store in the trampoline, or null if
	     the child needs none.  */
	  if (DECL_NO_STATIC_CHAIN (i->context))
	    x = null_pointer_node;
	  else
	    x = build_addr (root->frame_decl, context);
	  arg = tree_cons (NULL, x, NULL);

	  /* Second argument: the address of the nested function.  */
	  x = build_addr (i->context, context);
	  arg = tree_cons (NULL, x, arg);

	  /* First argument: the address of the trampoline field.  */
	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  x = build_addr (x, context);
	  arg = tree_cons (NULL, x, arg);

	  /* __builtin_init_trampoline (&FRAME.tramp, &child, chain).  */
	  x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
	  x = build_function_call_expr (x, arg);

	  append_to_statement_list (x, &stmt_list);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      annotate_all_with_locus (&stmt_list,
			       DECL_SOURCE_LOCATION (context));
      /* Prepend: the initializations must run before any user code,
	 so append the old body to the new statements.  */
      append_to_statement_list (BIND_EXPR_BODY (DECL_SAVED_TREE (context)),
				&stmt_list);
      BIND_EXPR_BODY (DECL_SAVED_TREE (context)) = stmt_list;
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain, DECL_SAVED_TREE (root->context),
		  false);
  if (root->debug_var_chain)
    declare_vars (root->debug_var_chain, DECL_SAVED_TREE (root->context),
		  true);

  /* Dump the translated tree function.  */
  dump_function (TDI_nested, root->context);
}
1843
1844 static void
1845 finalize_nesting_tree (struct nesting_info *root)
1846 {
1847 do
1848 {
1849 if (root->inner)
1850 finalize_nesting_tree (root->inner);
1851 finalize_nesting_tree_1 (root);
1852 root = root->next;
1853 }
1854 while (root);
1855 }
1856
1857 /* Unnest the nodes and pass them to cgraph. */
1858
1859 static void
1860 unnest_nesting_tree_1 (struct nesting_info *root)
1861 {
1862 struct cgraph_node *node = cgraph_node (root->context);
1863
1864 /* For nested functions update the cgraph to reflect unnesting.
1865 We also delay finalizing of these functions up to this point. */
1866 if (node->origin)
1867 {
1868 cgraph_unnest_node (cgraph_node (root->context));
1869 cgraph_finalize_function (root->context, true);
1870 }
1871 }
1872
1873 static void
1874 unnest_nesting_tree (struct nesting_info *root)
1875 {
1876 do
1877 {
1878 if (root->inner)
1879 unnest_nesting_tree (root->inner);
1880 unnest_nesting_tree_1 (root);
1881 root = root->next;
1882 }
1883 while (root);
1884 }
1885
1886 /* Free the data structures allocated during this pass. */
1887
1888 static void
1889 free_nesting_tree (struct nesting_info *root)
1890 {
1891 struct nesting_info *next;
1892 do
1893 {
1894 if (root->inner)
1895 free_nesting_tree (root->inner);
1896 pointer_map_destroy (root->var_map);
1897 pointer_map_destroy (root->field_map);
1898 next = root->next;
1899 free (root);
1900 root = next;
1901 }
1902 while (root);
1903 }
1904
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node (fndecl);
  if (!cgn->nested)
    return;

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);
  /* The walks run in a fixed order; later passes consume state recorded
     by earlier ones (e.g. convert_nl_goto_receiver installs the labels
     that convert_nl_goto_reference recorded in var_map).  */
  walk_all_functions (convert_nonlocal_reference, root);
  walk_all_functions (convert_local_reference, root);
  walk_all_functions (convert_nl_goto_reference, root);
  walk_all_functions (convert_nl_goto_receiver, root);
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);
  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);
}
1931
1932 #include "gt-tree-nested.h"