gcc/tree-nested.c
/* Nested function decomposition for GIMPLE.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "bitmap.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "expr.h"  /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL.  */
#include "langhooks.h"
#include "pointer-set.h"
#include "gimple-low.h"


/* The object of this pass is to lower the representation of a set of nested
   functions in order to expose all of the gory details of the various
   nonlocal references.  We want to do this sooner rather than later, in
   order to give us more freedom in emitting all of the functions in question.

   Back in olden times, when gcc was young, we developed an insanely
   complicated scheme whereby variables which were referenced nonlocally
   were forced to live in the stack of the declaring function, and then
   the nested functions magically discovered where these variables were
   placed.  In order for this scheme to function properly, it required
   that the outer function be partially expanded, then we switch to
   compiling the inner function, and once done with those we switch back
   to compiling the outer function.  Such delicate ordering requirements
   make it difficult to do whole translation unit optimizations
   involving such functions.

   The implementation here is much more direct.  Everything that can be
   referenced by an inner function is a member of an explicitly created
   structure herein called the "nonlocal frame struct".  The incoming
   static chain for a nested function is a pointer to this struct in
   the parent.  In this way, we settle on known offsets from a known
   base, and so are decoupled from the logic that places objects in the
   function's stack frame.  More importantly, we don't have to wait for
   that to happen -- since the compilation of the inner function is no
   longer tied to a real stack frame, the nonlocal frame struct can be
   allocated anywhere.  Which means that the outer function is now
   inlinable.

   The theory of operation here is very simple.  Iterate over all the
   statements in all the functions (depth first) several times,
   allocating structures and fields on demand.  In general we want to
   examine inner functions first, so that we can avoid making changes
   to outer functions which are unnecessary.

   The order of the passes matters a bit, in that later passes will be
   skipped if it is discovered that the functions don't actually interact
   at all.  That is, they're nested in the lexical sense but could have
   been written as independent functions without change.  */
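
/* For illustration, a hypothetical example (the names below are invented,
   not produced literally by this pass): given

       int outer (int n)
       {
         int x = n;
         int inner (void) { return x; }
         return inner ();
       }

   the lowering conceptually behaves as if the user had written

       struct FRAME_outer { int x; };

       static int inner (struct FRAME_outer *chain)
       { return chain->x; }

       int outer (int n)
       {
         struct FRAME_outer frame;
         frame.x = n;
         return inner (&frame);
       }

   with FRAME_outer standing in for the FRAME.outer record type and
   'chain' for the static chain pointer built below.  */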


struct nesting_info
{
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  struct pointer_map_t *field_map;
  struct pointer_map_t *var_map;
  struct pointer_set_t *mem_refs;
  bitmap suppress_expansion;

  tree context;
  tree new_local_var_chain;
  tree debug_var_chain;
  tree frame_type;
  tree frame_decl;
  tree chain_field;
  tree chain_decl;
  tree nl_goto_field;

  bool any_parm_remapped;
  bool any_tramp_created;
  char static_chain_added;
};


/* Iterate over the nesting tree, starting with ROOT, depth first.  */

static inline struct nesting_info *
iter_nestinfo_start (struct nesting_info *root)
{
  while (root->inner)
    root = root->inner;
  return root;
}

static inline struct nesting_info *
iter_nestinfo_next (struct nesting_info *node)
{
  if (node->next)
    return iter_nestinfo_start (node->next);
  return node->outer;
}

#define FOR_EACH_NEST_INFO(I, ROOT) \
  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
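
/* A hypothetical usage sketch:

       struct nesting_info *n;
       FOR_EACH_NEST_INFO (n, root)
         process (n);

   where 'process' is a placeholder.  iter_nestinfo_start descends to the
   deepest inner node and iter_nestinfo_next exhausts siblings before
   returning to the parent, so nested functions are always visited before
   the functions that contain them.  */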

/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;


/* We're working in so many different function contexts simultaneously,
   that create_tmp_var is dangerous.  Prevent mishap.  */
#define create_tmp_var cant_use_create_tmp_var_here_dummy

/* Like create_tmp_var, except record the variable for registration at
   the given nesting level.  */

static tree
create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
{
  tree tmp_var;

  /* If the type is of variable size or a type which must be created by the
     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
              || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  DECL_CONTEXT (tmp_var) = info->context;
  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;

  return tmp_var;
}

/* Take the address of EXP to be used within function CONTEXT.
   Mark it for addressability as necessary.  */

tree
build_addr (tree exp, tree context)
{
  tree base = exp;
  tree save_context;
  tree retval;

  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  if (DECL_P (base))
    TREE_ADDRESSABLE (base) = 1;

  /* Building the ADDR_EXPR will compute a set of properties for
     that ADDR_EXPR.  Those properties are unfortunately context
     specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.

     Temporarily set CURRENT_FUNCTION_DECL to the desired context,
     build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL.  That
     way the properties computed for the ADDR_EXPR are correct.  */
  save_context = current_function_decl;
  current_function_decl = context;
  retval = build_fold_addr_expr (exp);
  current_function_decl = save_context;
  return retval;
}

/* Insert FIELD into TYPE, sorted by alignment requirements.  */

void
insert_field_into_struct (tree type, tree field)
{
  tree *p;

  DECL_CONTEXT (field) = type;

  for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
      break;

  DECL_CHAIN (field) = *p;
  *p = field;

  /* Set correct alignment for frame struct type.  */
  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
    TYPE_ALIGN (type) = DECL_ALIGN (field);
}
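
/* A note on the ordering above: each new field is placed after all
   existing fields of greater or equal alignment, so TYPE_FIELDS stays
   sorted by decreasing alignment.  Packing strictly-aligned members
   first tends to reduce internal padding in the frame record as fields
   are added on demand.  */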

/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      name = concat ("FRAME.",
                     IDENTIFIER_POINTER (DECL_NAME (info->context)),
                     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
         be pointed to by the static chain pointer.  This pessimizes
         when it turns out that no static chains are needed because
         the nested functions referencing non-local variables are not
         reachable, but the true pessimization is to create the non-
         local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}

/* Return true if DECL should be referenced by pointer in the non-local
   frame structure.  */

static bool
use_pointer_in_frame (tree decl)
{
  if (TREE_CODE (decl) == PARM_DECL)
    {
      /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
         sized decls, and inefficient to copy large aggregates.  Don't bother
         moving anything but scalar variables.  */
      return AGGREGATE_TYPE_P (TREE_TYPE (decl));
    }
  else
    {
      /* Variable sized types make things "interesting" in the frame.  */
      return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
    }
}
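
/* For instance (a hypothetical illustration of the rule above): an 'int'
   parameter is copied into the frame by value, whereas a large aggregate
   parameter such as a 'struct buffer', or any local whose DECL_SIZE is
   not a compile-time constant, is instead reached through a pointer
   stored in the frame.  */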

/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
                       enum insert_option insert)
{
  void **slot;

  if (insert == NO_INSERT)
    {
      slot = pointer_map_contains (info->field_map, decl);
      return slot ? (tree) *slot : NULL_TREE;
    }

  slot = pointer_map_insert (info->field_map, decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
        {
          TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
          DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
          DECL_NONADDRESSABLE_P (field) = 1;
        }
      else
        {
          TREE_TYPE (field) = TREE_TYPE (decl);
          DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
          DECL_ALIGN (field) = DECL_ALIGN (decl);
          DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
          TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
          DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
          TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
        }

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      if (TREE_CODE (decl) == PARM_DECL)
        info->any_parm_remapped = true;
    }

  return (tree) *slot;
}

/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
         the construction of this variable is handled specially in
         expand_function_start and initialize_inlined_parameters.
         Note also that it's represented as a parameter.  This is
         closer to the truth, since the initial value does come from
         the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
                         PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
         it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      if (dump_file
          && (dump_flags & TDF_DETAILS)
          && !DECL_STATIC_CHAIN (info->context))
        fprintf (dump_file, "Setting static-chain for %s\n",
                 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}

/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  if (!field)
    {
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      if (dump_file
          && (dump_flags & TDF_DETAILS)
          && !DECL_STATIC_CHAIN (info->context))
        fprintf (dump_file, "Setting static-chain for %s\n",
                 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}

/* Initialize a new temporary with the GIMPLE_CALL CALL.  */

static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
                        gimple call)
{
  tree t;

  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
  gimple_call_set_lhs (call, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, call, GSI_SAME_STMT);

  return t;
}


/* Copy EXP into a temporary.  Allocate the temporary in the context of
   INFO and insert the initialization statement before GSI.  */

static tree
init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (t, exp);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}


/* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */

static tree
gsi_gimplify_val (struct nesting_info *info, tree exp,
                  gimple_stmt_iterator *gsi)
{
  if (is_gimple_val (exp))
    return exp;
  else
    return init_tmp_var (info, exp, gsi);
}

/* Similarly, but copy from the temporary and insert the statement
   after the iterator.  */

static tree
save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (exp, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}

/* Build or return the type used to represent a nested function trampoline.  */

static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
                  FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
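
/* A hypothetical illustration: on a target where TRAMPOLINE_SIZE is 24
   and TRAMPOLINE_ALIGNMENT does not exceed STACK_BOUNDARY, the record
   built above amounts to

       struct __builtin_trampoline { char __data[24]; };

   with __data carrying the trampoline's alignment.  When the required
   alignment exceeds STACK_BOUNDARY, the extra bytes added above leave
   room to align the trampoline dynamically at runtime.  */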

/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a trampoline for this function.  */

static tree
lookup_tramp_for_decl (struct nesting_info *info, tree decl,
                       enum insert_option insert)
{
  void **slot;

  if (insert == NO_INSERT)
    {
      slot = pointer_map_contains (info->var_map, decl);
      return slot ? (tree) *slot : NULL_TREE;
    }

  slot = pointer_map_insert (info->var_map, decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);
      TREE_TYPE (field) = get_trampoline_type (info);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      info->any_tramp_created = true;
    }

  return (tree) *slot;
}

/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
         frame pointer, the rest is for the target's stack pointer save
         area.  The number of words is controlled by STACK_SAVEAREA_MODE;
         not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
        type = ptr_type_node;
      else
        type = lang_hooks.types.type_for_mode (Pmode, 1);

      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
        (type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
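
/* For example (hypothetical target numbers): with an 8-byte Pmode and a
   16-byte SAVE_NONLOCAL save area, SIZE computes to 16/8 + 1 = 3 above,
   giving an array of pointer-sized words in which the first element
   receives the frame pointer and the remainder holds the saved stack
   pointer state.  */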

/* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ.  */

static void
walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
           struct nesting_info *info, gimple_seq *pseq)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.val_only = true;
  walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
}


/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */

static inline void
walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
               struct nesting_info *info)
{
  gimple_seq body = gimple_body (info->context);
  walk_body (callback_stmt, callback_op, info, &body);
  gimple_set_body (info->context, body);
}

/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */

static void
walk_gimple_omp_for (gimple for_stmt,
                     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
                     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
                 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
                 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
                 &wi, NULL);

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}

/* Similarly for ROOT and all functions nested underneath, depth first.  */

static void
walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
                    struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    walk_function (callback_stmt, callback_op, n);
}


/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
   done in the types of the PARM_DECLs of the nested function, meaning the
   argument types of that function will disagree with the arguments in the
   calls to that function.  So we'd either have to make a copy of the nested
   function corresponding to each time the enclosing function was inlined or
   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
   function.  The former is not practical.  The latter would still require
   detecting this case to know when to add the conversions.  So, for now at
   least, we don't inline such an enclosing function.

   We have to do that check recursively, so here we return an indication of
   whether FNDECL has such a nested function.  ORIG_FNDECL is the function
   we were trying to inline; it is used for checking whether any argument
   is variably modified by anything in it.

   It would be better to do this in tree-inline.c so that we could give
   the appropriate warning for why a function can't be inlined, but that's
   too late since the nesting structure has already been flattened and
   adding a flag just to record this fact seems a waste of a flag.  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);
  tree arg;

  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
        if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
          return true;

      if (check_for_nested_with_variably_modified (cgn->decl,
                                                   orig_fndecl))
        return true;
    }

  return false;
}
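
/* A hypothetical example of the problem described above:

       void outer (int n)
       {
         void inner (int a[n]) { ... }

   Here the type of 'a' depends on the enclosing function's 'n'.  If
   'outer' were inlined, 'n' would be remapped inside the inlined body
   but not inside inner's PARM_DECL types, so calls to 'inner' would no
   longer agree with its declared argument types; hence DECL_UNINLINABLE
   is set in create_nesting_tree below.  */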

/* Construct our local datastructure describing the function nesting
   tree rooted by CGN.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = pointer_map_create ();
  info->var_map = pointer_map_create ();
  info->mem_refs = pointer_set_create ();
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See the discussion at check_for_nested_with_variably_modified for
     why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}

/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
                  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      x = build_addr (info->frame_decl, target_context);
    }
  else
    {
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
        {
          tree field = get_chain_field (i);

          x = build_simple_mem_ref (x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
          x = init_tmp_var (info, x, gsi);
        }
    }

  return x;
}
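
/* A hypothetical sketch of the GIMPLE this emits when TARGET_CONTEXT is
   two levels above INFO->CONTEXT:

       _1 = CHAIN.1->__chain;
       x  = _1;

   where CHAIN.1 stands for the incoming static chain parameter (a
   pointer to the parent's frame) and each loop iteration above
   contributes one MEM_REF/COMPONENT_REF hop, committed to a temporary
   by init_tmp_var.  */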


/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
                 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
    }
  else
    {
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
        {
          tree field = get_chain_field (i);

          x = build_simple_mem_ref (x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
          x = init_tmp_var (info, x, gsi);
        }

      x = build_simple_mem_ref (x);
    }

  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}

static void note_nonlocal_vla_type (struct nesting_info *info, tree type);

/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OpenMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;
  void **slot;

  slot = pointer_map_insert (info->var_map, decl);

  if (*slot)
    return (tree) *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
        {
          field = get_chain_field (i);
          x = build_simple_mem_ref (x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
        }
      x = build_simple_mem_ref (x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
                         VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}


/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
        break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
        {
          tree x;
          wi->changed = true;

          x = get_nonlocal_debug_decl (info, t);
          if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
            {
              tree target_context = decl_function_context (t);
              struct nesting_info *i;
              for (i = info->outer; i->context != target_context; i = i->outer)
                continue;
              x = lookup_field_for_decl (i, t, INSERT);
              x = get_frame_field (info, target_context, x, &wi->gsi);
              if (use_pointer_in_frame (t))
                {
                  x = init_tmp_var (info, x, &wi->gsi);
                  x = build_simple_mem_ref (x);
                }
            }

          if (wi->val_only)
            {
              if (wi->is_lhs)
                x = save_tmp_var (info, x, &wi->gsi);
              else
                x = init_tmp_var (info, x, &wi->gsi);
            }

          *tp = x;
        }
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
         this is not itself a non-local goto.  Mark the label such that it
         will not be deleted, much as we would with a label address in
         static storage.  */
      if (decl_function_context (t) != info->context)
        FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
        bool save_val_only = wi->val_only;

        wi->val_only = false;
        wi->is_lhs = false;
        wi->changed = false;
        walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
        wi->val_only = true;

        if (wi->changed)
          {
            tree save_context;

            /* If we changed anything, we might no longer be directly
               referencing a decl.  */
            save_context = current_function_decl;
            current_function_decl = info->context;
            recompute_tree_invariant_for_addr_expr (t);
            current_function_decl = save_context;

            /* If the callback converted the address argument in a context
               where we only accept variables (and min_invariant, presumably),
               then compute the address into a temporary.  */
            if (save_val_only)
              *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
                                      t, &wi->gsi);
          }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
         anything that describes the references.  Otherwise, we lose track
         of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
        {
          if (TREE_CODE (t) == COMPONENT_REF)
            walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
                       NULL);
          else if (TREE_CODE (t) == ARRAY_REF
                   || TREE_CODE (t) == ARRAY_RANGE_REF)
            {
              walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
                         wi, NULL);
              walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
                         wi, NULL);
              walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
                         wi, NULL);
            }
        }
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
         untouched.  This might actually be for !val_only + lhs, in which
         case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
        {
          *walk_subtrees = 1;
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;
    }

  return NULL_TREE;
}

static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
                                             struct walk_stmt_info *);

/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
        {
        case OMP_CLAUSE_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
            need_stmts = true;
          goto do_decl_clause;

        case OMP_CLAUSE_LASTPRIVATE:
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
            need_stmts = true;
          goto do_decl_clause;

        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_SHARED:
        do_decl_clause:
          decl = OMP_CLAUSE_DECL (clause);
          if (TREE_CODE (decl) == VAR_DECL
              && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
            break;
          if (decl_function_context (decl) != info->context)
            {
              bitmap_set_bit (new_suppress, DECL_UID (decl));
              OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
              if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
                need_chain = true;
            }
          break;

        case OMP_CLAUSE_SCHEDULE:
          if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
            break;
          /* FALLTHRU */
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
          wi->val_only = true;
          wi->is_lhs = false;
          convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
                                         &dummy, wi);
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
          break;

        default:
          gcc_unreachable ();
        }
    }

  info->suppress_expansion = new_suppress;

  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
        {
        case OMP_CLAUSE_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
            {
              tree old_context
                = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = info->context;
              walk_body (convert_nonlocal_reference_stmt,
                         convert_nonlocal_reference_op, info,
                         &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
              walk_body (convert_nonlocal_reference_stmt,
                         convert_nonlocal_reference_op, info,
                         &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = old_context;
            }
          break;

        case OMP_CLAUSE_LASTPRIVATE:
          walk_body (convert_nonlocal_reference_stmt,
                     convert_nonlocal_reference_op, info,
                     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
          break;

        default:
          break;
        }

  return need_chain;
}

/* Create nonlocal debug decls for nonlocal VLA array bounds.  */

static void
note_nonlocal_vla_type (struct nesting_info *info, tree type)
{
  while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
    type = TREE_TYPE (type);

  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
    type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));

  while (POINTER_TYPE_P (type)
         || TREE_CODE (type) == VECTOR_TYPE
         || TREE_CODE (type) == FUNCTION_TYPE
         || TREE_CODE (type) == METHOD_TYPE)
    type = TREE_TYPE (type);

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree domain, t;

      note_nonlocal_vla_type (info, TREE_TYPE (type));
      domain = TYPE_DOMAIN (type);
      if (domain)
        {
          t = TYPE_MIN_VALUE (domain);
          if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
              && decl_function_context (t) != info->context)
            get_nonlocal_debug_decl (info, t);
          t = TYPE_MAX_VALUE (domain);
          if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
              && decl_function_context (t) != info->context)
            get_nonlocal_debug_decl (info, t);
        }
    }
}
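
/* A hypothetical example: for a nonlocal VLA such as

       void outer (int n)
       {
         char buf[n];
         void inner (void) { buf[0] = 0; }

   the walk above strips the pointer and array layers of buf's type and,
   on reaching the ARRAY_TYPE's domain, creates nonlocal debug decls for
   any VAR_DECL or PARM_DECL bounds it finds there, so the array's extent
   remains printable in the debugger.  */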

/* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
   in BLOCK.  */

static void
note_nonlocal_block_vlas (struct nesting_info *info, tree block)
{
  tree var;

  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL
        && variably_modified_type_p (TREE_TYPE (var), NULL)
        && DECL_HAS_VALUE_EXPR_P (var)
        && decl_function_context (var) != info->context)
      note_nonlocal_vla_type (info, TREE_TYPE (var));
}

/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                                 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
        {
          wi->val_only = true;
          wi->is_lhs = false;
          *handled_ops_p = true;
          return NULL_TREE;
        }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
                                        wi))
        {
          tree c, decl;
          decl = get_chain_decl (info);
          c = build_omp_clause (gimple_location (stmt),
                                OMP_CLAUSE_FIRSTPRIVATE);
          OMP_CLAUSE_DECL (c) = decl;
          OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
          gimple_omp_taskreg_set_clauses (stmt, c);
        }

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
        declare_vars (info->new_local_var_chain,
                      gimple_seq_first_stmt (gimple_omp_body (stmt)),
                      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
                           convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
                 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      if (!optimize && gimple_bind_block (stmt))
        note_nonlocal_block_vlas (info, gimple_bind_block (stmt));

      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
         handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT's operands, no need to traverse them.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
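
/* A hypothetical sketch of the GIMPLE_OMP_PARALLEL handling above: when
   the clause walk reports that the construct's body needs the static
   chain, e.g. for

       #pragma omp parallel shared(x)

   where 'x' lives in an outer function, the chain decl is prepended to
   the clause list, conceptually yielding

       #pragma omp parallel shared(x) firstprivate(CHAIN.1)

   so each spawned thread receives its own copy of the chain pointer;
   CHAIN.1 stands in for the artificial PARM_DECL built by
   get_chain_decl.  */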


/* A subroutine of convert_local_reference.  Create a local variable
   in the parent function with DECL_VALUE_EXPR set to reference the
   field in FRAME.  This is used both for debug info and in OpenMP
   lowering.  */

static tree
get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
{
  tree x, new_decl;
  void **slot;

  slot = pointer_map_insert (info->var_map, decl);
  if (*slot)
    return (tree) *slot;

  /* Make sure frame_decl gets created.  */
  (void) get_frame_type (info);
  x = info->frame_decl;
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);

  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
                         VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
  *slot = new_decl;

  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Do not emit debug info twice.  */
  DECL_IGNORED_P (decl) = 1;

  return new_decl;
}


/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
        break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
        {
          /* If we copied a pointer to the frame, then the original decl
             is used unchanged in the parent function.  */
          if (use_pointer_in_frame (t))
            break;

          /* No need to transform anything if no child references the
             variable.  */
          field = lookup_field_for_decl (info, t, NO_INSERT);
          if (!field)
            break;
          wi->changed = true;

          x = get_local_debug_decl (info, t, field);
          if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
            x = get_frame_field (info, info->context, field, &wi->gsi);

          if (wi->val_only)
            {
              if (wi->is_lhs)
                x = save_tmp_var (info, x, &wi->gsi);
              else
                x = init_tmp_var (info, x, &wi->gsi);
            }

          *tp = x;
        }
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
        {
          tree save_context;

          /* Then the frame decl is now addressable.  */
          TREE_ADDRESSABLE (info->frame_decl) = 1;

          save_context = current_function_decl;
          current_function_decl = info->context;
          recompute_tree_invariant_for_addr_expr (t);
          current_function_decl = save_context;

          /* If we are in a context where we only accept values, then
             compute the address into a temporary.  */
          if (save_val_only)
            *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
                                    t, &wi->gsi);
        }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
         anything that describes the references.  Otherwise, we lose track
         of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
        {
          if (TREE_CODE (t) == COMPONENT_REF)
            walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
                       NULL);
          else if (TREE_CODE (t) == ARRAY_REF
                   || TREE_CODE (t) == ARRAY_RANGE_REF)
            {
              walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
                         NULL);
              walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
                         NULL);
              walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
                         NULL);
            }
        }
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
                 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
         part of a ADDR_EXPR address are not allowed.  But we cannot
         fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
          && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
        pointer_set_insert (info->mem_refs, tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
         untouched.  This might actually be for !val_only + lhs, in which
         case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
        {
          *walk_subtrees = 1;
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;
    }

  return NULL_TREE;
}

static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
                                          struct walk_stmt_info *);

/* Helper for convert_local_reference.  Convert all the references in
   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
        {
        case OMP_CLAUSE_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
            need_stmts = true;
          goto do_decl_clause;

        case OMP_CLAUSE_LASTPRIVATE:
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
            need_stmts = true;
          goto do_decl_clause;

        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_SHARED:
        do_decl_clause:
          decl = OMP_CLAUSE_DECL (clause);
          if (TREE_CODE (decl) == VAR_DECL
              && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
            break;
          if (decl_function_context (decl) == info->context
              && !use_pointer_in_frame (decl))
            {
              tree field = lookup_field_for_decl (info, decl, NO_INSERT);
              if (field)
                {
                  bitmap_set_bit (new_suppress, DECL_UID (decl));
                  OMP_CLAUSE_DECL (clause)
                    = get_local_debug_decl (info, decl, field);
                  need_frame = true;
                }
            }
          break;

        case OMP_CLAUSE_SCHEDULE:
          if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
            break;
          /* FALLTHRU */
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
          wi->val_only = true;
          wi->is_lhs = false;
          convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
                                      wi);
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
          break;

        default:
          gcc_unreachable ();
        }
    }

  info->suppress_expansion = new_suppress;

  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
        {
        case OMP_CLAUSE_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
            {
              tree old_context
                = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = info->context;
              walk_body (convert_local_reference_stmt,
                         convert_local_reference_op, info,
                         &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
              walk_body (convert_local_reference_stmt,
                         convert_local_reference_op, info,
                         &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = old_context;
            }
          break;

        case OMP_CLAUSE_LASTPRIVATE:
          walk_body (convert_local_reference_stmt,
                     convert_local_reference_op, info,
                     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
          break;

        default:
          break;
        }

  return need_frame;
}


/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                              struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
                                     wi))
        {
          tree c;
          (void) get_frame_type (info);
          c = build_omp_clause (gimple_location (stmt),
                                OMP_CLAUSE_SHARED);
          OMP_CLAUSE_DECL (c) = info->frame_decl;
          OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
          gimple_omp_taskreg_set_clauses (stmt, c);
        }

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
                 gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
        declare_vars (info->new_local_var_chain,
                      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (stmt, convert_local_reference_stmt,
                           convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          if (!use_pointer_in_frame (lhs)
              && lookup_field_for_decl (info, lhs, NO_INSERT))
            {
              gsi_replace (gsi, gimple_build_nop (), true);
              break;
            }
        }
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
         handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1793
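/* Illustrative sketch (example invented for exposition; FRAME.f and x
   are placeholder names, not identifiers from this pass): given GNU C
   such as

     int f (void)
     {
       int x = 0;
       int g (void) { return x; }
       x = 1;
       return g ();
     }

   x is given a field in f's frame object, and the reference "x = 1"
   in f itself is rewritten by the convert_local_reference walkers
   into an assignment to FRAME.f.x, so that f and g agree on where x
   lives.  */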
1794
1795 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
1796 that reference labels from outer functions. The rewrite will be a
1797 call to __builtin_nonlocal_goto. */
1798
1799 static tree
1800 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1801 struct walk_stmt_info *wi)
1802 {
1803 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
1804 tree label, new_label, target_context, x, field;
1805 void **slot;
1806 gimple call;
1807 gimple stmt = gsi_stmt (*gsi);
1808
1809 if (gimple_code (stmt) != GIMPLE_GOTO)
1810 {
1811 *handled_ops_p = false;
1812 return NULL_TREE;
1813 }
1814
1815 label = gimple_goto_dest (stmt);
1816 if (TREE_CODE (label) != LABEL_DECL)
1817 {
1818 *handled_ops_p = false;
1819 return NULL_TREE;
1820 }
1821
1822 target_context = decl_function_context (label);
1823 if (target_context == info->context)
1824 {
1825 *handled_ops_p = false;
1826 return NULL_TREE;
1827 }
1828
1829 for (i = info->outer; target_context != i->context; i = i->outer)
1830 continue;
1831
1832 /* The original user label may also be used for a normal goto, therefore
1833 we must create a new label that will actually receive the abnormal
1834 control transfer. This new label will be marked LABEL_NONLOCAL; this
1835 mark will trigger proper behavior in the cfg, as well as cause the
1836 (hairy target-specific) non-local goto receiver code to be generated
1837 when we expand rtl. Enter this association into var_map so that we
1838 can insert the new label into the IL during a second pass. */
1839 slot = pointer_map_insert (i->var_map, label);
1840 if (*slot == NULL)
1841 {
1842 new_label = create_artificial_label (UNKNOWN_LOCATION);
1843 DECL_NONLOCAL (new_label) = 1;
1844 *slot = new_label;
1845 }
1846 else
1847 new_label = (tree) *slot;
1848
1849 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
1850 field = get_nl_goto_field (i);
1851 x = get_frame_field (info, target_context, field, gsi);
1852 x = build_addr (x, target_context);
1853 x = gsi_gimplify_val (info, x, gsi);
1854 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
1855 2, build_addr (new_label, target_context), x);
1856 gsi_replace (gsi, call, false);
1857
1858 /* We have handled all of STMT's operands, no need to keep going. */
1859 *handled_ops_p = true;
1860 return NULL_TREE;
1861 }
1862
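/* Illustrative sketch (invented GNU C example): in

     int f (void)
     {
       __label__ out;
       int g (int x) { if (x) goto out; return x; }
       if (g (1)) return 1;
      out:
       return 0;
     }

   the "goto out" in g is replaced by a call along the lines of

     __builtin_nonlocal_goto (&NEW_LABEL, &FRAME.f.nl_goto_field);

   where NEW_LABEL is the artificial DECL_NONLOCAL label recorded in
   var_map above and emitted next to "out" by convert_nl_goto_receiver
   below.  */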
1863
1864 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
1865 are referenced via nonlocal goto from a nested function. The rewrite
1866 will involve installing a newly generated DECL_NONLOCAL label, and
1867 (potentially) a branch around the rtl gunk that is assumed to be
1868 attached to such a label. */
1869
1870 static tree
1871 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1872 struct walk_stmt_info *wi)
1873 {
1874 struct nesting_info *const info = (struct nesting_info *) wi->info;
1875 tree label, new_label;
1876 gimple_stmt_iterator tmp_gsi;
1877 void **slot;
1878 gimple stmt = gsi_stmt (*gsi);
1879
1880 if (gimple_code (stmt) != GIMPLE_LABEL)
1881 {
1882 *handled_ops_p = false;
1883 return NULL_TREE;
1884 }
1885
1886 label = gimple_label_label (stmt);
1887
1888 slot = pointer_map_contains (info->var_map, label);
1889 if (!slot)
1890 {
1891 *handled_ops_p = false;
1892 return NULL_TREE;
1893 }
1894
1895 /* If there's any possibility that the previous statement falls through,
1896 then we must branch around the new non-local label. */
1897 tmp_gsi = wi->gsi;
1898 gsi_prev (&tmp_gsi);
1899 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
1900 {
1901 gimple stmt = gimple_build_goto (label);
1902 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1903 }
1904
1905 new_label = (tree) *slot;
1906 stmt = gimple_build_label (new_label);
1907 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1908
1909 *handled_ops_p = true;
1910 return NULL_TREE;
1911 }
1912
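/* Continuing the sketch above, the receiver rewrite turns the user
   label L into

       goto L;     <-- only if the previous statement may fall through
     NEW_LABEL:
     L:

   so that ordinary control flow branches around the target-specific
   nonlocal-goto receiver code that rtl expansion attaches to
   NEW_LABEL.  */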
1913
1914 /* Called via walk_function+walk_stmt, rewrite all references to addresses
1915 of nested functions that require the use of trampolines. The rewrite
1916 will involve a reference to a trampoline generated for the occasion. */
1917
1918 static tree
1919 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
1920 {
1921 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1922 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
1923 tree t = *tp, decl, target_context, x, builtin;
1924 gimple call;
1925
1926 *walk_subtrees = 0;
1927 switch (TREE_CODE (t))
1928 {
1929 case ADDR_EXPR:
1930 /* Build
1931 T.1 = &CHAIN->tramp;
1932 T.2 = __builtin_adjust_trampoline (T.1);
1933 T.3 = (func_type)T.2;
1934 */
1935
1936 decl = TREE_OPERAND (t, 0);
1937 if (TREE_CODE (decl) != FUNCTION_DECL)
1938 break;
1939
1940 /* Only need to process nested functions. */
1941 target_context = decl_function_context (decl);
1942 if (!target_context)
1943 break;
1944
1945 /* If the nested function doesn't use a static chain, then
1946 it doesn't need a trampoline. */
1947 if (!DECL_STATIC_CHAIN (decl))
1948 break;
1949
1950 /* If we don't want a trampoline, then don't build one. */
1951 if (TREE_NO_TRAMPOLINE (t))
1952 break;
1953
1954 /* Look up the immediate parent of the callee, as that's where
1955 we need to insert the trampoline. */
1956 for (i = info; i->context != target_context; i = i->outer)
1957 continue;
1958 x = lookup_tramp_for_decl (i, decl, INSERT);
1959
1960 /* Compute the address of the field holding the trampoline. */
1961 x = get_frame_field (info, target_context, x, &wi->gsi);
1962 x = build_addr (x, target_context);
1963 x = gsi_gimplify_val (info, x, &wi->gsi);
1964
1965 /* Do machine-specific ugliness. Normally this will involve
1966 computing extra alignment, but it can really be anything. */
1967 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
1968 call = gimple_build_call (builtin, 1, x);
1969 x = init_tmp_var_with_call (info, &wi->gsi, call);
1970
1971 /* Cast back to the proper function type. */
1972 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
1973 x = init_tmp_var (info, x, &wi->gsi);
1974
1975 *tp = x;
1976 break;
1977
1978 default:
1979 if (!IS_TYPE_OR_DECL_P (t))
1980 *walk_subtrees = 1;
1981 break;
1982 }
1983
1984 return NULL_TREE;
1985 }
1986
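/* Illustrative sketch (invented example; tramp_g is a placeholder
   field name): given

     void call_it (void (*fp) (void));
     void f (void)
     {
       int x = 0;
       void g (void) { ++x; }
       call_it (g);
     }

   the address of g taken in f is rewritten roughly as

     T.1 = &FRAME.f.tramp_g;
     T.2 = __builtin_adjust_trampoline (T.1);
     T.3 = (void (*) (void)) T.2;

   with FRAME.f.tramp_g initialized via __builtin_init_trampoline in
   finalize_nesting_tree_1 below.  */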
1987
1988 /* Called via walk_function+walk_gimple_stmt, rewrite all references
1989 to addresses of nested functions that require the use of
1990 trampolines. The rewrite will involve a reference to a trampoline
1991 generated for the occasion. */
1992
1993 static tree
1994 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1995 struct walk_stmt_info *wi)
1996 {
1997 struct nesting_info *info = (struct nesting_info *) wi->info;
1998 gimple stmt = gsi_stmt (*gsi);
1999
2000 switch (gimple_code (stmt))
2001 {
2002 case GIMPLE_CALL:
2003 {
2004 /* Only walk call arguments, lest we generate trampolines for
2005 direct calls. */
2006 unsigned long i, nargs = gimple_call_num_args (stmt);
2007 for (i = 0; i < nargs; i++)
2008 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2009 wi, NULL);
2010 break;
2011 }
2012
2013 case GIMPLE_OMP_PARALLEL:
2014 case GIMPLE_OMP_TASK:
2015 {
2016 tree save_local_var_chain;
2017 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2018 save_local_var_chain = info->new_local_var_chain;
2019 info->new_local_var_chain = NULL;
2020 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2021 info, gimple_omp_body_ptr (stmt));
2022 if (info->new_local_var_chain)
2023 declare_vars (info->new_local_var_chain,
2024 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2025 false);
2026 info->new_local_var_chain = save_local_var_chain;
2027 }
2028 break;
2029
2030 default:
2031 *handled_ops_p = false;
2032 return NULL_TREE;
2034 }
2035
2036 *handled_ops_p = true;
2037 return NULL_TREE;
2038 }
2039
2040
2041
2042 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2043 that reference nested functions to make sure that the static chain
2044 is set up properly for the call. */
2045
2046 static tree
2047 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2048 struct walk_stmt_info *wi)
2049 {
2050 struct nesting_info *const info = (struct nesting_info *) wi->info;
2051 tree decl, target_context;
2052 char save_static_chain_added;
2053 int i;
2054 gimple stmt = gsi_stmt (*gsi);
2055
2056 switch (gimple_code (stmt))
2057 {
2058 case GIMPLE_CALL:
2059 if (gimple_call_chain (stmt))
2060 break;
2061 decl = gimple_call_fndecl (stmt);
2062 if (!decl)
2063 break;
2064 target_context = decl_function_context (decl);
2065 if (target_context && DECL_STATIC_CHAIN (decl))
2066 {
2067 gimple_call_set_chain (stmt, get_static_chain (info, target_context,
2068 &wi->gsi));
2069 info->static_chain_added |= (1 << (info->context != target_context));
2070 }
2071 break;
2072
2073 case GIMPLE_OMP_PARALLEL:
2074 case GIMPLE_OMP_TASK:
2075 save_static_chain_added = info->static_chain_added;
2076 info->static_chain_added = 0;
2077 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
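/* Bit 0 of static_chain_added records a use of the current frame
   (FRAME.*, added below as a shared clause); bit 1 records a use of
   the incoming static chain (CHAIN.*, added as firstprivate).  */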
2078 for (i = 0; i < 2; i++)
2079 {
2080 tree c, decl;
2081 if ((info->static_chain_added & (1 << i)) == 0)
2082 continue;
2083 decl = i ? get_chain_decl (info) : info->frame_decl;
2084 /* Don't add CHAIN.* or FRAME.* twice. */
2085 for (c = gimple_omp_taskreg_clauses (stmt);
2086 c;
2087 c = OMP_CLAUSE_CHAIN (c))
2088 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2089 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2090 && OMP_CLAUSE_DECL (c) == decl)
2091 break;
2092 if (c == NULL)
2093 {
2094 c = build_omp_clause (gimple_location (stmt),
2095 i ? OMP_CLAUSE_FIRSTPRIVATE
2096 : OMP_CLAUSE_SHARED);
2097 OMP_CLAUSE_DECL (c) = decl;
2098 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2099 gimple_omp_taskreg_set_clauses (stmt, c);
2100 }
2101 }
2102 info->static_chain_added |= save_static_chain_added;
2103 break;
2104
2105 case GIMPLE_OMP_FOR:
2106 walk_body (convert_gimple_call, NULL, info,
2107 gimple_omp_for_pre_body_ptr (stmt));
2108 /* FALLTHRU */
2109 case GIMPLE_OMP_SECTIONS:
2110 case GIMPLE_OMP_SECTION:
2111 case GIMPLE_OMP_SINGLE:
2112 case GIMPLE_OMP_TARGET:
2113 case GIMPLE_OMP_TEAMS:
2114 case GIMPLE_OMP_MASTER:
2115 case GIMPLE_OMP_TASKGROUP:
2116 case GIMPLE_OMP_ORDERED:
2117 case GIMPLE_OMP_CRITICAL:
2118 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2119 break;
2120
2121 default:
2122 /* Keep looking for other operands. */
2123 *handled_ops_p = false;
2124 return NULL_TREE;
2125 }
2126
2127 *handled_ops_p = true;
2128 return NULL_TREE;
2129 }
2130
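/* Illustrative sketch (invented example): for

     int f (int n)
     {
       int g (void) { return n; }
       return g ();
     }

   the call "g ()" in f effectively becomes a call carrying the static
   chain "&FRAME.f", installed by gimple_call_set_chain above; for a
   callee nested more deeply, the chain is instead loaded from the
   current frame's CHAIN field.  */
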
2131 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2132 call expressions. At the same time, determine if a nested function
2133 actually uses its static chain; if not, remember that. */
2134
2135 static void
2136 convert_all_function_calls (struct nesting_info *root)
2137 {
2138 unsigned int chain_count = 0, old_chain_count, iter_count;
2139 struct nesting_info *n;
2140
2141 /* First, optimistically clear static_chain for all decls that haven't
2142 used the static chain already for variable access. */
2143 FOR_EACH_NEST_INFO (n, root)
2144 {
2145 tree decl = n->context;
2146 if (!n->outer || (!n->chain_decl && !n->chain_field))
2147 {
2148 DECL_STATIC_CHAIN (decl) = 0;
2149 if (dump_file && (dump_flags & TDF_DETAILS))
2150 fprintf (dump_file, "Guessing no static-chain for %s\n",
2151 lang_hooks.decl_printable_name (decl, 2));
2152 }
2153 else
2154 DECL_STATIC_CHAIN (decl) = 1;
2155 chain_count += DECL_STATIC_CHAIN (decl);
2156 }
2157
2158 /* Walk the functions and perform transformations. Note that these
2159 transformations can induce new uses of the static chain, which in turn
2160 require re-examining all users of the decl. */
2161 /* ??? It would make sense to try to use the call graph to speed this up,
2162 but the call graph hasn't really been built yet. Even if it had been, we
2163 would still need to iterate in this loop since address-of references
2164 wouldn't show up in the callgraph anyway. */
2165 iter_count = 0;
2166 do
2167 {
2168 old_chain_count = chain_count;
2169 chain_count = 0;
2170 iter_count++;
2171
2172 if (dump_file && (dump_flags & TDF_DETAILS))
2173 fputc ('\n', dump_file);
2174
2175 FOR_EACH_NEST_INFO (n, root)
2176 {
2177 tree decl = n->context;
2178 walk_function (convert_tramp_reference_stmt,
2179 convert_tramp_reference_op, n);
2180 walk_function (convert_gimple_call, NULL, n);
2181 chain_count += DECL_STATIC_CHAIN (decl);
2182 }
2183 }
2184 while (chain_count != old_chain_count);
2185
2186 if (dump_file && (dump_flags & TDF_DETAILS))
2187 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2188 iter_count);
2189 }
2190
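/* Example of why the loop above must iterate to a fixed point
   (sketch): if h, nested in g, nested in f, merely takes the address
   "&g", then building g's trampoline makes h read a field of f's
   frame, so h needs its static chain after all -- reversing the
   optimistic DECL_STATIC_CHAIN guess, a fact only noticed on a
   subsequent iteration.  */
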
2191 struct nesting_copy_body_data
2192 {
2193 copy_body_data cb;
2194 struct nesting_info *root;
2195 };
2196
2197 /* A helper subroutine for debug_var_chain type remapping. */
2198
2199 static tree
2200 nesting_copy_decl (tree decl, copy_body_data *id)
2201 {
2202 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2203 void **slot = pointer_map_contains (nid->root->var_map, decl);
2204
2205 if (slot)
2206 return (tree) *slot;
2207
2208 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2209 {
2210 tree new_decl = copy_decl_no_change (decl, id);
2211 DECL_ORIGINAL_TYPE (new_decl)
2212 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2213 return new_decl;
2214 }
2215
2216 if (TREE_CODE (decl) == VAR_DECL
2217 || TREE_CODE (decl) == PARM_DECL
2218 || TREE_CODE (decl) == RESULT_DECL)
2219 return decl;
2220
2221 return copy_decl_no_change (decl, id);
2222 }
2223
2224 /* A helper function for remap_vla_decls. See if *TP contains
2225 some remapped variables. */
2226
2227 static tree
2228 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2229 {
2230 struct nesting_info *root = (struct nesting_info *) data;
2231 tree t = *tp;
2232 void **slot;
2233
2234 if (DECL_P (t))
2235 {
2236 *walk_subtrees = 0;
2237 slot = pointer_map_contains (root->var_map, t);
2238
2239 if (slot)
2240 return (tree) *slot;
2241 }
2242 return NULL;
2243 }
2244
2245 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2246 involved. */
2247
2248 static void
2249 remap_vla_decls (tree block, struct nesting_info *root)
2250 {
2251 tree var, subblock, val, type;
2252 struct nesting_copy_body_data id;
2253
2254 for (subblock = BLOCK_SUBBLOCKS (block);
2255 subblock;
2256 subblock = BLOCK_CHAIN (subblock))
2257 remap_vla_decls (subblock, root);
2258
2259 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2260 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2261 {
2262 val = DECL_VALUE_EXPR (var);
2263 type = TREE_TYPE (var);
2264
2265 if (!(TREE_CODE (val) == INDIRECT_REF
2266 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2267 && variably_modified_type_p (type, NULL)))
2268 continue;
2269
2270 if (pointer_map_contains (root->var_map, TREE_OPERAND (val, 0))
2271 || walk_tree (&type, contains_remapped_vars, root, NULL))
2272 break;
2273 }
2274
2275 if (var == NULL_TREE)
2276 return;
2277
2278 memset (&id, 0, sizeof (id));
2279 id.cb.copy_decl = nesting_copy_decl;
2280 id.cb.decl_map = pointer_map_create ();
2281 id.root = root;
2282
2283 for (; var; var = DECL_CHAIN (var))
2284 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2285 {
2286 struct nesting_info *i;
2287 tree newt, context;
2288 void **slot;
2289
2290 val = DECL_VALUE_EXPR (var);
2291 type = TREE_TYPE (var);
2292
2293 if (!(TREE_CODE (val) == INDIRECT_REF
2294 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2295 && variably_modified_type_p (type, NULL)))
2296 continue;
2297
2298 slot = pointer_map_contains (root->var_map, TREE_OPERAND (val, 0));
2299 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2300 continue;
2301
2302 context = decl_function_context (var);
2303 for (i = root; i; i = i->outer)
2304 if (i->context == context)
2305 break;
2306
2307 if (i == NULL)
2308 continue;
2309
2310 /* Fully expand value expressions. This avoids debug variables that
2311 are referenced only from value expressions and so could be swept by GC. */
2312 if (slot)
2313 {
2314 tree t = (tree) *slot;
2315 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2316 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2317 }
2318
2319 id.cb.src_fn = i->context;
2320 id.cb.dst_fn = i->context;
2321 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2322
2323 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2324 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2325 {
2326 newt = TREE_TYPE (newt);
2327 type = TREE_TYPE (type);
2328 }
2329 if (TYPE_NAME (newt)
2330 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2331 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2332 && newt != type
2333 && TYPE_NAME (newt) == TYPE_NAME (type))
2334 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2335
2336 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2337 if (val != DECL_VALUE_EXPR (var))
2338 SET_DECL_VALUE_EXPR (var, val);
2339 }
2340
2341 pointer_map_destroy (id.cb.decl_map);
2342 }
2343
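/* Illustrative sketch (invented example): in

     void f (int n)
     {
       char a[n];
       void g (void) { a[0] = 1; }
       g ();
     }

   a's storage lives in f's frame and a carries a DECL_VALUE_EXPR of
   the form *a.ptr.  Because a's variably-modified type mentions the
   saved array length, remap_vla_decls rewrites the type and the value
   expression in terms of the remapped debug variables found in
   var_map.  */
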
2344 /* Fold the MEM_REF *E. */
2345 static bool
2346 fold_mem_refs (const void *e, void *data ATTRIBUTE_UNUSED)
2347 {
2348 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2349 *ref_p = fold (*ref_p);
2350 return true;
2351 }
2352
2353 /* Do "everything else" to clean up or complete state collected by the
2354 various walking passes -- lay out the types and decls, generate code
2355 to initialize the frame decl, store critical expressions in the
2356 struct function for rtl to find. */
2357
2358 static void
2359 finalize_nesting_tree_1 (struct nesting_info *root)
2360 {
2361 gimple_seq stmt_list;
2362 gimple stmt;
2363 tree context = root->context;
2364 struct function *sf;
2365
2366 stmt_list = NULL;
2367
2368 /* If we created a non-local frame type or decl, we need to lay them
2369 out at this time. */
2370 if (root->frame_type)
2371 {
2372 /* In some cases the frame type will trigger the -Wpadded warning.
2373 This is not helpful; suppress it. */
2374 int save_warn_padded = warn_padded;
2375 tree *adjust;
2376
2377 warn_padded = 0;
2378 layout_type (root->frame_type);
2379 warn_padded = save_warn_padded;
2380 layout_decl (root->frame_decl, 0);
2381
2382 /* Remove root->frame_decl from root->new_local_var_chain, so
2383 that we can declare it also in the lexical blocks, which
2384 helps ensure virtual regs that end up appearing in its RTL
2385 expression get substituted in instantiate_virtual_regs(). */
2386 for (adjust = &root->new_local_var_chain;
2387 *adjust != root->frame_decl;
2388 adjust = &DECL_CHAIN (*adjust))
2389 gcc_assert (DECL_CHAIN (*adjust));
2390 *adjust = DECL_CHAIN (*adjust);
2391
2392 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2393 declare_vars (root->frame_decl,
2394 gimple_seq_first_stmt (gimple_body (context)), true);
2395 }
2396
2397 /* If any parameters were referenced non-locally, then we need to
2398 insert a copy. Likewise, if any variables were referenced by
2399 pointer, we need to initialize the address. */
2400 if (root->any_parm_remapped)
2401 {
2402 tree p;
2403 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2404 {
2405 tree field, x, y;
2406
2407 field = lookup_field_for_decl (root, p, NO_INSERT);
2408 if (!field)
2409 continue;
2410
2411 if (use_pointer_in_frame (p))
2412 x = build_addr (p, context);
2413 else
2414 x = p;
2415
2416 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2417 root->frame_decl, field, NULL_TREE);
2418 stmt = gimple_build_assign (y, x);
2419 gimple_seq_add_stmt (&stmt_list, stmt);
2420 /* If the assignment is from a non-register, the stmt is
2421 not valid gimple. Make it so by using a temporary instead. */
2422 if (!is_gimple_reg (x)
2423 && is_gimple_reg_type (TREE_TYPE (x)))
2424 {
2425 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2426 x = init_tmp_var (root, x, &gsi);
2427 gimple_assign_set_rhs1 (stmt, x);
2428 }
2429 }
2430 }
2431
2432 /* If a chain_field was created, then it needs to be initialized
2433 from chain_decl. */
2434 if (root->chain_field)
2435 {
2436 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2437 root->frame_decl, root->chain_field, NULL_TREE);
2438 stmt = gimple_build_assign (x, get_chain_decl (root));
2439 gimple_seq_add_stmt (&stmt_list, stmt);
2440 }
2441
2442 /* If trampolines were created, then we need to initialize them. */
2443 if (root->any_tramp_created)
2444 {
2445 struct nesting_info *i;
2446 for (i = root->inner; i ; i = i->next)
2447 {
2448 tree arg1, arg2, arg3, x, field;
2449
2450 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2451 if (!field)
2452 continue;
2453
2454 gcc_assert (DECL_STATIC_CHAIN (i->context));
2455 arg3 = build_addr (root->frame_decl, context);
2456
2457 arg2 = build_addr (i->context, context);
2458
2459 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2460 root->frame_decl, field, NULL_TREE);
2461 arg1 = build_addr (x, context);
2462
2463 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2464 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2465 gimple_seq_add_stmt (&stmt_list, stmt);
2466 }
2467 }
2468
2469 /* If we created initialization statements, insert them. */
2470 if (stmt_list)
2471 {
2472 gimple bind;
2473 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2474 bind = gimple_seq_first_stmt (gimple_body (context));
2475 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2476 gimple_bind_set_body (bind, stmt_list);
2477 }
2478
2479 /* If a chain_decl was created, then it needs to be registered with
2480 struct function so that it gets initialized from the static chain
2481 register at the beginning of the function. */
2482 sf = DECL_STRUCT_FUNCTION (root->context);
2483 sf->static_chain_decl = root->chain_decl;
2484
2485 /* Similarly for the non-local goto save area. */
2486 if (root->nl_goto_field)
2487 {
2488 sf->nonlocal_goto_save_area
2489 = get_frame_field (root, context, root->nl_goto_field, NULL);
2490 sf->has_nonlocal_label = 1;
2491 }
2492
2493 /* Make sure all new local variables get inserted into the
2494 proper BIND_EXPR. */
2495 if (root->new_local_var_chain)
2496 declare_vars (root->new_local_var_chain,
2497 gimple_seq_first_stmt (gimple_body (root->context)),
2498 false);
2499
2500 if (root->debug_var_chain)
2501 {
2502 tree debug_var;
2503 gimple scope;
2504
2505 remap_vla_decls (DECL_INITIAL (root->context), root);
2506
2507 for (debug_var = root->debug_var_chain; debug_var;
2508 debug_var = DECL_CHAIN (debug_var))
2509 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2510 break;
2511
2512 /* If there are any debug decls with variable length types,
2513 remap those types using other debug_var_chain variables. */
2514 if (debug_var)
2515 {
2516 struct nesting_copy_body_data id;
2517
2518 memset (&id, 0, sizeof (id));
2519 id.cb.copy_decl = nesting_copy_decl;
2520 id.cb.decl_map = pointer_map_create ();
2521 id.root = root;
2522
2523 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
2524 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2525 {
2526 tree type = TREE_TYPE (debug_var);
2527 tree newt, t = type;
2528 struct nesting_info *i;
2529
2530 for (i = root; i; i = i->outer)
2531 if (variably_modified_type_p (type, i->context))
2532 break;
2533
2534 if (i == NULL)
2535 continue;
2536
2537 id.cb.src_fn = i->context;
2538 id.cb.dst_fn = i->context;
2539 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2540
2541 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2542 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2543 {
2544 newt = TREE_TYPE (newt);
2545 t = TREE_TYPE (t);
2546 }
2547 if (TYPE_NAME (newt)
2548 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2549 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2550 && newt != t
2551 && TYPE_NAME (newt) == TYPE_NAME (t))
2552 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2553 }
2554
2555 pointer_map_destroy (id.cb.decl_map);
2556 }
2557
2558 scope = gimple_seq_first_stmt (gimple_body (root->context));
2559 if (gimple_bind_block (scope))
2560 declare_vars (root->debug_var_chain, scope, true);
2561 else
2562 BLOCK_VARS (DECL_INITIAL (root->context))
2563 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2564 root->debug_var_chain);
2565 }
2566
2567 /* Fold the rewritten MEM_REF trees. */
2568 pointer_set_traverse (root->mem_refs, fold_mem_refs, NULL);
2569
2570 /* Dump the translated tree function. */
2571 if (dump_file)
2572 {
2573 fputs ("\n\n", dump_file);
2574 dump_function_to_file (root->context, dump_file, dump_flags);
2575 }
2576 }
2577
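/* Sketch of the initialization generated above (illustrative; assumes
   a function f with a remapped parameter n, an incoming static chain,
   and a nested function g requiring a trampoline):

     FRAME.f.n = n;
     FRAME.f.CHAIN = CHAIN.in;
     __builtin_init_trampoline (&FRAME.f.tramp_g, &g, &FRAME.f);

   prepended to the body of f's outermost GIMPLE_BIND.  */
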
2578 static void
2579 finalize_nesting_tree (struct nesting_info *root)
2580 {
2581 struct nesting_info *n;
2582 FOR_EACH_NEST_INFO (n, root)
2583 finalize_nesting_tree_1 (n);
2584 }
2585
2586 /* Unnest the nodes and pass them to cgraph. */
2587
2588 static void
2589 unnest_nesting_tree_1 (struct nesting_info *root)
2590 {
2591 struct cgraph_node *node = cgraph_get_node (root->context);
2592
2593 /* For nested functions update the cgraph to reflect unnesting.
2594 We also delay finalizing these functions until this point. */
2595 if (node->origin)
2596 {
2597 cgraph_unnest_node (node);
2598 cgraph_finalize_function (root->context, true);
2599 }
2600 }
2601
2602 static void
2603 unnest_nesting_tree (struct nesting_info *root)
2604 {
2605 struct nesting_info *n;
2606 FOR_EACH_NEST_INFO (n, root)
2607 unnest_nesting_tree_1 (n);
2608 }
2609
2610 /* Free the data structures allocated during this pass. */
2611
2612 static void
2613 free_nesting_tree (struct nesting_info *root)
2614 {
2615 struct nesting_info *node, *next;
2616
2617 node = iter_nestinfo_start (root);
2618 do
2619 {
2620 next = iter_nestinfo_next (node);
2621 pointer_map_destroy (node->var_map);
2622 pointer_map_destroy (node->field_map);
2623 pointer_set_destroy (node->mem_refs);
2624 free (node);
2625 node = next;
2626 }
2627 while (node);
2628 }
2629
2630 /* Gimplify a function and all its nested functions. */
2631 static void
2632 gimplify_all_functions (struct cgraph_node *root)
2633 {
2634 struct cgraph_node *iter;
2635 if (!gimple_body (root->decl))
2636 gimplify_function_tree (root->decl);
2637 for (iter = root->nested; iter; iter = iter->next_nested)
2638 gimplify_all_functions (iter);
2639 }
2640
2641 /* Main entry point for this pass. Process FNDECL and all of its nested
2642 subroutines and turn them into something less tightly bound. */
2643
2644 void
2645 lower_nested_functions (tree fndecl)
2646 {
2647 struct cgraph_node *cgn;
2648 struct nesting_info *root;
2649
2650 /* If there are no nested functions, there's nothing to do. */
2651 cgn = cgraph_get_node (fndecl);
2652 if (!cgn->nested)
2653 return;
2654
2655 gimplify_all_functions (cgn);
2656
2657 dump_file = dump_begin (TDI_nested, &dump_flags);
2658 if (dump_file)
2659 fprintf (dump_file, "\n;; Function %s\n\n",
2660 lang_hooks.decl_printable_name (fndecl, 2));
2661
2662 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
2663 root = create_nesting_tree (cgn);
2664
2665 walk_all_functions (convert_nonlocal_reference_stmt,
2666 convert_nonlocal_reference_op,
2667 root);
2668 walk_all_functions (convert_local_reference_stmt,
2669 convert_local_reference_op,
2670 root);
2671 walk_all_functions (convert_nl_goto_reference, NULL, root);
2672 walk_all_functions (convert_nl_goto_receiver, NULL, root);
2673
2674 convert_all_function_calls (root);
2675 finalize_nesting_tree (root);
2676 unnest_nesting_tree (root);
2677
2678 free_nesting_tree (root);
2679 bitmap_obstack_release (&nesting_info_bitmap_obstack);
2680
2681 if (dump_file)
2682 {
2683 dump_end (TDI_nested, dump_file);
2684 dump_file = NULL;
2685 }
2686 }
2687
2688 #include "gt-tree-nested.h"