function.h (ipa_opt_pass, [...]): Move forward declarations.
[gcc.git] / gcc / tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "alias.h"
25 #include "symtab.h"
26 #include "tree.h"
27 #include "fold-const.h"
28 #include "stringpool.h"
29 #include "stor-layout.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "function.h"
33 #include "tree-dump.h"
34 #include "tree-inline.h"
35 #include "predict.h"
36 #include "basic-block.h"
37 #include "tree-ssa-alias.h"
38 #include "internal-fn.h"
39 #include "gimple-expr.h"
40 #include "gimple.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimple-walk.h"
44 #include "tree-iterator.h"
45 #include "bitmap.h"
46 #include "cgraph.h"
47 #include "tree-cfg.h"
48 #include "rtl.h"
49 #include "flags.h"
50 #include "insn-config.h"
51 #include "expmed.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "calls.h"
55 #include "emit-rtl.h"
56 #include "varasm.h"
57 #include "stmt.h"
58 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
59 #include "langhooks.h"
60 #include "gimple-low.h"
61 #include "gomp-constants.h"
62
63
64 /* The object of this pass is to lower the representation of a set of nested
65 functions in order to expose all of the gory details of the various
66 nonlocal references. We want to do this sooner rather than later, in
67 order to give us more freedom in emitting all of the functions in question.
68
69 Back in olden times, when gcc was young, we developed an insanely
70 complicated scheme whereby variables which were referenced nonlocally
71 were forced to live in the stack of the declaring function, and then
72 the nested functions magically discovered where these variables were
73 placed. In order for this scheme to function properly, it required
74 that the outer function be partially expanded, then we switch to
75 compiling the inner function, and once done with those we switch back
76 to compiling the outer function. Such delicate ordering requirements
77 makes it difficult to do whole translation unit optimizations
78 involving such functions.
79
80 The implementation here is much more direct. Everything that can be
81 referenced by an inner function is a member of an explicitly created
82 structure herein called the "nonlocal frame struct". The incoming
83 static chain for a nested function is a pointer to this struct in
84 the parent. In this way, we settle on known offsets from a known
85 base, and so are decoupled from the logic that places objects in the
86 function's stack frame. More importantly, we don't have to wait for
87 that to happen -- since the compilation of the inner function is no
88 longer tied to a real stack frame, the nonlocal frame struct can be
89 allocated anywhere. Which means that the outer function is now
90 inlinable.
91
92 Theory of operation here is very simple. Iterate over all the
93 statements in all the functions (depth first) several times,
94 allocating structures and fields on demand. In general we want to
95 examine inner functions first, so that we can avoid making changes
96 to outer functions which are unnecessary.
97
98 The order of the passes matters a bit, in that later passes will be
99 skipped if it is discovered that the functions don't actually interact
100 at all. That is, they're nested in the lexical sense but could have
101 been written as independent functions without change. */
102
103
/* A node in the tree describing the lexical nesting of functions being
   lowered; one node per function.  */

struct nesting_info
{
  /* Tree links: enclosing function, first nested function, and next
     sibling at the same nesting depth.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Map from a non-locally referenced DECL to its FIELD_DECL in this
     function's non-local frame struct.  */
  hash_map<tree, tree> *field_map;
  /* Map from a DECL to its local replacement (debug decls, trampoline
     fields, ...).  */
  hash_map<tree, tree> *var_map;
  /* Set of tree slots recorded during rewriting; exact use is outside
     this chunk -- TODO confirm against the rest of the file.  */
  hash_set<tree *> *mem_refs;
  /* DECL_UIDs whose frame expansion is suppressed (see the OMP clause
     handling in convert_nonlocal_reference_op).  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this node describes.  */
  tree context;
  /* Chain of temporaries created while rewriting, registered in the
     function when the nesting tree is finalized.  */
  tree new_local_var_chain;
  /* Chain of debug-only VAR_DECLs carrying DECL_VALUE_EXPR.  */
  tree debug_var_chain;
  /* The RECORD_TYPE of the non-local frame struct, and the local
     VAR_DECL instance of it.  */
  tree frame_type;
  tree frame_decl;
  /* FIELD_DECL in FRAME_TYPE holding the chain to the parent frame,
     and the PARM_DECL-like variable holding the incoming chain.  */
  tree chain_field;
  tree chain_decl;
  /* FIELD_DECL holding the jmp_buf for non-local gotos, if any.  */
  tree nl_goto_field;

  /* True if any PARM_DECL was remapped into the frame struct.  */
  bool any_parm_remapped;
  /* True if any trampoline field was created.  */
  bool any_tramp_created;
  /* Flags noting how a static chain was added; consumers are outside
     this chunk -- TODO confirm bit meanings.  */
  char static_chain_added;
};
128
129
130 /* Iterate over the nesting tree, starting with ROOT, depth first. */
131
132 static inline struct nesting_info *
133 iter_nestinfo_start (struct nesting_info *root)
134 {
135 while (root->inner)
136 root = root->inner;
137 return root;
138 }
139
140 static inline struct nesting_info *
141 iter_nestinfo_next (struct nesting_info *node)
142 {
143 if (node->next)
144 return iter_nestinfo_start (node->next);
145 return node->outer;
146 }
147
148 #define FOR_EACH_NEST_INFO(I, ROOT) \
149 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
150
151 /* Obstack used for the bitmaps in the struct above. */
152 static struct bitmap_obstack nesting_info_bitmap_obstack;
153
154
155 /* We're working in so many different function contexts simultaneously,
156 that create_tmp_var is dangerous. Prevent mishap. */
157 #define create_tmp_var cant_use_create_tmp_var_here_dummy
158
/* Like create_tmp_var, except record the variable for registration at
   the given nesting level.  PREFIX may be NULL.  */

static tree
create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
{
  tree tmp_var;

  /* If the type is of variable size or a type which must be created by the
     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves here.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  DECL_CONTEXT (tmp_var) = info->context;
  /* Thread the variable onto INFO's pending list; it gets declared in
     INFO->CONTEXT when the nesting tree is finalized.  */
  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
  /* Complex and vector temporaries may be treated as GIMPLE registers.  */
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;

  return tmp_var;
}
186
/* Take the address of EXP to be used within function CONTEXT.
   Mark it for addressability as necessary.  */

tree
build_addr (tree exp, tree context)
{
  tree base = exp;
  tree save_context;
  tree retval;

  /* Strip any component references; only the underlying decl needs
     TREE_ADDRESSABLE set.  */
  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  if (DECL_P (base))
    TREE_ADDRESSABLE (base) = 1;

  /* Building the ADDR_EXPR will compute a set of properties for
     that ADDR_EXPR.  Those properties are unfortunately context
     specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.

     Temporarily set CURRENT_FUNCTION_DECL to the desired context,
     build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL.  That
     way the properties for the ADDR_EXPR are computed properly.  */
  save_context = current_function_decl;
  current_function_decl = context;
  retval = build_fold_addr_expr (exp);
  current_function_decl = save_context;
  return retval;
}
216
/* Insert FIELD into TYPE, sorted by alignment requirements.  */

void
insert_field_into_struct (tree type, tree field)
{
  tree *p;

  DECL_CONTEXT (field) = type;

  /* Keep the field list sorted by non-increasing alignment so the
     frame struct lays out without gratuitous padding.  */
  for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
      break;

  DECL_CHAIN (field) = *p;
  *p = field;

  /* Set correct alignment for frame struct type.  */
  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
    TYPE_ALIGN (type) = DECL_ALIGN (field);
}
237
/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name the record "FRAME.<function>" for dumps and debug info.  */
      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}
275
276 /* Return true if DECL should be referenced by pointer in the non-local
277 frame structure. */
278
279 static bool
280 use_pointer_in_frame (tree decl)
281 {
282 if (TREE_CODE (decl) == PARM_DECL)
283 {
284 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
285 sized decls, and inefficient to copy large aggregates. Don't bother
286 moving anything but scalar variables. */
287 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
288 }
289 else
290 {
291 /* Variable sized types make things "interesting" in the frame. */
292 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
293 }
294 }
295
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.
   With INSERT == NO_INSERT, return the existing field or NULL_TREE.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* The frame holds only a pointer to the object.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The object itself lives in the frame; mirror its alignment,
	     addressability and volatility.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  DECL_ALIGN (field) = DECL_ALIGN (decl);
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      /* Note that a parameter was remapped; consumed during
	 finalization (outside this chunk).  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
341
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      /* The chain is a pointer to the parent's frame struct.  */
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      /* Creating the chain decl implies this function needs a static
	 chain from its caller.  */
      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
387
/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  if (!field)
    {
      /* The field points at the grandparent's frame struct.  */
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      /* A chain field implies this function needs a static chain.  */
      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}
421
/* Initialize a new temporary with the GIMPLE_CALL CALL, inserting the
   call before GSI and returning the temporary holding its result.  */

static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
			gcall *call)
{
  tree t;

  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
  gimple_call_set_lhs (call, t);
  /* Inherit the location of the statement we insert before, if any,
     so debug info stays sensible.  */
  if (! gsi_end_p (*gsi))
    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, call, GSI_SAME_STMT);

  return t;
}
438
439
/* Copy EXP into a temporary.  Allocate the temporary in the context of
   INFO and insert the initialization statement before GSI.  */

static tree
init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (t, exp);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  /* Deliberately the *_without_update variant: operands are rewritten
     by the caller's walk, so don't update them here.  */
  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}
457
458
459 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
460
461 static tree
462 gsi_gimplify_val (struct nesting_info *info, tree exp,
463 gimple_stmt_iterator *gsi)
464 {
465 if (is_gimple_val (exp))
466 return exp;
467 else
468 return init_tmp_var (info, exp, gsi);
469 }
470
/* Similarly to init_tmp_var, but copy from the temporary into EXP and
   insert the statement after the iterator (for stores to an lvalue).  */

static tree
save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  /* Note the reversed assignment: EXP = T, executed after the current
     statement writes T.  */
  stmt = gimple_build_assign (exp, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}
488
/* Build or return the type used to represent a nested function trampoline.  */

/* Cached singleton; the trampoline type is the same for every function.  */
static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* Model the trampoline as a record containing a single byte array
     of the target-specified size and alignment.  */
  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
528
/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a trampoline for this function.  With NO_INSERT,
   return the existing field or NULL_TREE.  */

static tree
lookup_tramp_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->var_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->var_map->get_or_insert (decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);
      TREE_TYPE (field) = get_trampoline_type (info);
      /* The trampoline's address is taken when it is materialized.  */
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      /* Consumed during finalization (outside this chunk).  */
      info->any_tramp_created = true;
    }

  return *slot;
}
558
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* One word for the frame pointer plus the save-area words.  */
      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      /* NOTE(review): index type size_int (size) gives the array
	 SIZE + 1 elements -- presumably intentional slack; confirm.  */
      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
601
602 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
603
604 static void
605 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
606 struct nesting_info *info, gimple_seq *pseq)
607 {
608 struct walk_stmt_info wi;
609
610 memset (&wi, 0, sizeof (wi));
611 wi.info = info;
612 wi.val_only = true;
613 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
614 }
615
616
617 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
618
619 static inline void
620 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
621 struct nesting_info *info)
622 {
623 gimple_seq body = gimple_body (info->context);
624 walk_body (callback_stmt, callback_op, info, &body);
625 gimple_set_body (info->context, body);
626 }
627
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Collect statements the callbacks emit into an initially empty
     sequence; they are appended to the pre-body below.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is an lvalue; don't force it into a temporary.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is a binary op of the index and the step; walk
	 the index as an lvalue and the step as a plain value.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Anything the callbacks inserted must run before the loop; append
     it to the pre-body with the loop's location.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
680
681 /* Similarly for ROOT and all functions nested underneath, depth first. */
682
683 static void
684 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
685 struct nesting_info *root)
686 {
687 struct nesting_info *n;
688 FOR_EACH_NEST_INFO (n, root)
689 walk_function (callback_stmt, callback_op, n);
690 }
691
692
/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
   done in the types of the PARM_DECLs of the nested function, meaning the
   argument types of that function will disagree with the arguments in the
   calls to that function.  So we'd either have to make a copy of the nested
   function corresponding to each time the enclosing function was inlined or
   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
   function.  The former is not practical.  The latter would still require
   detecting this case to know when to add the conversions.  So, for now at
   least, we don't inline such an enclosing function.

   We have to do that check recursively, so here return indicating whether
   FNDECL has such a nested function.  ORIG_FNDECL is the function we were
   trying to inline to use for checking whether any argument is variably
   modified by anything in it.

   It would be better to do this in tree-inline.c so that we could give
   the appropriate warning for why a function can't be inlined, but that's
   too late since the nesting structure has already been flattened and
   adding a flag just to record this fact seems a waste of a flag.  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  tree arg;

  /* Walk each directly nested function of FNDECL.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      /* Any variably modified parameter type disqualifies inlining.  */
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      /* Recurse into deeper nesting levels.  */
      if (check_for_nested_with_variably_modified (cgn->decl,
						   orig_fndecl))
	return true;
    }

  return false;
}
735
/* Construct our local datastructure describing the function nesting
   tree rooted by CGN.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  /* Recurse over the nested functions, linking each as a child of
     INFO.  Note the sibling list ends up in reverse order.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See check_for_nested_with_variably_modified for a discussion of
     why this has to be done here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
764
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The target is this very function; its chain is the address
	 of its own frame struct.  */
      x = build_addr (info->frame_decl, target_context);
    }
  else
    {
      /* Otherwise start from the incoming chain and dereference one
	 __chain field per intervening nesting level.  */
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
795
796
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
    }
  else
    {
      /* Walk the static chain down to TARGET_CONTEXT's frame.  */
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  /* Intentionally shadows the FIELD parameter: each level has
	     its own __chain field.  */
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      x = build_simple_mem_ref (x);
    }

  /* Finally select FIELD within the target frame.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
833
834 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
835
/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);

  /* Reuse a previously built stand-in, if any.  */
  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref (x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  /* Decls stored by pointer need one more indirection.  */
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The stand-in's value is the frame access built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Without optimization, VLA types referenced across the nesting
     boundary also need their size expressions made visible here.  */
  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
911
912
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  /* Build the debug stand-in first; unless expansion is
	     suppressed for this decl (OMP clause handling), replace
	     it with the real frame access.  */
	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      /* Decls stored by pointer need one more indirection.  */
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build_simple_mem_ref (x);
		}
	    }

	  /* If a plain gimple value is required, load into (or store
	     from) a temporary.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      /* The innermost base object itself is walked as a non-value.  */
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1061
1062 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1063 struct walk_stmt_info *);
1064
/* Helper for convert_nonlocal_reference_stmt: rewrite all references to VAR
   and PARM_DECLs that belong to outer functions inside the OMP clause chain
   at *PCLAUSES.  WI is as in convert_nonlocal_reference_op.  Returns true
   if any rewritten clause requires the static chain to be passed to the
   outlined region.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap: bits set for clause decls
     below must not leak past this clause chain; the copy is installed as
     info->suppress_expansion once the first pass is done.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  /* First pass: rewrite the decls and expressions held directly in the
     clauses.  Clauses with attached GIMPLE sequences set NEED_STMTS and
     are revisited in the second pass below.  */
  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* The step expression may itself reference nonlocal decls.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  /* Replace a nonlocal decl in the clause by its debug decl and
	     suppress the normal frame-field expansion for it inside this
	     region's body.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  /* These clauses carry a single expression operand to rewrite.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					 &dummy, wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  /* The clause decl may be an arbitrary reference expression
	     (e.g. an array section); walk it fully.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	  /* No decls or expressions to rewrite.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to reduction,
     lastprivate and linear clauses, now that the suppression bitmap is
     in place.  The reduction placeholder's DECL_CONTEXT is temporarily
     reparented so the walk treats it as local.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
1246
1247 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1248
1249 static void
1250 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1251 {
1252 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1253 type = TREE_TYPE (type);
1254
1255 if (TYPE_NAME (type)
1256 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1257 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1258 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1259
1260 while (POINTER_TYPE_P (type)
1261 || TREE_CODE (type) == VECTOR_TYPE
1262 || TREE_CODE (type) == FUNCTION_TYPE
1263 || TREE_CODE (type) == METHOD_TYPE)
1264 type = TREE_TYPE (type);
1265
1266 if (TREE_CODE (type) == ARRAY_TYPE)
1267 {
1268 tree domain, t;
1269
1270 note_nonlocal_vla_type (info, TREE_TYPE (type));
1271 domain = TYPE_DOMAIN (type);
1272 if (domain)
1273 {
1274 t = TYPE_MIN_VALUE (domain);
1275 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1276 && decl_function_context (t) != info->context)
1277 get_nonlocal_debug_decl (info, t);
1278 t = TYPE_MAX_VALUE (domain);
1279 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1280 && decl_function_context (t) != info->context)
1281 get_nonlocal_debug_decl (info, t);
1282 }
1283 }
1284 }
1285
1286 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1287 in BLOCK. */
1288
1289 static void
1290 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1291 {
1292 tree var;
1293
1294 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1295 if (TREE_CODE (var) == VAR_DECL
1296 && variably_modified_type_p (TREE_TYPE (var), NULL)
1297 && DECL_HAS_VALUE_EXPR_P (var)
1298 && decl_function_context (var) != info->context)
1299 note_nonlocal_vla_type (info, TREE_TYPE (var));
1300 }
1301
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = true;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If rewriting the clauses showed that the static chain is needed
	 inside the outlined region, pass it in via an added
	 firstprivate clause.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Collect temporaries created while walking the region body so
	 they can be declared inside the region rather than in the
	 enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* The loop bounds and increments are not part of the body and must
	 be walked explicitly.  */
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
			   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
		 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      /* An offloaded region cannot receive the static chain implicitly;
	 if the clauses need it, map the chain object to the device
	 explicitly.  */
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* No clauses to convert; just walk the body.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
	gbind *bind_stmt = as_a <gbind *> (stmt);
	/* Without optimization VLA bounds may only be referenced from
	   value expressions, so note them here explicitly.  */
	if (!optimize && gimple_bind_block (bind_stmt))
	  note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));

	for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	  if (TREE_CODE (var) == NAMELIST_DECL)
	    {
	      /* Adjust decls mentioned in NAMELIST_DECL.  */
	      tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	      tree decl;
	      unsigned int i;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
		{
		  if (TREE_CODE (decl) == VAR_DECL
		      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		    continue;
		  if (decl_function_context (decl) != info->context)
		    CONSTRUCTOR_ELT (decls, i)->value
		      = get_nonlocal_debug_decl (info, decl);
		}
	    }

	*handled_ops_p = false;
	return NULL_TREE;
      }
    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1488
1489
1490 /* A subroutine of convert_local_reference. Create a local variable
1491 in the parent function with DECL_VALUE_EXPR set to reference the
1492 field in FRAME. This is used both for debug info and in OMP
1493 lowering. */
1494
1495 static tree
1496 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1497 {
1498 tree x, new_decl;
1499
1500 tree *slot = &info->var_map->get_or_insert (decl);
1501 if (*slot)
1502 return *slot;
1503
1504 /* Make sure frame_decl gets created. */
1505 (void) get_frame_type (info);
1506 x = info->frame_decl;
1507 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1508
1509 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1510 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1511 DECL_CONTEXT (new_decl) = info->context;
1512 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1513 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1514 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1515 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1516 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1517 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1518 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1519 if ((TREE_CODE (decl) == PARM_DECL
1520 || TREE_CODE (decl) == RESULT_DECL
1521 || TREE_CODE (decl) == VAR_DECL)
1522 && DECL_BY_REFERENCE (decl))
1523 DECL_BY_REFERENCE (new_decl) = 1;
1524
1525 SET_DECL_VALUE_EXPR (new_decl, x);
1526 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1527 *slot = new_decl;
1528
1529 DECL_CHAIN (new_decl) = info->debug_var_chain;
1530 info->debug_var_chain = new_decl;
1531
1532 /* Do not emit debug info twice. */
1533 DECL_IGNORED_P (decl) = 1;
1534
1535 return new_decl;
1536 }
1537
1538
1539 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1540 and PARM_DECLs that were referenced by inner nested functions.
1541 The rewrite will be a structure reference to the local frame variable. */
1542
1543 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1544
static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Use the frame-field reference unless expansion has been
	     suppressed for this decl (e.g. by an OMP clause rewrite),
	     in which case the debug decl itself is used.  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  /* In a context that wants a simple value, load the field into
	     a temporary (or store through one for an lhs).  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ...  */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Operands 1-3 are the index and optional lower bound and
		 element size.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1689
1690 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1691 struct walk_stmt_info *);
1692
/* Helper for convert_local_reference.  Convert all the references in
   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.
   Returns true if any rewritten clause requires the local frame object.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap: bits set for clause decls
     below must not leak past this clause chain.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  /* First pass: rewrite the decls and expressions held directly in the
     clauses.  Clauses with attached GIMPLE sequences set NEED_STMTS and
     are revisited in the second pass below.  */
  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* The step expression may itself reference frame variables.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
				      wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  /* For a local decl that has been given a frame field, replace
	     it by its debug decl and suppress frame-field expansion for
	     it inside this region's body.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  /* These clauses carry a single expression operand to rewrite.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					  &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
					  &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  /* The clause decl may be an arbitrary reference expression
	     (e.g. an array section); walk it fully.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	  /* No decls or expressions to rewrite.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to reduction,
     lastprivate and linear clauses, now that the suppression bitmap is
     in place.  The reduction placeholder's DECL_CONTEXT is temporarily
     reparented so the walk treats it as local.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_frame;
}
1884
1885
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.
   Set *HANDLED_OPS_P to true if all the operands of the statement at GSI
   have been handled here.  */

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If rewriting the clauses showed that the frame object is needed
	 inside the outlined region, share it with the region via an
	 added shared clause.  */
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
				     wi))
	{
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_SHARED);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Collect temporaries created while walking the region body so
	 they can be declared inside the region rather than in the
	 enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
	         gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* The loop bounds and increments are not part of the body and must
	 be walked explicitly.  */
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* An offloaded region cannot share the host frame; if the clauses
	 need it, map the whole frame object to the device explicitly.  */
      save_suppress = info->suppress_expansion;
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
	         gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* No clauses to convert; just walk the body.  */
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  /* Drop clobbers of variables that have been given a frame
	     field: the decl's storage now lives in the frame object,
	     which stays live beyond the original decl's scope.  */
	  if (!use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (TREE_CODE (decl) == VAR_DECL
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      {
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2071
2072
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
   that reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  */

static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Destinations that are not a LABEL_DECL (e.g. computed gotos) are
     not rewritten here.  */
  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A goto to a label defined in the current function is an ordinary
     goto and needs no rewriting.  */
  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Walk up the nesting tree to the level whose context defines LABEL.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be use for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  tree *slot = &i->var_map->get_or_insert (label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (x, target_context);
  x = gsi_gimplify_val (info, x, gsi);
  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (new_label, target_context), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2139
2140
/* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
   are referenced via nonlocal goto from a nested function.  The rewrite
   will involve installing a newly generated DECL_NONLOCAL label, and
   (potentially) a branch around the rtl gunk that is assumed to be
   attached to such a label.  */

static tree
convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			  struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree label, new_label;
  gimple_stmt_iterator tmp_gsi;
  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));

  if (!stmt)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_label_label (stmt);

  /* Only labels recorded in var_map (by convert_nl_goto_reference) are
     targets of a nonlocal goto; leave everything else alone.  */
  tree *slot = info->var_map->get (label);
  if (!slot)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_gsi = wi->gsi;
  gsi_prev (&tmp_gsi);
  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
    {
      gimple stmt = gimple_build_goto (label);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
    }

  /* Install the replacement DECL_NONLOCAL label just before the
     original user label.  */
  new_label = (tree) *slot;
  stmt = gimple_build_label (new_label);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

  *handled_ops_p = true;
  return NULL_TREE;
}
2188
2189
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference a trampoline generated for the occasion.  */

static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  gcall *call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x, target_context);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      *tp = x;
      break;

    default:
      /* Don't descend into types or decls; for everything else let the
	 caller walk the subtrees.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2262
2263
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference a trampoline
   generated for the occasion.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      {
	tree save_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	/* Collect temporaries created while walking the OMP body on a
	   fresh chain so they can be declared at the head of that body
	   rather than on the enclosing function's chain.  */
	save_local_var_chain = info->new_local_var_chain;
	info->new_local_var_chain = NULL;
	walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	info->new_local_var_chain = save_local_var_chain;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2321
2322
2323
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Calls that already carry a static chain are left untouched.  */
      if (gimple_call_chain (stmt))
	break;
      /* Indirect calls have no fndecl; nothing to do for them here.  */
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Bit 0: the chain is our own frame; bit 1: it came from an
	     outer context.  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Track which chains the OMP body introduced, then make sure the
	 corresponding FRAME/CHAIN decls appear on the clause list.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  /* i == 0 is the frame decl (bit 0 above), i == 1 the chain.  */
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* Offloaded targets use MAP clauses instead of FIRSTPRIVATE/SHARED
	 but otherwise mirror the PARALLEL/TASK handling above.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2448
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      /* Iterate to a fixed point: stop once the number of functions
	 marked DECL_STATIC_CHAIN no longer changes.  */
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
2518
/* Callback data for tree remapping: extends the inliner's
   copy_body_data with the nesting_info whose var_map drives decl
   replacement (consulted by nesting_copy_decl).  */

struct nesting_copy_body_data
{
  copy_body_data cb;
  struct nesting_info *root;
};
2524
2525 /* A helper subroutine for debug_var_chain type remapping. */
2526
2527 static tree
2528 nesting_copy_decl (tree decl, copy_body_data *id)
2529 {
2530 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2531 tree *slot = nid->root->var_map->get (decl);
2532
2533 if (slot)
2534 return (tree) *slot;
2535
2536 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2537 {
2538 tree new_decl = copy_decl_no_change (decl, id);
2539 DECL_ORIGINAL_TYPE (new_decl)
2540 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2541 return new_decl;
2542 }
2543
2544 if (TREE_CODE (decl) == VAR_DECL
2545 || TREE_CODE (decl) == PARM_DECL
2546 || TREE_CODE (decl) == RESULT_DECL)
2547 return decl;
2548
2549 return copy_decl_no_change (decl, id);
2550 }
2551
2552 /* A helper function for remap_vla_decls. See if *TP contains
2553 some remapped variables. */
2554
2555 static tree
2556 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2557 {
2558 struct nesting_info *root = (struct nesting_info *) data;
2559 tree t = *tp;
2560
2561 if (DECL_P (t))
2562 {
2563 *walk_subtrees = 0;
2564 tree *slot = root->var_map->get (t);
2565
2566 if (slot)
2567 return *slot;
2568 }
2569 return NULL;
2570 }
2571
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Recurse into subblocks first, depth-first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* Cheap first scan: look for a variable whose DECL_VALUE_EXPR is a
     "*ptr" of variably-modified type that involves a remapped decl.
     If none is found, there is nothing to do in this block.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  /* Set up a copy_body context whose copy_decl hook consults ROOT's
     var_map (see nesting_copy_decl).  */
  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Second pass: resume at the first affected variable found above and
     remap types and value expressions in place.  */
  for (; var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting level that owns VAR; skip variables whose
	   context is not part of this nesting tree.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	/* Strip matching unnamed pointer layers so the TYPE_NAME
	   comparison below looks at the underlying named types.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
2669
2670 /* Fold the MEM_REF *E. */
2671 bool
2672 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2673 {
2674 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2675 *ref_p = fold (*ref_p);
2676 return true;
2677 }
2678
/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;
  gimple stmt;
  tree context = root->context;
  struct function *sf;

  /* STMT_LIST accumulates frame/chain/trampoline initializations to be
     prepended to the function body below.  */
  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it.  */
      int save_warn_padded = warn_padded;
      tree *adjust;

      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Remove root->frame_decl from root->new_local_var_chain, so
	 that we can declare it also in the lexical blocks, which
	 helps ensure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs().  */
      for (adjust = &root->new_local_var_chain;
	   *adjust != root->frame_decl;
	   adjust = &DECL_CHAIN (*adjust))
	gcc_assert (DECL_CHAIN (*adjust));
      *adjust = DECL_CHAIN (*adjust);

      DECL_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  /* Emit FRAME.field = x.  */
	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  gcc_assert (DECL_STATIC_CHAIN (i->context));
	  arg3 = build_addr (root->frame_decl, context);

	  arg2 = build_addr (i->context, context);

	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  /* Emit __builtin_init_trampoline (&FRAME.tramp, &fn, &FRAME).  */
	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      /* Prepend the initializations to the outermost bind body.  */
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		/* Same unnamed-pointer stripping as in remap_vla_decls:
		   compare the underlying named types.  */
		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
2903
2904 static void
2905 finalize_nesting_tree (struct nesting_info *root)
2906 {
2907 struct nesting_info *n;
2908 FOR_EACH_NEST_INFO (n, root)
2909 finalize_nesting_tree_1 (n);
2910 }
2911
2912 /* Unnest the nodes and pass them to cgraph. */
2913
2914 static void
2915 unnest_nesting_tree_1 (struct nesting_info *root)
2916 {
2917 struct cgraph_node *node = cgraph_node::get (root->context);
2918
2919 /* For nested functions update the cgraph to reflect unnesting.
2920 We also delay finalizing of these functions up to this point. */
2921 if (node->origin)
2922 {
2923 node->unnest ();
2924 cgraph_node::finalize_function (root->context, true);
2925 }
2926 }
2927
2928 static void
2929 unnest_nesting_tree (struct nesting_info *root)
2930 {
2931 struct nesting_info *n;
2932 FOR_EACH_NEST_INFO (n, root)
2933 unnest_nesting_tree_1 (n);
2934 }
2935
2936 /* Free the data structures allocated during this pass. */
2937
2938 static void
2939 free_nesting_tree (struct nesting_info *root)
2940 {
2941 struct nesting_info *node, *next;
2942
2943 node = iter_nestinfo_start (root);
2944 do
2945 {
2946 next = iter_nestinfo_next (node);
2947 delete node->var_map;
2948 delete node->field_map;
2949 delete node->mem_refs;
2950 free (node);
2951 node = next;
2952 }
2953 while (node);
2954 }
2955
2956 /* Gimplify a function and all its nested functions. */
2957 static void
2958 gimplify_all_functions (struct cgraph_node *root)
2959 {
2960 struct cgraph_node *iter;
2961 if (!gimple_body (root->decl))
2962 gimplify_function_tree (root->decl);
2963 for (iter = root->nested; iter; iter = iter->next_nested)
2964 gimplify_all_functions (iter);
2965 }
2966
2967 /* Main entry point for this pass. Process FNDECL and all of its nested
2968 subroutines and turn them into something less tightly bound. */
2969
2970 void
2971 lower_nested_functions (tree fndecl)
2972 {
2973 struct cgraph_node *cgn;
2974 struct nesting_info *root;
2975
2976 /* If there are no nested functions, there's nothing to do. */
2977 cgn = cgraph_node::get (fndecl);
2978 if (!cgn->nested)
2979 return;
2980
2981 gimplify_all_functions (cgn);
2982
2983 dump_file = dump_begin (TDI_nested, &dump_flags);
2984 if (dump_file)
2985 fprintf (dump_file, "\n;; Function %s\n\n",
2986 lang_hooks.decl_printable_name (fndecl, 2));
2987
2988 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
2989 root = create_nesting_tree (cgn);
2990
2991 walk_all_functions (convert_nonlocal_reference_stmt,
2992 convert_nonlocal_reference_op,
2993 root);
2994 walk_all_functions (convert_local_reference_stmt,
2995 convert_local_reference_op,
2996 root);
2997 walk_all_functions (convert_nl_goto_reference, NULL, root);
2998 walk_all_functions (convert_nl_goto_receiver, NULL, root);
2999
3000 convert_all_function_calls (root);
3001 finalize_nesting_tree (root);
3002 unnest_nesting_tree (root);
3003
3004 free_nesting_tree (root);
3005 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3006
3007 if (dump_file)
3008 {
3009 dump_end (TDI_nested, dump_file);
3010 dump_file = NULL;
3011 }
3012 }
3013
3014 #include "gt-tree-nested.h"