/* Nested function decomposition for GIMPLE.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "vec.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "bitmap.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"	/* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL.  */
#include "langhooks.h"
#include "gimple-low.h"
#include "gomp-constants.h"


/* The object of this pass is to lower the representation of a set of nested
   functions in order to expose all of the gory details of the various
   nonlocal references.  We want to do this sooner rather than later, in
   order to give us more freedom in emitting all of the functions in question.

   Back in olden times, when gcc was young, we developed an insanely
   complicated scheme whereby variables which were referenced nonlocally
   were forced to live in the stack of the declaring function, and then
   the nested functions magically discovered where these variables were
   placed.  In order for this scheme to function properly, it required
   that the outer function be partially expanded, then we switch to
   compiling the inner function, and once done with those we switch back
   to compiling the outer function.  Such delicate ordering requirements
   make it difficult to do whole translation unit optimizations
   involving such functions.

   The implementation here is much more direct.  Everything that can be
   referenced by an inner function is a member of an explicitly created
   structure herein called the "nonlocal frame struct".  The incoming
   static chain for a nested function is a pointer to this struct in
   the parent.  In this way, we settle on known offsets from a known
   base, and so are decoupled from the logic that places objects in the
   function's stack frame.  More importantly, we don't have to wait for
   that to happen -- since the compilation of the inner function is no
   longer tied to a real stack frame, the nonlocal frame struct can be
   allocated anywhere.  Which means that the outer function is now
   inlinable.

   Theory of operation here is very simple.  Iterate over all the
   statements in all the functions (depth first) several times,
   allocating structures and fields on demand.  In general we want to
   examine inner functions first, so that we can avoid making changes
   to outer functions which are unnecessary.

   The order of the passes matters a bit, in that later passes will be
   skipped if it is discovered that the functions don't actually interact
   at all.  That is, they're nested in the lexical sense but could have
   been written as independent functions without change.  */
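
/* As a concrete illustration (pseudo-C, using this pass's naming
   conventions; the function and its contents are hypothetical), a GNU C
   nested function such as

	int outer (int n)
	{
	  int inner (void) { return n + 1; }
	  return inner ();
	}

   is lowered approximately into

	struct FRAME.outer { int n; };

	int inner (struct FRAME.outer *CHAIN)
	{
	  return CHAIN->n + 1;
	}

	int outer (int n)
	{
	  struct FRAME.outer FRAME;
	  FRAME.n = n;
	  return inner (&FRAME);
	}

   with every nonlocal reference in INNER rewritten into an access
   through the static chain pointer CHAIN.  */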


struct nesting_info
{
  /* Links in the depth-first tree of nested functions.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Map from a decl to its field in the nonlocal frame struct.  */
  hash_map<tree, tree> *field_map;
  /* Map from a decl to its local replacement (debug decls, trampoline
     fields, and the like).  */
  hash_map<tree, tree> *var_map;
  /* MEM_REFs that must be re-folded once the frame type is finalized.  */
  hash_set<tree *> *mem_refs;
  /* UIDs of decls whose expansion is suppressed inside OMP clauses.  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this record describes.  */
  tree context;
  /* New temporaries to be declared in this function.  */
  tree new_local_var_chain;
  /* Debug decls whose DECL_VALUE_EXPRs point into the frame struct.  */
  tree debug_var_chain;
  /* The nonlocal frame struct type and the local instance of it.  */
  tree frame_type;
  tree frame_decl;
  /* The field holding the parent's static chain, and the incoming
     static chain PARM_DECL.  */
  tree chain_field;
  tree chain_decl;
  /* The field holding the buffer for nonlocal gotos.  */
  tree nl_goto_field;

  bool any_parm_remapped;
  bool any_tramp_created;
  char static_chain_added;
};


/* Iterate over the nesting tree, starting with ROOT, depth first.  */

static inline struct nesting_info *
iter_nestinfo_start (struct nesting_info *root)
{
  while (root->inner)
    root = root->inner;
  return root;
}

static inline struct nesting_info *
iter_nestinfo_next (struct nesting_info *node)
{
  if (node->next)
    return iter_nestinfo_start (node->next);
  return node->outer;
}

#define FOR_EACH_NEST_INFO(I, ROOT) \
  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))

/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;


/* We're working in so many different function contexts simultaneously
   that create_tmp_var is dangerous.  Prevent mishap.  */
#define create_tmp_var cant_use_create_tmp_var_here_dummy

/* Like create_tmp_var, except record the variable for registration at
   the given nesting level.  */

static tree
create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
{
  tree tmp_var;

  /* If the type is of variable size or a type which must be created by the
     front end, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  DECL_CONTEXT (tmp_var) = info->context;
  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;

  return tmp_var;
}

/* Take the address of EXP to be used within function CONTEXT.
   Mark it for addressability as necessary.  */

tree
build_addr (tree exp, tree context)
{
  tree base = exp;
  tree save_context;
  tree retval;

  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  if (DECL_P (base))
    TREE_ADDRESSABLE (base) = 1;

  /* Building the ADDR_EXPR will compute a set of properties for
     that ADDR_EXPR.  Those properties are unfortunately context
     specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.

     Temporarily set CURRENT_FUNCTION_DECL to the desired context,
     build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL.  That
     way the properties for the ADDR_EXPR are computed properly.  */
  save_context = current_function_decl;
  current_function_decl = context;
  retval = build_fold_addr_expr (exp);
  current_function_decl = save_context;
  return retval;
}

/* Insert FIELD into TYPE, sorted by alignment requirements.  */

void
insert_field_into_struct (tree type, tree field)
{
  tree *p;

  DECL_CONTEXT (field) = type;

  for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
      break;

  DECL_CHAIN (field) = *p;
  *p = field;

  /* Set correct alignment for frame struct type.  */
  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
    TYPE_ALIGN (type) = DECL_ALIGN (field);
}

/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}

/* Return true if DECL should be referenced by pointer in the non-local
   frame structure.  */

static bool
use_pointer_in_frame (tree decl)
{
  if (TREE_CODE (decl) == PARM_DECL)
    {
      /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy
	 variable-sized decls, and inefficient to copy large aggregates.
	 Don't bother moving anything but scalar variables.  */
      return AGGREGATE_TYPE_P (TREE_TYPE (decl));
    }
  else
    {
      /* Variable-sized types make things "interesting" in the frame.  */
      return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
    }
}
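
/* For example (hypothetical decls): a PARM_DECL of type "struct big"
   stays in the parent and only a pointer to it is stored in the frame,
   an "int" parameter is copied into the frame directly, and a local
   VLA is reached through a pointer because it cannot be laid out
   inside a fixed-size struct.  */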

/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  DECL_ALIGN (field) = DECL_ALIGN (decl);
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}

/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is
	 closer to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}

/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  if (!field)
    {
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}

/* Initialize a new temporary with the GIMPLE_CALL CALL.  */

static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
			gcall *call)
{
  tree t;

  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
  gimple_call_set_lhs (call, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, call, GSI_SAME_STMT);

  return t;
}


/* Copy EXP into a temporary.  Allocate the temporary in the context of
   INFO and insert the initialization statement before GSI.  */

static tree
init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (t, exp);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}


/* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */

static tree
gsi_gimplify_val (struct nesting_info *info, tree exp,
		  gimple_stmt_iterator *gsi)
{
  if (is_gimple_val (exp))
    return exp;
  else
    return init_tmp_var (info, exp, gsi);
}

/* Similarly, but copy from the temporary and insert the statement
   after the iterator.  */

static tree
save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (exp, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}

/* Build or return the type used to represent a nested function trampoline.  */

static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
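
/* The record built above is thus roughly equivalent to (illustration
   only):

	struct __builtin_trampoline
	{
	  char __data[SIZE];
	};

   with SIZE and the field's alignment as computed above: an opaque,
   suitably aligned byte buffer that the target fills in at run time.  */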

/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a trampoline for this function.  */

static tree
lookup_tramp_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->var_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->var_map->get_or_insert (decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);
      TREE_TYPE (field) = get_trampoline_type (info);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      info->any_tramp_created = true;
    }

  return *slot;
}

/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
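
/* Conceptually (illustration only), the field created above is roughly

	void *__nl_goto_buf[N + 1];

   where N is the number of pointer-sized words in the target's stack
   save area: slot 0 holds the frame pointer and the remaining slots
   the saved stack pointer state.  */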

/* Invoke CALLBACK_STMT and CALLBACK_OP on all statements of GIMPLE
   sequence *PSEQ.  */

static void
walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	   struct nesting_info *info, gimple_seq *pseq)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.val_only = true;
  walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
}


/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */

static inline void
walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	       struct nesting_info *info)
{
  gimple_seq body = gimple_body (info->context);
  walk_body (callback_stmt, callback_op, info, &body);
  gimple_set_body (info->context, body);
}

/* Invoke CALLBACK_STMT/CALLBACK_OP on a GIMPLE_OMP_FOR's init, cond,
   incr and pre-body.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info,
	     gimple_omp_for_pre_body_ptr (for_stmt));

  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}

/* Similarly for ROOT and all functions nested underneath, depth first.  */

static void
walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		    struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    walk_function (callback_stmt, callback_op, n);
}


/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
   done in the types of the PARM_DECLs of the nested function, meaning the
   argument types of that function will disagree with the arguments in the
   calls to that function.  So we'd either have to make a copy of the nested
   function corresponding to each time the enclosing function was inlined or
   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
   function.  The former is not practical.  The latter would still require
   detecting this case to know when to add the conversions.  So, for now at
   least, we don't inline such an enclosing function.

   We have to do that check recursively, so this function returns whether
   FNDECL has such a nested function.  ORIG_FNDECL is the function we were
   trying to inline; it is used for checking whether any argument type is
   variably modified by anything in it.

   It would be better to do this in tree-inline.c so that we could give
   the appropriate warning for why a function can't be inlined, but that's
   too late since the nesting structure has already been flattened and
   adding a flag just to record this fact seems a waste of a flag.  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  tree arg;

  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      if (check_for_nested_with_variably_modified (cgn->decl,
						   orig_fndecl))
	return true;
    }

  return false;
}
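
/* For instance (a hypothetical example):

	void f (int n)
	{
	  void g (int (*a)[n]) { ... }
	  ...
	}

   Here the type of G's parameter A is variably modified by F's
   parameter N, so create_nesting_tree below marks F uninlinable.  */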

/* Construct our local data structure describing the function nesting
   tree rooted by CGN.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See check_for_nested_with_variably_modified for a discussion of
     why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}

/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      x = build_addr (info->frame_decl, target_context);
    }
  else
    {
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
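
/* E.g. (illustration only), for a TARGET_CONTEXT two levels out this
   emits

	T.1 = CHAIN->__chain;

   before GSI and returns T.1; each further level adds one more load
   of the __chain field.  */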


/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
    }
  else
    {
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      x = build_simple_mem_ref (x);
    }

  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}

static void note_nonlocal_vla_type (struct nesting_info *info, tree type);

/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref (x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}


/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build_simple_mem_ref (x);
		}
	    }

	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi,
		   NULL);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}

static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
					     struct walk_stmt_info *);

/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					 &dummy, wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}

/* Create nonlocal debug decls for nonlocal VLA array bounds.  */

static void
note_nonlocal_vla_type (struct nesting_info *info, tree type)
{
  while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
    type = TREE_TYPE (type);

  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
    type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));

  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == VECTOR_TYPE
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == METHOD_TYPE)
    type = TREE_TYPE (type);

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree domain, t;

      note_nonlocal_vla_type (info, TREE_TYPE (type));
      domain = TYPE_DOMAIN (type);
      if (domain)
	{
	  t = TYPE_MIN_VALUE (domain);
	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
	      && decl_function_context (t) != info->context)
	    get_nonlocal_debug_decl (info, t);
	  t = TYPE_MAX_VALUE (domain);
	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
	      && decl_function_context (t) != info->context)
	    get_nonlocal_debug_decl (info, t);
	}
    }
}

/* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
   in BLOCK.  */

static void
note_nonlocal_block_vlas (struct nesting_info *info, tree block)
{
  tree var;

  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL
	&& variably_modified_type_p (TREE_TYPE (var), NULL)
	&& DECL_HAS_VALUE_EXPR_P (var)
	&& decl_function_context (var) != info->context)
      note_nonlocal_vla_type (info, TREE_TYPE (var));
}

/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = true;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
			   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
		 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
	gbind *bind_stmt = as_a <gbind *> (stmt);
	if (!optimize && gimple_bind_block (bind_stmt))
	  note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));

	for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	  if (TREE_CODE (var) == NAMELIST_DECL)
	    {
	      /* Adjust decls mentioned in NAMELIST_DECL.  */
	      tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	      tree decl;
	      unsigned int i;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
		{
		  if (TREE_CODE (decl) == VAR_DECL
		      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		    continue;
		  if (decl_function_context (decl) != info->context)
		    CONSTRUCTOR_ELT (decls, i)->value
		      = get_nonlocal_debug_decl (info, decl);
		}
	    }

	*handled_ops_p = false;
	return NULL_TREE;
      }
    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT's operands, so there is no need to
     traverse them.  */
  *handled_ops_p = true;
  return NULL_TREE;
}


/* A subroutine of convert_local_reference.  Create a local variable
   in the parent function with DECL_VALUE_EXPR set to reference the
   field in FRAME.  This is used both for debug info and in OMP
   lowering.  */

static tree
get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
{
  tree x, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);
  if (*slot)
    return *slot;

  /* Make sure frame_decl gets created.  */
  (void) get_frame_type (info);
  x = info->frame_decl;
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);

  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
  *slot = new_decl;

  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Do not emit debug info twice.  */
  DECL_IGNORED_P (decl) = 1;

  return new_decl;
}


/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF, as component references as part
	 of an ADDR_EXPR address are not allowed.  But we cannot fold
	 here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}

static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
					  struct walk_stmt_info *);

/* Helper for convert_local_reference.  Convert all the references in
   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
				      wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					  &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
					  &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_frame;
}
1894
1895
1896 /* Called via walk_function+walk_gimple_stmt, rewrite all references to
1897 VAR_DECLs and PARM_DECLs that were referenced by inner nested functions.
1898 The rewrite will be a structure reference to the local frame variable. */
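/* A sketch of the effect (dump-style FRAME.* names):

     int f (int n)
     {
       int x;
       int g (void) { return x; }
       x = n;             // becomes   FRAME.x = n;
       return x + g ();   // becomes   return FRAME.x + g ();
     }
*/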
1899
1900 static tree
1901 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1902 struct walk_stmt_info *wi)
1903 {
1904 struct nesting_info *info = (struct nesting_info *) wi->info;
1905 tree save_local_var_chain;
1906 bitmap save_suppress;
1907 gimple stmt = gsi_stmt (*gsi);
1908
1909 switch (gimple_code (stmt))
1910 {
1911 case GIMPLE_OMP_PARALLEL:
1912 case GIMPLE_OMP_TASK:
1913 save_suppress = info->suppress_expansion;
1914 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1915 wi))
1916 {
1917 tree c;
1918 (void) get_frame_type (info);
1919 c = build_omp_clause (gimple_location (stmt),
1920 OMP_CLAUSE_SHARED);
1921 OMP_CLAUSE_DECL (c) = info->frame_decl;
1922 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1923 gimple_omp_taskreg_set_clauses (stmt, c);
1924 }
1925
1926 save_local_var_chain = info->new_local_var_chain;
1927 info->new_local_var_chain = NULL;
1928
1929 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1930 gimple_omp_body_ptr (stmt));
1931
1932 if (info->new_local_var_chain)
1933 declare_vars (info->new_local_var_chain,
1934 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1935 info->new_local_var_chain = save_local_var_chain;
1936 info->suppress_expansion = save_suppress;
1937 break;
1938
1939 case GIMPLE_OMP_FOR:
1940 save_suppress = info->suppress_expansion;
1941 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1942 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1943 convert_local_reference_stmt,
1944 convert_local_reference_op, info);
1945 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1946 info, gimple_omp_body_ptr (stmt));
1947 info->suppress_expansion = save_suppress;
1948 break;
1949
1950 case GIMPLE_OMP_SECTIONS:
1951 save_suppress = info->suppress_expansion;
1952 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1953 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1954 info, gimple_omp_body_ptr (stmt));
1955 info->suppress_expansion = save_suppress;
1956 break;
1957
1958 case GIMPLE_OMP_SINGLE:
1959 save_suppress = info->suppress_expansion;
1960 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1961 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1962 info, gimple_omp_body_ptr (stmt));
1963 info->suppress_expansion = save_suppress;
1964 break;
1965
1966 case GIMPLE_OMP_TARGET:
1967 if (!is_gimple_omp_offloaded (stmt))
1968 {
1969 save_suppress = info->suppress_expansion;
1970 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1971 info->suppress_expansion = save_suppress;
1972 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1973 info, gimple_omp_body_ptr (stmt));
1974 break;
1975 }
1976 save_suppress = info->suppress_expansion;
1977 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
1978 {
1979 tree c;
1980 (void) get_frame_type (info);
1981 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1982 OMP_CLAUSE_DECL (c) = info->frame_decl;
1983 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
1984 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
1985 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1986 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1987 }
1988
1989 save_local_var_chain = info->new_local_var_chain;
1990 info->new_local_var_chain = NULL;
1991
1992 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1993 gimple_omp_body_ptr (stmt));
1994
1995 if (info->new_local_var_chain)
1996 declare_vars (info->new_local_var_chain,
1997 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1998 info->new_local_var_chain = save_local_var_chain;
1999 info->suppress_expansion = save_suppress;
2000 break;
2001
2002 case GIMPLE_OMP_TEAMS:
2003 save_suppress = info->suppress_expansion;
2004 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2005 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2006 info, gimple_omp_body_ptr (stmt));
2007 info->suppress_expansion = save_suppress;
2008 break;
2009
2010 case GIMPLE_OMP_SECTION:
2011 case GIMPLE_OMP_MASTER:
2012 case GIMPLE_OMP_TASKGROUP:
2013 case GIMPLE_OMP_ORDERED:
2014 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2015 info, gimple_omp_body_ptr (stmt));
2016 break;
2017
2018 case GIMPLE_COND:
2019 wi->val_only = true;
2020 wi->is_lhs = false;
2021 *handled_ops_p = false;
2022 return NULL_TREE;
2023
2024 case GIMPLE_ASSIGN:
2025 if (gimple_clobber_p (stmt))
2026 {
2027 tree lhs = gimple_assign_lhs (stmt);
2028 if (!use_pointer_in_frame (lhs)
2029 && lookup_field_for_decl (info, lhs, NO_INSERT))
2030 {
2031 gsi_replace (gsi, gimple_build_nop (), true);
2032 break;
2033 }
2034 }
2035 *handled_ops_p = false;
2036 return NULL_TREE;
2037
2038 case GIMPLE_BIND:
2039 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2040 var;
2041 var = DECL_CHAIN (var))
2042 if (TREE_CODE (var) == NAMELIST_DECL)
2043 {
2044 /* Adjust decls mentioned in NAMELIST_DECL. */
2045 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2046 tree decl;
2047 unsigned int i;
2048
2049 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2050 {
2051 if (TREE_CODE (decl) == VAR_DECL
2052 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2053 continue;
2054 if (decl_function_context (decl) == info->context
2055 && !use_pointer_in_frame (decl))
2056 {
2057 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2058 if (field)
2059 {
2060 CONSTRUCTOR_ELT (decls, i)->value
2061 = get_local_debug_decl (info, decl, field);
2062 }
2063 }
2064 }
2065 }
2066
2067 *handled_ops_p = false;
2068 return NULL_TREE;
2069
2070 default:
2071 /* For every other statement that we are not interested in
2072 handling here, let the walker traverse the operands. */
2073 *handled_ops_p = false;
2074 return NULL_TREE;
2075 }
2076
2077 /* Indicate that we have handled all the operands ourselves. */
2078 *handled_ops_p = true;
2079 return NULL_TREE;
2080 }
2081
2082
2083 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2084 that reference labels from outer functions. The rewrite will be a
2085 call to __builtin_nonlocal_goto. */
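/* Sketch with illustrative names: a nonlocal

     goto done;

   in the nested function becomes roughly

     __builtin_nonlocal_goto (&NONLOCAL.done, &CHAIN->nl_goto_save);

   where NONLOCAL.done stands for the artificial DECL_NONLOCAL label
   recorded in var_map, and nl_goto_save for the frame field returned
   by get_nl_goto_field in the label's owning function.  */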
2086
2087 static tree
2088 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2089 struct walk_stmt_info *wi)
2090 {
2091 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2092 tree label, new_label, target_context, x, field;
2093 gcall *call;
2094 gimple stmt = gsi_stmt (*gsi);
2095
2096 if (gimple_code (stmt) != GIMPLE_GOTO)
2097 {
2098 *handled_ops_p = false;
2099 return NULL_TREE;
2100 }
2101
2102 label = gimple_goto_dest (stmt);
2103 if (TREE_CODE (label) != LABEL_DECL)
2104 {
2105 *handled_ops_p = false;
2106 return NULL_TREE;
2107 }
2108
2109 target_context = decl_function_context (label);
2110 if (target_context == info->context)
2111 {
2112 *handled_ops_p = false;
2113 return NULL_TREE;
2114 }
2115
2116 for (i = info->outer; target_context != i->context; i = i->outer)
2117 continue;
2118
2119 /* The original user label may also be used for a normal goto, so
2120 we must create a new label that will actually receive the abnormal
2121 control transfer. This new label will be marked LABEL_NONLOCAL; this
2122 mark will trigger proper behavior in the cfg, as well as cause the
2123 (hairy target-specific) non-local goto receiver code to be generated
2124 when we expand rtl. Enter this association into var_map so that we
2125 can insert the new label into the IL during a second pass. */
2126 tree *slot = &i->var_map->get_or_insert (label);
2127 if (*slot == NULL)
2128 {
2129 new_label = create_artificial_label (UNKNOWN_LOCATION);
2130 DECL_NONLOCAL (new_label) = 1;
2131 *slot = new_label;
2132 }
2133 else
2134 new_label = *slot;
2135
2136 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2137 field = get_nl_goto_field (i);
2138 x = get_frame_field (info, target_context, field, gsi);
2139 x = build_addr (x, target_context);
2140 x = gsi_gimplify_val (info, x, gsi);
2141 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2142 2, build_addr (new_label, target_context), x);
2143 gsi_replace (gsi, call, false);
2144
2145 /* We have handled all of STMT's operands, no need to keep going. */
2146 *handled_ops_p = true;
2147 return NULL_TREE;
2148 }
2149
2150
2151 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_LABELs whose labels
2152 are referenced via nonlocal goto from a nested function. The rewrite
2153 will involve installing a newly generated DECL_NONLOCAL label, and
2154 (potentially) a branch around the rtl gunk that is assumed to be
2155 attached to such a label. */
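/* Sketch: if label DONE in the outer function is targeted by a
   nonlocal goto, the receiver site is rewritten roughly from

     done:
       ...

   to

     goto done;        // only if the previous stmt may fall through
     NONLOCAL.done:    // the DECL_NONLOCAL label from var_map
     done:
       ...

   so that ordinary control flow branches around the receiver code
   attached to the new label.  */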
2156
2157 static tree
2158 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2159 struct walk_stmt_info *wi)
2160 {
2161 struct nesting_info *const info = (struct nesting_info *) wi->info;
2162 tree label, new_label;
2163 gimple_stmt_iterator tmp_gsi;
2164 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2165
2166 if (!stmt)
2167 {
2168 *handled_ops_p = false;
2169 return NULL_TREE;
2170 }
2171
2172 label = gimple_label_label (stmt);
2173
2174 tree *slot = info->var_map->get (label);
2175 if (!slot)
2176 {
2177 *handled_ops_p = false;
2178 return NULL_TREE;
2179 }
2180
2181 /* If there's any possibility that the previous statement falls through,
2182 then we must branch around the new non-local label. */
2183 tmp_gsi = wi->gsi;
2184 gsi_prev (&tmp_gsi);
2185 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2186 {
2187 gimple stmt = gimple_build_goto (label);
2188 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2189 }
2190
2191 new_label = (tree) *slot;
2192 stmt = gimple_build_label (new_label);
2193 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2194
2195 *handled_ops_p = true;
2196 return NULL_TREE;
2197 }
2198
2199
2200 /* Called via walk_function+walk_tree, rewrite all references to addresses
2201 of nested functions that require the use of trampolines. The rewrite
2202 will involve a reference to a trampoline generated for the occasion. */
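/* For example (GNU C), taking the address of a chain-using nested
   function is what triggers this path:

     void outer (void)
     {
       int x;
       void inner (void) { x++; }
       void (*fp) (void) = inner;   // &inner requires a trampoline
       fp ();
     }

   The resulting pointer must carry outer's static chain, so it is
   pointed at an executable trampoline stored in outer's frame.  */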
2203
2204 static tree
2205 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2206 {
2207 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2208 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2209 tree t = *tp, decl, target_context, x, builtin;
2210 gcall *call;
2211
2212 *walk_subtrees = 0;
2213 switch (TREE_CODE (t))
2214 {
2215 case ADDR_EXPR:
2216 /* Build
2217 T.1 = &CHAIN->tramp;
2218 T.2 = __builtin_adjust_trampoline (T.1);
2219 T.3 = (func_type)T.2;
2220 */
2221
2222 decl = TREE_OPERAND (t, 0);
2223 if (TREE_CODE (decl) != FUNCTION_DECL)
2224 break;
2225
2226 /* Only need to process nested functions. */
2227 target_context = decl_function_context (decl);
2228 if (!target_context)
2229 break;
2230
2231 /* If the nested function doesn't use a static chain, then
2232 it doesn't need a trampoline. */
2233 if (!DECL_STATIC_CHAIN (decl))
2234 break;
2235
2236 /* If we don't want a trampoline, then don't build one. */
2237 if (TREE_NO_TRAMPOLINE (t))
2238 break;
2239
2240 /* Look up the immediate parent of the callee, as that's where
2241 we need to insert the trampoline. */
2242 for (i = info; i->context != target_context; i = i->outer)
2243 continue;
2244 x = lookup_tramp_for_decl (i, decl, INSERT);
2245
2246 /* Compute the address of the field holding the trampoline. */
2247 x = get_frame_field (info, target_context, x, &wi->gsi);
2248 x = build_addr (x, target_context);
2249 x = gsi_gimplify_val (info, x, &wi->gsi);
2250
2251 /* Do machine-specific ugliness. Normally this will involve
2252 computing extra alignment, but it can really be anything. */
2253 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2254 call = gimple_build_call (builtin, 1, x);
2255 x = init_tmp_var_with_call (info, &wi->gsi, call);
2256
2257 /* Cast back to the proper function type. */
2258 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2259 x = init_tmp_var (info, x, &wi->gsi);
2260
2261 *tp = x;
2262 break;
2263
2264 default:
2265 if (!IS_TYPE_OR_DECL_P (t))
2266 *walk_subtrees = 1;
2267 break;
2268 }
2269
2270 return NULL_TREE;
2271 }
2272
2273
2274 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2275 to addresses of nested functions that require the use of
2276 trampolines. The rewrite will involve a reference to a trampoline
2277 generated for the occasion. */
2278
2279 static tree
2280 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2281 struct walk_stmt_info *wi)
2282 {
2283 struct nesting_info *info = (struct nesting_info *) wi->info;
2284 gimple stmt = gsi_stmt (*gsi);
2285
2286 switch (gimple_code (stmt))
2287 {
2288 case GIMPLE_CALL:
2289 {
2290 /* Only walk call arguments, lest we generate trampolines for
2291 direct calls. */
2292 unsigned long i, nargs = gimple_call_num_args (stmt);
2293 for (i = 0; i < nargs; i++)
2294 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2295 wi, NULL);
2296 break;
2297 }
2298
2299 case GIMPLE_OMP_TARGET:
2300 if (!is_gimple_omp_offloaded (stmt))
2301 {
2302 *handled_ops_p = false;
2303 return NULL_TREE;
2304 }
2305 /* FALLTHRU */
2306 case GIMPLE_OMP_PARALLEL:
2307 case GIMPLE_OMP_TASK:
2308 {
2309 tree save_local_var_chain;
2310 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2311 save_local_var_chain = info->new_local_var_chain;
2312 info->new_local_var_chain = NULL;
2313 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2314 info, gimple_omp_body_ptr (stmt));
2315 if (info->new_local_var_chain)
2316 declare_vars (info->new_local_var_chain,
2317 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2318 false);
2319 info->new_local_var_chain = save_local_var_chain;
2320 }
2321 break;
2322
2323 default:
2324 *handled_ops_p = false;
2325 return NULL_TREE;
2326 }
2327
2328 *handled_ops_p = true;
2329 return NULL_TREE;
2330 }
2331
2332
2333
2334 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2335 that reference nested functions to make sure that the static chain
2336 is set up properly for the call. */
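/* Sketch: in the GIMPLE dumps, a direct call to a chain-using nested
   function

     inner (i);

   ends up annotated as something like

     inner (i); [static chain: &FRAME.1]

   with the chain operand computed by get_static_chain for the
   callee's enclosing context.  */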
2337
2338 static tree
2339 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2340 struct walk_stmt_info *wi)
2341 {
2342 struct nesting_info *const info = (struct nesting_info *) wi->info;
2343 tree decl, target_context;
2344 char save_static_chain_added;
2345 int i;
2346 gimple stmt = gsi_stmt (*gsi);
2347
2348 switch (gimple_code (stmt))
2349 {
2350 case GIMPLE_CALL:
2351 if (gimple_call_chain (stmt))
2352 break;
2353 decl = gimple_call_fndecl (stmt);
2354 if (!decl)
2355 break;
2356 target_context = decl_function_context (decl);
2357 if (target_context && DECL_STATIC_CHAIN (decl))
2358 {
2359 gimple_call_set_chain (as_a <gcall *> (stmt),
2360 get_static_chain (info, target_context,
2361 &wi->gsi));
2362 info->static_chain_added |= (1 << (info->context != target_context));
2363 }
2364 break;
2365
2366 case GIMPLE_OMP_PARALLEL:
2367 case GIMPLE_OMP_TASK:
2368 save_static_chain_added = info->static_chain_added;
2369 info->static_chain_added = 0;
2370 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2371 for (i = 0; i < 2; i++)
2372 {
2373 tree c, decl;
2374 if ((info->static_chain_added & (1 << i)) == 0)
2375 continue;
2376 decl = i ? get_chain_decl (info) : info->frame_decl;
2377 /* Don't add CHAIN.* or FRAME.* twice. */
2378 for (c = gimple_omp_taskreg_clauses (stmt);
2379 c;
2380 c = OMP_CLAUSE_CHAIN (c))
2381 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2382 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2383 && OMP_CLAUSE_DECL (c) == decl)
2384 break;
2385 if (c == NULL)
2386 {
2387 c = build_omp_clause (gimple_location (stmt),
2388 i ? OMP_CLAUSE_FIRSTPRIVATE
2389 : OMP_CLAUSE_SHARED);
2390 OMP_CLAUSE_DECL (c) = decl;
2391 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2392 gimple_omp_taskreg_set_clauses (stmt, c);
2393 }
2394 }
2395 info->static_chain_added |= save_static_chain_added;
2396 break;
2397
2398 case GIMPLE_OMP_TARGET:
2399 if (!is_gimple_omp_offloaded (stmt))
2400 {
2401 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2402 break;
2403 }
2404 save_static_chain_added = info->static_chain_added;
2405 info->static_chain_added = 0;
2406 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2407 for (i = 0; i < 2; i++)
2408 {
2409 tree c, decl;
2410 if ((info->static_chain_added & (1 << i)) == 0)
2411 continue;
2412 decl = i ? get_chain_decl (info) : info->frame_decl;
2413 /* Don't add CHAIN.* or FRAME.* twice. */
2414 for (c = gimple_omp_target_clauses (stmt);
2415 c;
2416 c = OMP_CLAUSE_CHAIN (c))
2417 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2418 && OMP_CLAUSE_DECL (c) == decl)
2419 break;
2420 if (c == NULL)
2421 {
2422 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2423 OMP_CLAUSE_DECL (c) = decl;
2424 OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2425 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2426 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2427 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2428 c);
2429 }
2430 }
2431 info->static_chain_added |= save_static_chain_added;
2432 break;
2433
2434 case GIMPLE_OMP_FOR:
2435 walk_body (convert_gimple_call, NULL, info,
2436 gimple_omp_for_pre_body_ptr (stmt));
2437 /* FALLTHRU */
2438 case GIMPLE_OMP_SECTIONS:
2439 case GIMPLE_OMP_SECTION:
2440 case GIMPLE_OMP_SINGLE:
2441 case GIMPLE_OMP_TEAMS:
2442 case GIMPLE_OMP_MASTER:
2443 case GIMPLE_OMP_TASKGROUP:
2444 case GIMPLE_OMP_ORDERED:
2445 case GIMPLE_OMP_CRITICAL:
2446 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2447 break;
2448
2449 default:
2450 /* Keep looking for other operands. */
2451 *handled_ops_p = false;
2452 return NULL_TREE;
2453 }
2454
2455 *handled_ops_p = true;
2456 return NULL_TREE;
2457 }
2458
2459 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2460 call expressions. At the same time, determine if a nested function
2461 actually uses its static chain; if not, remember that. */
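/* A sketch of why a fixed point is needed (illustrative code):

     void a (void)
     {
       int x;
       void c (void) { x++; }        // uses a's locals
       void b (void) { use (&c); }   // chain-free until rewritten
       ...
     }

   Rewriting &c inside b materializes c's trampoline in a's frame, so
   b must now reach that frame through a static chain of its own.
   That flips DECL_STATIC_CHAIN for b, and every caller of b has to be
   re-examined; we iterate until the count of chain users stops
   changing.  */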
2462
2463 static void
2464 convert_all_function_calls (struct nesting_info *root)
2465 {
2466 unsigned int chain_count = 0, old_chain_count, iter_count;
2467 struct nesting_info *n;
2468
2469 /* First, optimistically clear static_chain for all decls that haven't
2470 used the static chain already for variable access. But always create
2471 it if not optimizing. This makes it possible to reconstruct the static
2472 nesting tree at run time and thus to resolve up-level references from
2473 within the debugger. */
2474 FOR_EACH_NEST_INFO (n, root)
2475 {
2476 tree decl = n->context;
2477 if (!optimize)
2478 {
2479 if (n->inner)
2480 (void) get_frame_type (n);
2481 if (n->outer)
2482 (void) get_chain_decl (n);
2483 }
2484 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2485 {
2486 DECL_STATIC_CHAIN (decl) = 0;
2487 if (dump_file && (dump_flags & TDF_DETAILS))
2488 fprintf (dump_file, "Guessing no static-chain for %s\n",
2489 lang_hooks.decl_printable_name (decl, 2));
2490 }
2491 else
2492 DECL_STATIC_CHAIN (decl) = 1;
2493 chain_count += DECL_STATIC_CHAIN (decl);
2494 }
2495
2496 /* Walk the functions and perform transformations. Note that these
2497 transformations can induce new uses of the static chain, which in turn
2498 require re-examining all users of the decl. */
2499 /* ??? It would make sense to try to use the call graph to speed this up,
2500 but the call graph hasn't really been built yet. Even if it had been, we
2501 would still need to iterate in this loop since address-of references
2502 wouldn't show up in the callgraph anyway. */
2503 iter_count = 0;
2504 do
2505 {
2506 old_chain_count = chain_count;
2507 chain_count = 0;
2508 iter_count++;
2509
2510 if (dump_file && (dump_flags & TDF_DETAILS))
2511 fputc ('\n', dump_file);
2512
2513 FOR_EACH_NEST_INFO (n, root)
2514 {
2515 tree decl = n->context;
2516 walk_function (convert_tramp_reference_stmt,
2517 convert_tramp_reference_op, n);
2518 walk_function (convert_gimple_call, NULL, n);
2519 chain_count += DECL_STATIC_CHAIN (decl);
2520 }
2521 }
2522 while (chain_count != old_chain_count);
2523
2524 if (dump_file && (dump_flags & TDF_DETAILS))
2525 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2526 iter_count);
2527 }
2528
2529 struct nesting_copy_body_data
2530 {
2531 copy_body_data cb;
2532 struct nesting_info *root;
2533 };
2534
2535 /* A helper subroutine for debug_var_chain type remapping. */
2536
2537 static tree
2538 nesting_copy_decl (tree decl, copy_body_data *id)
2539 {
2540 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2541 tree *slot = nid->root->var_map->get (decl);
2542
2543 if (slot)
2544 return (tree) *slot;
2545
2546 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2547 {
2548 tree new_decl = copy_decl_no_change (decl, id);
2549 DECL_ORIGINAL_TYPE (new_decl)
2550 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2551 return new_decl;
2552 }
2553
2554 if (TREE_CODE (decl) == VAR_DECL
2555 || TREE_CODE (decl) == PARM_DECL
2556 || TREE_CODE (decl) == RESULT_DECL)
2557 return decl;
2558
2559 return copy_decl_no_change (decl, id);
2560 }
2561
2562 /* A helper function for remap_vla_decls. See if *TP contains
2563 any remapped variables. */
2564
2565 static tree
2566 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2567 {
2568 struct nesting_info *root = (struct nesting_info *) data;
2569 tree t = *tp;
2570
2571 if (DECL_P (t))
2572 {
2573 *walk_subtrees = 0;
2574 tree *slot = root->var_map->get (t);
2575
2576 if (slot)
2577 return *slot;
2578 }
2579 return NULL;
2580 }
2581
2582 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2583 involved. */
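/* Sketch: for a VLA whose bound is captured by a nested function, e.g.

     void f (int n)
     {
       int a[n];
       void g (void) { a[0] = n; }
       ...
     }

   A's DECL_VALUE_EXPR is an INDIRECT_REF of an artificial pointer
   variable, and its type is variably modified.  If that pointer or a
   size decl mentioned in the type was moved into the frame, the value
   expression and the type must be remapped to the frame copies, which
   is what this function does.  */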
2584
2585 static void
2586 remap_vla_decls (tree block, struct nesting_info *root)
2587 {
2588 tree var, subblock, val, type;
2589 struct nesting_copy_body_data id;
2590
2591 for (subblock = BLOCK_SUBBLOCKS (block);
2592 subblock;
2593 subblock = BLOCK_CHAIN (subblock))
2594 remap_vla_decls (subblock, root);
2595
2596 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2597 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2598 {
2599 val = DECL_VALUE_EXPR (var);
2600 type = TREE_TYPE (var);
2601
2602 if (!(TREE_CODE (val) == INDIRECT_REF
2603 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2604 && variably_modified_type_p (type, NULL)))
2605 continue;
2606
2607 if (root->var_map->get (TREE_OPERAND (val, 0))
2608 || walk_tree (&type, contains_remapped_vars, root, NULL))
2609 break;
2610 }
2611
2612 if (var == NULL_TREE)
2613 return;
2614
2615 memset (&id, 0, sizeof (id));
2616 id.cb.copy_decl = nesting_copy_decl;
2617 id.cb.decl_map = new hash_map<tree, tree>;
2618 id.root = root;
2619
2620 for (; var; var = DECL_CHAIN (var))
2621 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2622 {
2623 struct nesting_info *i;
2624 tree newt, context;
2625
2626 val = DECL_VALUE_EXPR (var);
2627 type = TREE_TYPE (var);
2628
2629 if (!(TREE_CODE (val) == INDIRECT_REF
2630 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2631 && variably_modified_type_p (type, NULL)))
2632 continue;
2633
2634 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2635 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2636 continue;
2637
2638 context = decl_function_context (var);
2639 for (i = root; i; i = i->outer)
2640 if (i->context == context)
2641 break;
2642
2643 if (i == NULL)
2644 continue;
2645
2646 /* Fully expand value expressions. This avoids debug variables that are
2647 referenced only from value expressions and thus could be swept during GC. */
2648 if (slot)
2649 {
2650 tree t = (tree) *slot;
2651 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2652 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2653 }
2654
2655 id.cb.src_fn = i->context;
2656 id.cb.dst_fn = i->context;
2657 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2658
2659 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2660 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2661 {
2662 newt = TREE_TYPE (newt);
2663 type = TREE_TYPE (type);
2664 }
2665 if (TYPE_NAME (newt)
2666 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2667 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2668 && newt != type
2669 && TYPE_NAME (newt) == TYPE_NAME (type))
2670 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2671
2672 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2673 if (val != DECL_VALUE_EXPR (var))
2674 SET_DECL_VALUE_EXPR (var, val);
2675 }
2676
2677 delete id.cb.decl_map;
2678 }
2679
2680 /* Fold the MEM_REF *E. */
2681 bool
2682 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2683 {
2684 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2685 *ref_p = fold (*ref_p);
2686 return true;
2687 }
2688
2689 /* Do "everything else" to clean up or complete state collected by the
2690 various walking passes -- lay out the types and decls, generate code
2691 to initialize the frame decl, store critical expressions in the
2692 struct function for rtl to find. */
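/* A sketch of the initialization sequence this emits at the start of
   the outer function's body (dump-style names):

     FRAME.p = p;              // each nonlocally referenced PARM_DECL
     FRAME.CHAIN = CHAIN.1;    // chain_field, if the frame has one
     __builtin_init_trampoline (&FRAME.t_inner, &inner, &FRAME);
*/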
2693
2694 static void
2695 finalize_nesting_tree_1 (struct nesting_info *root)
2696 {
2697 gimple_seq stmt_list;
2698 gimple stmt;
2699 tree context = root->context;
2700 struct function *sf;
2701
2702 stmt_list = NULL;
2703
2704 /* If we created a non-local frame type or decl, we need to lay them
2705 out at this time. */
2706 if (root->frame_type)
2707 {
2708 /* In some cases the frame type will trigger the -Wpadded warning.
2709 This is not helpful; suppress it. */
2710 int save_warn_padded = warn_padded;
2711 tree *adjust;
2712
2713 warn_padded = 0;
2714 layout_type (root->frame_type);
2715 warn_padded = save_warn_padded;
2716 layout_decl (root->frame_decl, 0);
2717
2718 /* Remove root->frame_decl from root->new_local_var_chain, so
2719 that we can declare it also in the lexical blocks, which
2720 helps ensure virtual regs that end up appearing in its RTL
2721 expression get substituted in instantiate_virtual_regs(). */
2722 for (adjust = &root->new_local_var_chain;
2723 *adjust != root->frame_decl;
2724 adjust = &DECL_CHAIN (*adjust))
2725 gcc_assert (DECL_CHAIN (*adjust));
2726 *adjust = DECL_CHAIN (*adjust);
2727
2728 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2729 declare_vars (root->frame_decl,
2730 gimple_seq_first_stmt (gimple_body (context)), true);
2731 }
2732
2733 /* If any parameters were referenced non-locally, then we need to
2734 insert a copy. Likewise, if any variables were referenced by
2735 pointer, we need to initialize the address. */
2736 if (root->any_parm_remapped)
2737 {
2738 tree p;
2739 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2740 {
2741 tree field, x, y;
2742
2743 field = lookup_field_for_decl (root, p, NO_INSERT);
2744 if (!field)
2745 continue;
2746
2747 if (use_pointer_in_frame (p))
2748 x = build_addr (p, context);
2749 else
2750 x = p;
2751
2752 /* If the assignment is from a non-register, the stmt is
2753 not valid gimple. Make it so by using a temporary instead. */
2754 if (!is_gimple_reg (x)
2755 && is_gimple_reg_type (TREE_TYPE (x)))
2756 {
2757 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2758 x = init_tmp_var (root, x, &gsi);
2759 }
2760
2761 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2762 root->frame_decl, field, NULL_TREE);
2763 stmt = gimple_build_assign (y, x);
2764 gimple_seq_add_stmt (&stmt_list, stmt);
2765 }
2766 }
2767
2768 /* If a chain_field was created, then it needs to be initialized
2769 from chain_decl. */
2770 if (root->chain_field)
2771 {
2772 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2773 root->frame_decl, root->chain_field, NULL_TREE);
2774 stmt = gimple_build_assign (x, get_chain_decl (root));
2775 gimple_seq_add_stmt (&stmt_list, stmt);
2776 }
2777
2778 /* If trampolines were created, then we need to initialize them. */
2779 if (root->any_tramp_created)
2780 {
2781 struct nesting_info *i;
2782 for (i = root->inner; i ; i = i->next)
2783 {
2784 tree arg1, arg2, arg3, x, field;
2785
2786 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2787 if (!field)
2788 continue;
2789
2790 gcc_assert (DECL_STATIC_CHAIN (i->context));
2791 arg3 = build_addr (root->frame_decl, context);
2792
2793 arg2 = build_addr (i->context, context);
2794
2795 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2796 root->frame_decl, field, NULL_TREE);
2797 arg1 = build_addr (x, context);
2798
2799 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2800 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2801 gimple_seq_add_stmt (&stmt_list, stmt);
2802 }
2803 }
2804
2805 /* If we created initialization statements, insert them. */
2806 if (stmt_list)
2807 {
2808 gbind *bind;
2809 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2810 bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
2811 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2812 gimple_bind_set_body (bind, stmt_list);
2813 }
2814
2815 /* If a chain_decl was created, then it needs to be registered with
2816 struct function so that it gets initialized from the static chain
2817 register at the beginning of the function. */
2818 sf = DECL_STRUCT_FUNCTION (root->context);
2819 sf->static_chain_decl = root->chain_decl;
2820
2821 /* Similarly for the non-local goto save area. */
2822 if (root->nl_goto_field)
2823 {
2824 sf->nonlocal_goto_save_area
2825 = get_frame_field (root, context, root->nl_goto_field, NULL);
2826 sf->has_nonlocal_label = 1;
2827 }
2828
2829 /* Make sure all new local variables get inserted into the
2830 proper BIND_EXPR. */
2831 if (root->new_local_var_chain)
2832 declare_vars (root->new_local_var_chain,
2833 gimple_seq_first_stmt (gimple_body (root->context)),
2834 false);
2835
2836 if (root->debug_var_chain)
2837 {
2838 tree debug_var;
2839 gbind *scope;
2840
2841 remap_vla_decls (DECL_INITIAL (root->context), root);
2842
2843 for (debug_var = root->debug_var_chain; debug_var;
2844 debug_var = DECL_CHAIN (debug_var))
2845 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2846 break;
2847
2848 /* If there are any debug decls with variable length types,
2849 remap those types using other debug_var_chain variables. */
2850 if (debug_var)
2851 {
2852 struct nesting_copy_body_data id;
2853
2854 memset (&id, 0, sizeof (id));
2855 id.cb.copy_decl = nesting_copy_decl;
2856 id.cb.decl_map = new hash_map<tree, tree>;
2857 id.root = root;
2858
2859 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
2860 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2861 {
2862 tree type = TREE_TYPE (debug_var);
2863 tree newt, t = type;
2864 struct nesting_info *i;
2865
2866 for (i = root; i; i = i->outer)
2867 if (variably_modified_type_p (type, i->context))
2868 break;
2869
2870 if (i == NULL)
2871 continue;
2872
2873 id.cb.src_fn = i->context;
2874 id.cb.dst_fn = i->context;
2875 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2876
2877 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2878 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2879 {
2880 newt = TREE_TYPE (newt);
2881 t = TREE_TYPE (t);
2882 }
2883 if (TYPE_NAME (newt)
2884 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2885 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2886 && newt != t
2887 && TYPE_NAME (newt) == TYPE_NAME (t))
2888 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2889 }
2890
2891 delete id.cb.decl_map;
2892 }
2893
2894 scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
2895 if (gimple_bind_block (scope))
2896 declare_vars (root->debug_var_chain, scope, true);
2897 else
2898 BLOCK_VARS (DECL_INITIAL (root->context))
2899 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2900 root->debug_var_chain);
2901 }
2902
2903 /* Fold the rewritten MEM_REF trees. */
2904 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
2905
2906 /* Dump the translated tree function. */
2907 if (dump_file)
2908 {
2909 fputs ("\n\n", dump_file);
2910 dump_function_to_file (root->context, dump_file, dump_flags);
2911 }
2912 }
2913
2914 static void
2915 finalize_nesting_tree (struct nesting_info *root)
2916 {
2917 struct nesting_info *n;
2918 FOR_EACH_NEST_INFO (n, root)
2919 finalize_nesting_tree_1 (n);
2920 }
2921
2922 /* Unnest the nodes and pass them to cgraph. */
2923
2924 static void
2925 unnest_nesting_tree_1 (struct nesting_info *root)
2926 {
2927 struct cgraph_node *node = cgraph_node::get (root->context);
2928
2929 /* For nested functions, update the cgraph to reflect unnesting.
2930 We also delay finalizing these functions until this point. */
2931 if (node->origin)
2932 {
2933 node->unnest ();
2934 cgraph_node::finalize_function (root->context, true);
2935 }
2936 }
2937
2938 static void
2939 unnest_nesting_tree (struct nesting_info *root)
2940 {
2941 struct nesting_info *n;
2942 FOR_EACH_NEST_INFO (n, root)
2943 unnest_nesting_tree_1 (n);
2944 }
2945
2946 /* Free the data structures allocated during this pass. */
2947
2948 static void
2949 free_nesting_tree (struct nesting_info *root)
2950 {
2951 struct nesting_info *node, *next;
2952
2953 node = iter_nestinfo_start (root);
2954 do
2955 {
2956 next = iter_nestinfo_next (node);
2957 delete node->var_map;
2958 delete node->field_map;
2959 delete node->mem_refs;
2960 free (node);
2961 node = next;
2962 }
2963 while (node);
2964 }
2965
2966 /* Gimplify a function and all its nested functions. */
2967 static void
2968 gimplify_all_functions (struct cgraph_node *root)
2969 {
2970 struct cgraph_node *iter;
2971 if (!gimple_body (root->decl))
2972 gimplify_function_tree (root->decl);
2973 for (iter = root->nested; iter; iter = iter->next_nested)
2974 gimplify_all_functions (iter);
2975 }
2976
2977 /* Main entry point for this pass. Process FNDECL and all of its nested
2978 subroutines and turn them into something less tightly bound. */
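/* As a compact illustration (GNU C; the FRAME name follows the dump
   convention and the generated identifiers differ):

     int outer (int n)
     {
       int x = n;
       int inner (void) { return x + 1; }
       return inner ();
     }

   is lowered to, conceptually:

     struct FRAME_outer { int x; };
     static int inner (struct FRAME_outer *chain)
     { return chain->x + 1; }
     int outer (int n)
     {
       struct FRAME_outer frame;
       frame.x = n;
       return inner (&frame);
     }

   with the chain argument actually passed in the target's static
   chain register rather than as a visible parameter.  */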
2979
2980 void
2981 lower_nested_functions (tree fndecl)
2982 {
2983 struct cgraph_node *cgn;
2984 struct nesting_info *root;
2985
2986 /* If there are no nested functions, there's nothing to do. */
2987 cgn = cgraph_node::get (fndecl);
2988 if (!cgn->nested)
2989 return;
2990
2991 gimplify_all_functions (cgn);
2992
2993 dump_file = dump_begin (TDI_nested, &dump_flags);
2994 if (dump_file)
2995 fprintf (dump_file, "\n;; Function %s\n\n",
2996 lang_hooks.decl_printable_name (fndecl, 2));
2997
2998 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
2999 root = create_nesting_tree (cgn);
3000
3001 walk_all_functions (convert_nonlocal_reference_stmt,
3002 convert_nonlocal_reference_op,
3003 root);
3004 walk_all_functions (convert_local_reference_stmt,
3005 convert_local_reference_op,
3006 root);
3007 walk_all_functions (convert_nl_goto_reference, NULL, root);
3008 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3009
3010 convert_all_function_calls (root);
3011 finalize_nesting_tree (root);
3012 unnest_nesting_tree (root);
3013
3014 free_nesting_tree (root);
3015 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3016
3017 if (dump_file)
3018 {
3019 dump_end (TDI_nested, dump_file);
3020 dump_file = NULL;
3021 }
3022 }
3023
3024 #include "gt-tree-nested.h"