/* gcc/tree-nested.c
   (ChangeLog: coretypes.h: Include hash-table.h and hash-set.h for host
   files.)  */
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "input.h"
25 #include "alias.h"
26 #include "symtab.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "stringpool.h"
30 #include "stor-layout.h"
31 #include "tm_p.h"
32 #include "hard-reg-set.h"
33 #include "function.h"
34 #include "tree-dump.h"
35 #include "tree-inline.h"
36 #include "predict.h"
37 #include "basic-block.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "gimplify.h"
44 #include "gimple-iterator.h"
45 #include "gimple-walk.h"
46 #include "tree-iterator.h"
47 #include "bitmap.h"
48 #include "plugin-api.h"
49 #include "ipa-ref.h"
50 #include "cgraph.h"
51 #include "tree-cfg.h"
52 #include "rtl.h"
53 #include "flags.h"
54 #include "insn-config.h"
55 #include "expmed.h"
56 #include "dojump.h"
57 #include "explow.h"
58 #include "calls.h"
59 #include "emit-rtl.h"
60 #include "varasm.h"
61 #include "stmt.h"
62 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
63 #include "langhooks.h"
64 #include "gimple-low.h"
65 #include "gomp-constants.h"
66
67
68 /* The object of this pass is to lower the representation of a set of nested
69 functions in order to expose all of the gory details of the various
70 nonlocal references. We want to do this sooner rather than later, in
71 order to give us more freedom in emitting all of the functions in question.
72
73 Back in olden times, when gcc was young, we developed an insanely
74 complicated scheme whereby variables which were referenced nonlocally
75 were forced to live in the stack of the declaring function, and then
76 the nested functions magically discovered where these variables were
77 placed. In order for this scheme to function properly, it required
78 that the outer function be partially expanded, then we switch to
79 compiling the inner function, and once done with those we switch back
80 to compiling the outer function. Such delicate ordering requirements
81 makes it difficult to do whole translation unit optimizations
82 involving such functions.
83
84 The implementation here is much more direct. Everything that can be
85 referenced by an inner function is a member of an explicitly created
86 structure herein called the "nonlocal frame struct". The incoming
87 static chain for a nested function is a pointer to this struct in
88 the parent. In this way, we settle on known offsets from a known
89 base, and so are decoupled from the logic that places objects in the
90 function's stack frame. More importantly, we don't have to wait for
91 that to happen -- since the compilation of the inner function is no
92 longer tied to a real stack frame, the nonlocal frame struct can be
93 allocated anywhere. Which means that the outer function is now
94 inlinable.
95
96 Theory of operation here is very simple. Iterate over all the
97 statements in all the functions (depth first) several times,
98 allocating structures and fields on demand. In general we want to
99 examine inner functions first, so that we can avoid making changes
100 to outer functions which are unnecessary.
101
102 The order of the passes matters a bit, in that later passes will be
103 skipped if it is discovered that the functions don't actually interact
104 at all. That is, they're nested in the lexical sense but could have
105 been written as independent functions without change. */
106
107
/* Bookkeeping for one function in the nesting tree.  OUTER/INNER/NEXT
   link the nodes of the tree together; the remaining members accumulate
   state as the lowering passes walk the function.  */

struct nesting_info
{
  struct nesting_info *outer;	/* Info for the enclosing function.  */
  struct nesting_info *inner;	/* Head of the list of directly nested fns.  */
  struct nesting_info *next;	/* Next sibling nested inside OUTER.  */

  /* Maps a non-locally referenced decl to its field in FRAME_TYPE.  */
  hash_map<tree, tree> *field_map;
  /* Maps decls to replacement debug decls, and nested FUNCTION_DECLs to
     their trampoline fields (see lookup_tramp_for_decl).  */
  hash_map<tree, tree> *var_map;
  /* Addresses of memory-reference operands noted during the walks;
     presumably revisited by a later pass — not visible in this chunk.  */
  hash_set<tree *> *mem_refs;
  /* DECL_UIDs for which frame-field expansion is suppressed (tested in
     convert_nonlocal_reference_op).  */
  bitmap suppress_expansion;

  tree context;			/* The FUNCTION_DECL this node describes.  */
  tree new_local_var_chain;	/* Temporaries awaiting registration.  */
  tree debug_var_chain;		/* Debug decls with DECL_VALUE_EXPRs.  */
  tree frame_type;		/* The non-local frame RECORD_TYPE.  */
  tree frame_decl;		/* Local VAR_DECL instance of FRAME_TYPE.  */
  tree chain_field;		/* Field of FRAME_TYPE holding parent chain.  */
  tree chain_decl;		/* Incoming static chain "parameter".  */
  tree nl_goto_field;		/* Field holding the nonlocal-goto buffer.  */

  bool any_parm_remapped;	/* Some PARM_DECL was moved into the frame.  */
  bool any_tramp_created;	/* Some trampoline field was created.  */
  /* Flag(s) describing static-chain creation; exact encoding used by
     code outside this chunk — TODO confirm against callers.  */
  char static_chain_added;
};
132
133
134 /* Iterate over the nesting tree, starting with ROOT, depth first. */
135
136 static inline struct nesting_info *
137 iter_nestinfo_start (struct nesting_info *root)
138 {
139 while (root->inner)
140 root = root->inner;
141 return root;
142 }
143
144 static inline struct nesting_info *
145 iter_nestinfo_next (struct nesting_info *node)
146 {
147 if (node->next)
148 return iter_nestinfo_start (node->next);
149 return node->outer;
150 }
151
152 #define FOR_EACH_NEST_INFO(I, ROOT) \
153 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
154
155 /* Obstack used for the bitmaps in the struct above. */
156 static struct bitmap_obstack nesting_info_bitmap_obstack;
157
158
159 /* We're working in so many different function contexts simultaneously,
160 that create_tmp_var is dangerous. Prevent mishap. */
161 #define create_tmp_var cant_use_create_tmp_var_here_dummy
162
163 /* Like create_tmp_var, except record the variable for registration at
164 the given nesting level. */
165
166 static tree
167 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
168 {
169 tree tmp_var;
170
171 /* If the type is of variable size or a type which must be created by the
172 frontend, something is wrong. Note that we explicitly allow
173 incomplete types here, since we create them ourselves here. */
174 gcc_assert (!TREE_ADDRESSABLE (type));
175 gcc_assert (!TYPE_SIZE_UNIT (type)
176 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
177
178 tmp_var = create_tmp_var_raw (type, prefix);
179 DECL_CONTEXT (tmp_var) = info->context;
180 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
181 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
182 if (TREE_CODE (type) == COMPLEX_TYPE
183 || TREE_CODE (type) == VECTOR_TYPE)
184 DECL_GIMPLE_REG_P (tmp_var) = 1;
185
186 info->new_local_var_chain = tmp_var;
187
188 return tmp_var;
189 }
190
191 /* Take the address of EXP to be used within function CONTEXT.
192 Mark it for addressability as necessary. */
193
194 tree
195 build_addr (tree exp, tree context)
196 {
197 tree base = exp;
198 tree save_context;
199 tree retval;
200
201 while (handled_component_p (base))
202 base = TREE_OPERAND (base, 0);
203
204 if (DECL_P (base))
205 TREE_ADDRESSABLE (base) = 1;
206
207 /* Building the ADDR_EXPR will compute a set of properties for
208 that ADDR_EXPR. Those properties are unfortunately context
209 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
210
211 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
212 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
213 way the properties are for the ADDR_EXPR are computed properly. */
214 save_context = current_function_decl;
215 current_function_decl = context;
216 retval = build_fold_addr_expr (exp);
217 current_function_decl = save_context;
218 return retval;
219 }
220
221 /* Insert FIELD into TYPE, sorted by alignment requirements. */
222
223 void
224 insert_field_into_struct (tree type, tree field)
225 {
226 tree *p;
227
228 DECL_CONTEXT (field) = type;
229
230 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
231 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
232 break;
233
234 DECL_CHAIN (field) = *p;
235 *p = field;
236
237 /* Set correct alignment for frame struct type. */
238 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
239 TYPE_ALIGN (type) = DECL_ALIGN (field);
240 }
241
242 /* Build or return the RECORD_TYPE that describes the frame state that is
243 shared between INFO->CONTEXT and its nested functions. This record will
244 not be complete until finalize_nesting_tree; up until that point we'll
245 be adding fields as necessary.
246
247 We also build the DECL that represents this frame in the function. */
248
249 static tree
250 get_frame_type (struct nesting_info *info)
251 {
252 tree type = info->frame_type;
253 if (!type)
254 {
255 char *name;
256
257 type = make_node (RECORD_TYPE);
258
259 name = concat ("FRAME.",
260 IDENTIFIER_POINTER (DECL_NAME (info->context)),
261 NULL);
262 TYPE_NAME (type) = get_identifier (name);
263 free (name);
264
265 info->frame_type = type;
266 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
267 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
268
269 /* ??? Always make it addressable for now, since it is meant to
270 be pointed to by the static chain pointer. This pessimizes
271 when it turns out that no static chains are needed because
272 the nested functions referencing non-local variables are not
273 reachable, but the true pessimization is to create the non-
274 local frame structure in the first place. */
275 TREE_ADDRESSABLE (info->frame_decl) = 1;
276 }
277 return type;
278 }
279
280 /* Return true if DECL should be referenced by pointer in the non-local
281 frame structure. */
282
283 static bool
284 use_pointer_in_frame (tree decl)
285 {
286 if (TREE_CODE (decl) == PARM_DECL)
287 {
288 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
289 sized decls, and inefficient to copy large aggregates. Don't bother
290 moving anything but scalar variables. */
291 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
292 }
293 else
294 {
295 /* Variable sized types make things "interesting" in the frame. */
296 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
297 }
298 }
299
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.
   With NO_INSERT, return the existing field or NULL_TREE.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* The frame holds only a pointer to DECL; the object itself
	     stays wherever it was originally allocated.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The frame holds DECL itself; mirror the properties that
	     affect layout, access, and debugging.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  DECL_ALIGN (field) = DECL_ALIGN (decl);
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      /* A remapped parameter will have to be copied into the frame on
	 function entry; remember that this is needed.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
345
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      /* The chain points at the parent's non-local frame struct.  */
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      /* Report when a static chain is first forced onto the function.  */
      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
391
392 /* Build or return the field within the non-local frame state that holds
393 the static chain for INFO->CONTEXT. This is the way to walk back up
394 multiple nesting levels. */
395
396 static tree
397 get_chain_field (struct nesting_info *info)
398 {
399 tree field = info->chain_field;
400
401 if (!field)
402 {
403 tree type = build_pointer_type (get_frame_type (info->outer));
404
405 field = make_node (FIELD_DECL);
406 DECL_NAME (field) = get_identifier ("__chain");
407 TREE_TYPE (field) = type;
408 DECL_ALIGN (field) = TYPE_ALIGN (type);
409 DECL_NONADDRESSABLE_P (field) = 1;
410
411 insert_field_into_struct (get_frame_type (info), field);
412
413 info->chain_field = field;
414
415 if (dump_file
416 && (dump_flags & TDF_DETAILS)
417 && !DECL_STATIC_CHAIN (info->context))
418 fprintf (dump_file, "Setting static-chain for %s\n",
419 lang_hooks.decl_printable_name (info->context, 2));
420
421 DECL_STATIC_CHAIN (info->context) = 1;
422 }
423 return field;
424 }
425
426 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
427
428 static tree
429 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
430 gcall *call)
431 {
432 tree t;
433
434 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
435 gimple_call_set_lhs (call, t);
436 if (! gsi_end_p (*gsi))
437 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
438 gsi_insert_before (gsi, call, GSI_SAME_STMT);
439
440 return t;
441 }
442
443
444 /* Copy EXP into a temporary. Allocate the temporary in the context of
445 INFO and insert the initialization statement before GSI. */
446
447 static tree
448 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
449 {
450 tree t;
451 gimple stmt;
452
453 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
454 stmt = gimple_build_assign (t, exp);
455 if (! gsi_end_p (*gsi))
456 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
457 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
458
459 return t;
460 }
461
462
463 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
464
465 static tree
466 gsi_gimplify_val (struct nesting_info *info, tree exp,
467 gimple_stmt_iterator *gsi)
468 {
469 if (is_gimple_val (exp))
470 return exp;
471 else
472 return init_tmp_var (info, exp, gsi);
473 }
474
475 /* Similarly, but copy from the temporary and insert the statement
476 after the iterator. */
477
478 static tree
479 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
480 {
481 tree t;
482 gimple stmt;
483
484 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
485 stmt = gimple_build_assign (exp, t);
486 if (! gsi_end_p (*gsi))
487 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
488 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
489
490 return t;
491 }
492
/* Build or return the type used to represent a nested function trampoline.
   The type is shared by all trampolines, so it is built only once and
   cached in TRAMPOLINE_TYPE (a GC root).  */

static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  /* ALIGN/SIZE come from the target description macros.  */
  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* The trampoline is a RECORD_TYPE wrapping a single "__data" field:
     a char array of SIZE bytes with the required alignment.  */
  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
532
533 /* Given DECL, a nested function, find or create a field in the non-local
534 frame structure for a trampoline for this function. */
535
536 static tree
537 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
538 enum insert_option insert)
539 {
540 if (insert == NO_INSERT)
541 {
542 tree *slot = info->var_map->get (decl);
543 return slot ? *slot : NULL_TREE;
544 }
545
546 tree *slot = &info->var_map->get_or_insert (decl);
547 if (!*slot)
548 {
549 tree field = make_node (FIELD_DECL);
550 DECL_NAME (field) = DECL_NAME (decl);
551 TREE_TYPE (field) = get_trampoline_type (info);
552 TREE_ADDRESSABLE (field) = 1;
553
554 insert_field_into_struct (get_frame_type (info), field);
555 *slot = field;
556
557 info->any_tramp_created = true;
558 }
559
560 return *slot;
561 }
562
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Save-area size in words, plus one word for the frame pointer.  */
      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
605
606 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
607
608 static void
609 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
610 struct nesting_info *info, gimple_seq *pseq)
611 {
612 struct walk_stmt_info wi;
613
614 memset (&wi, 0, sizeof (wi));
615 wi.info = info;
616 wi.val_only = true;
617 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
618 }
619
620
621 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
622
623 static inline void
624 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
625 struct nesting_info *info)
626 {
627 gimple_seq body = gimple_body (info->context);
628 walk_body (callback_stmt, callback_op, info, &body);
629 gimple_set_body (info->context, body);
630 }
631
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Walk the loop-control operands with an initially empty statement
     sequence; statements the callback emits through WI.GSI are
     collected and moved into the pre-body below.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is being assigned, so it must not be reduced to a
	 plain value (val_only = false).  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment must be a binary expression; walk its two
	 operands with the appropriate lvalue/rvalue flags.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* If the callback created any statements, append them to the loop's
     pre-body so they execute before the loop itself.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
684
685 /* Similarly for ROOT and all functions nested underneath, depth first. */
686
687 static void
688 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
689 struct nesting_info *root)
690 {
691 struct nesting_info *n;
692 FOR_EACH_NEST_INFO (n, root)
693 walk_function (callback_stmt, callback_op, n);
694 }
695
696
697 /* We have to check for a fairly pathological case. The operands of function
698 nested function are to be interpreted in the context of the enclosing
699 function. So if any are variably-sized, they will get remapped when the
700 enclosing function is inlined. But that remapping would also have to be
701 done in the types of the PARM_DECLs of the nested function, meaning the
702 argument types of that function will disagree with the arguments in the
703 calls to that function. So we'd either have to make a copy of the nested
704 function corresponding to each time the enclosing function was inlined or
705 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
706 function. The former is not practical. The latter would still require
707 detecting this case to know when to add the conversions. So, for now at
708 least, we don't inline such an enclosing function.
709
710 We have to do that check recursively, so here return indicating whether
711 FNDECL has such a nested function. ORIG_FN is the function we were
712 trying to inline to use for checking whether any argument is variably
713 modified by anything in it.
714
715 It would be better to do this in tree-inline.c so that we could give
716 the appropriate warning for why a function can't be inlined, but that's
717 too late since the nesting structure has already been flattened and
718 adding a flag just to record this fact seems a waste of a flag. */
719
720 static bool
721 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
722 {
723 struct cgraph_node *cgn = cgraph_node::get (fndecl);
724 tree arg;
725
726 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
727 {
728 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
729 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
730 return true;
731
732 if (check_for_nested_with_variably_modified (cgn->decl,
733 orig_fndecl))
734 return true;
735 }
736
737 return false;
738 }
739
740 /* Construct our local datastructure describing the function nesting
741 tree rooted by CGN. */
742
743 static struct nesting_info *
744 create_nesting_tree (struct cgraph_node *cgn)
745 {
746 struct nesting_info *info = XCNEW (struct nesting_info);
747 info->field_map = new hash_map<tree, tree>;
748 info->var_map = new hash_map<tree, tree>;
749 info->mem_refs = new hash_set<tree *>;
750 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
751 info->context = cgn->decl;
752
753 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
754 {
755 struct nesting_info *sub = create_nesting_tree (cgn);
756 sub->outer = info;
757 sub->next = info->inner;
758 info->inner = sub;
759 }
760
761 /* See discussion at check_for_nested_with_variably_modified for a
762 discussion of why this has to be here. */
763 if (check_for_nested_with_variably_modified (info->context, info->context))
764 DECL_UNINLINABLE (info->context) = true;
765
766 return info;
767 }
768
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The target is this very function: its static chain is simply
	 the address of our own frame object.  */
      x = build_addr (info->frame_decl, target_context);
    }
  else
    {
      /* Otherwise start from our incoming static chain and follow the
	 __chain field up one nesting level at a time, materializing
	 each intermediate pointer in a temporary, until we reach the
	 frame of TARGET_CONTEXT.  */
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
799
800
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
    }
  else
    {
      /* Walk up the static chain, one __chain dereference per nesting
	 level, materializing each step in a temporary, until X points
	 at TARGET_CONTEXT's frame.  */
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      /* Dereference the final pointer to get the frame object.  */
      x = build_simple_mem_ref (x);
    }

  /* Select FIELD out of the (local or remote) frame.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
837
838 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
839
/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OMP lowering.  Results are memoized in INFO->VAR_MAP.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries:
     the value expr must be a plain chain of COMPONENT_REFs and
     MEM_REFs, not a sequence of statements.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref (x);
    }

  /* Select DECL's field in the owning frame, adding one more
     indirection when the frame holds only a pointer to DECL.  */
  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The new decl is just a proxy: reading it means evaluating X.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Without optimization, VLA types referenced across the nesting
     boundary also need their size expressions made visible here.  */
  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
915
916
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  /* By default do not recurse; each case re-enables it as needed.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  /* Start with the proxy debug decl; unless expansion is
	     suppressed for T, replace it with the actual frame-field
	     access built via the static chain.  */
	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      /* Find the nesting_info of the function that owns T.  */
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      /* One more indirection when the frame holds only a
		 pointer to T.  */
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build_simple_mem_ref (x);
		}
	    }

	  /* If a plain value is required, spill through a temporary:
	     stored after the statement for an lhs, loaded before it
	     otherwise.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* The operand of an ADDR_EXPR is a reference, not a value.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    /* Operand 2 is the field's offset alignment, if present.  */
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Walk the index plus the optional lower-bound and
		 element-size operands.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      /* Finally walk the base object as a reference, not a value.  */
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1065
1066 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1067 struct walk_stmt_info *);
1068
/* Helper for convert_nonlocal_reference_stmt: rewrite all references to
   VAR_DECLs and PARM_DECLs that belong to outer functions inside the OMP
   clause chain at *PCLAUSES.  WI->info carries the nesting_info of the
   function being processed.  Returns true if any rewritten clause means
   the OMP region needs access to the enclosing frame (i.e. the static
   chain must be made available inside the region).  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Decls we replace below must not be expanded again when the region
     body is walked; collect their UIDs in a fresh copy of the
     suppression bitmap so the caller can restore the old set.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* A placeholder means the clause carries GIMPLE init/merge
	     sequences which are walked in the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* The step expression may itself reference nonlocal decls.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
	                                 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables are never rewritten.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      /* Replace the nonlocal decl with its local debug stand-in
		 and suppress further expansion of it in the body.  A
		 PRIVATE clause never reads the original value, so it
		 alone does not force the chain in.  */
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  /* These clauses have a single expression operand; rewrite any
	     nonlocal references inside it as an rvalue.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
	                                 &dummy, wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  /* The mapped entity may be a bare decl or an arbitrary
	     reference expression (e.g. an array section).  */
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	  /* No operands that could reference nonlocal decls.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to reduction,
     lastprivate and linear clauses.  This must happen after the
     suppression bitmap above is installed.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      /* Temporarily re-parent the placeholder so decls created
		 while walking the init/merge sequences land in the
		 current function.  */
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
1250
1251 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1252
1253 static void
1254 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1255 {
1256 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1257 type = TREE_TYPE (type);
1258
1259 if (TYPE_NAME (type)
1260 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1261 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1262 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1263
1264 while (POINTER_TYPE_P (type)
1265 || TREE_CODE (type) == VECTOR_TYPE
1266 || TREE_CODE (type) == FUNCTION_TYPE
1267 || TREE_CODE (type) == METHOD_TYPE)
1268 type = TREE_TYPE (type);
1269
1270 if (TREE_CODE (type) == ARRAY_TYPE)
1271 {
1272 tree domain, t;
1273
1274 note_nonlocal_vla_type (info, TREE_TYPE (type));
1275 domain = TYPE_DOMAIN (type);
1276 if (domain)
1277 {
1278 t = TYPE_MIN_VALUE (domain);
1279 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1280 && decl_function_context (t) != info->context)
1281 get_nonlocal_debug_decl (info, t);
1282 t = TYPE_MAX_VALUE (domain);
1283 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1284 && decl_function_context (t) != info->context)
1285 get_nonlocal_debug_decl (info, t);
1286 }
1287 }
1288 }
1289
1290 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1291 in BLOCK. */
1292
1293 static void
1294 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1295 {
1296 tree var;
1297
1298 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1299 if (TREE_CODE (var) == VAR_DECL
1300 && variably_modified_type_p (TREE_TYPE (var), NULL)
1301 && DECL_HAS_VALUE_EXPR_P (var)
1302 && decl_function_context (var) != info->context)
1303 note_nonlocal_vla_type (info, TREE_TYPE (var));
1304 }
1305
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = true;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* If any clause referenced the enclosing frame, pass the static
	 chain into the region as a firstprivate so the body can reach
	 the parent frame.  */
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Collect temporaries created while walking the region body so
	 they can be declared inside the region, not in the enclosing
	 function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* The index, bound and step expressions of the loop need the same
	 treatment as the body.  */
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
			   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
		 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* A non-offloaded target region is handled like the other OMP
	 regions above.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      /* For an offloaded region the chain cannot simply be passed; it
	 must be mapped TO the device explicitly.  */
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* These regions carry no clauses; only the body needs walking.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
	gbind *bind_stmt = as_a <gbind *> (stmt);
	/* Without optimization, VLA bounds from outer functions still
	   need debug decls; see note_nonlocal_block_vlas.  */
	if (!optimize && gimple_bind_block (bind_stmt))
	  note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));

	for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	  if (TREE_CODE (var) == NAMELIST_DECL)
	    {
	      /* Adjust decls mentioned in NAMELIST_DECL.  */
	      tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	      tree decl;
	      unsigned int i;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
		{
		  if (TREE_CODE (decl) == VAR_DECL
		      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		    continue;
		  if (decl_function_context (decl) != info->context)
		    CONSTRUCTOR_ELT (decls, i)->value
		      = get_nonlocal_debug_decl (info, decl);
		}
	    }

	*handled_ops_p = false;
	return NULL_TREE;
      }
    case GIMPLE_COND:
      /* Condition operands must be rvalues; let the walker visit them.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1492
1493
1494 /* A subroutine of convert_local_reference. Create a local variable
1495 in the parent function with DECL_VALUE_EXPR set to reference the
1496 field in FRAME. This is used both for debug info and in OMP
1497 lowering. */
1498
1499 static tree
1500 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1501 {
1502 tree x, new_decl;
1503
1504 tree *slot = &info->var_map->get_or_insert (decl);
1505 if (*slot)
1506 return *slot;
1507
1508 /* Make sure frame_decl gets created. */
1509 (void) get_frame_type (info);
1510 x = info->frame_decl;
1511 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1512
1513 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1514 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1515 DECL_CONTEXT (new_decl) = info->context;
1516 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1517 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1518 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1519 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1520 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1521 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1522 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1523 if ((TREE_CODE (decl) == PARM_DECL
1524 || TREE_CODE (decl) == RESULT_DECL
1525 || TREE_CODE (decl) == VAR_DECL)
1526 && DECL_BY_REFERENCE (decl))
1527 DECL_BY_REFERENCE (new_decl) = 1;
1528
1529 SET_DECL_VALUE_EXPR (new_decl, x);
1530 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1531 *slot = new_decl;
1532
1533 DECL_CHAIN (new_decl) = info->debug_var_chain;
1534 info->debug_var_chain = new_decl;
1535
1536 /* Do not emit debug info twice. */
1537 DECL_IGNORED_P (decl) = 1;
1538
1539 return new_decl;
1540 }
1541
1542
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.
   TP points at the tree being examined; *WALK_SUBTREES tells walk_tree
   whether to recurse; DATA is the walk_stmt_info whose val_only/is_lhs
   flags describe the context the rewritten value must fit.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Use the debug stand-in unless expansion of this decl is
	     suppressed (OMP clauses), in which case read through the
	     frame record directly.  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  /* In a value context, load (or store through) a temporary so
	     the result is a simple value.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  /* recompute_tree_invariant_for_addr_expr examines the current
	     function, so temporarily switch to INFO->context.  */
	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Index, lower bound and element size may all contain
		 references that need rewriting.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      /* Any other expression resets the context to "simple rvalue"
	 before its operands are visited.  */
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1693
1694 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1695 struct walk_stmt_info *);
1696
/* Helper for convert_local_reference.  Convert all the references in
   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.
   Returns true if any clause was rewritten to refer to the local frame
   record, i.e. the OMP region needs access to the frame.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Decls replaced below must not be expanded again when the region
     body is walked; build the new suppression set in a copy so the
     caller can restore the old one.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* A placeholder means GIMPLE init/merge sequences must be
	     walked in the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* The step expression may itself reference frame decls.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
				      wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables are never rewritten.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      /* Only decls a child actually references have a frame
		 field; those get the local debug stand-in and are
		 suppressed in the body.  */
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  /* Single-expression operand; rewrite as an rvalue.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					  &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
					  &dummy, wi);
	    }
	  /* Mapped entity may be a bare decl or an arbitrary reference
	     expression (e.g. an array section).  */
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	  /* No operands that could reference frame decls.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to reduction,
     lastprivate and linear clauses, after the new suppression set is
     installed.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      /* Temporarily re-parent the placeholder while its
		 init/merge sequences are walked.  */
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_frame;
}
1888
1889
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.
   Handles the statement kinds walk_gimple_stmt does not recurse into
   properly on its own; sets *HANDLED_OPS_P when all operands of the
   statement have been dealt with here.  */

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* If any clause referenced the frame, share the frame decl with
	 the region so the body can reach it.  */
      save_suppress = info->suppress_expansion;
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
				     wi))
	{
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_SHARED);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Temporaries created while walking the region body must be
	 declared inside the region, not in the enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
	         gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* The index, bound and step expressions need rewriting too.  */
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* A non-offloaded target region is handled like the others.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* An offloaded region cannot share the frame; map it to and from
	 the device instead.  */
      save_suppress = info->suppress_expansion;
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* These regions carry no clauses; only the body needs walking.  */
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      /* Condition operands must be rvalues; let the walker visit them.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      /* A clobber of a variable that now lives in the frame record
	 would clobber the whole record; drop such clobbers.  */
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (!use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (TREE_CODE (decl) == VAR_DECL
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      {
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2075
2076
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
   that reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  */

static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple stmt = gsi_stmt (*gsi);

  /* Only explicit gotos are of interest; let the generic walker handle
     everything else.  */
  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Computed gotos (destination not a LABEL_DECL) cannot target a label
     in another function, so leave them alone.  */
  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A goto to a label defined in the current function is an ordinary
     goto and needs no rewriting.  */
  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Find the nesting_info of the outer function that defines LABEL.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  tree *slot = &i->var_map->get_or_insert (label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (x, target_context);
  x = gsi_gimplify_val (info, x, gsi);
  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (new_label, target_context), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2143
2144
/* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
   are referenced via nonlocal goto from a nested function.  The rewrite
   will involve installing a newly generated DECL_NONLOCAL label, and
   (potentially) a branch around the rtl gunk that is assumed to be
   attached to such a label.  */

static tree
convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			  struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree label, new_label;
  gimple_stmt_iterator tmp_gsi;
  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));

  /* Only label statements are of interest here.  */
  if (!stmt)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_label_label (stmt);

  /* Only labels that convert_nl_goto_reference entered into var_map are
     targets of a non-local goto and need a receiver label.  */
  tree *slot = info->var_map->get (label);
  if (!slot)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_gsi = wi->gsi;
  gsi_prev (&tmp_gsi);
  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
    {
      gimple stmt = gimple_build_goto (label);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
    }

  /* Install the new DECL_NONLOCAL label just before the original one.  */
  new_label = (tree) *slot;
  stmt = gimple_build_label (new_label);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

  *handled_ops_p = true;
  return NULL_TREE;
}
2192
2193
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */

static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  gcall *call;

  /* By default do not descend; the default case below re-enables
     walking for interesting subtrees.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
	 */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x, target_context);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      /* Replace the original address-of expression with the adjusted
	 trampoline pointer.  */
      *tp = x;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2266
2267
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions get no special treatment; let the
	 generic operand walker handle them.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      {
	tree save_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	/* Temporaries created while walking the region's body belong to
	   the region itself, so collect them on a fresh chain and
	   declare them at the head of the region body afterwards.  */
	save_local_var_chain = info->new_local_var_chain;
	info->new_local_var_chain = NULL;
	walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	info->new_local_var_chain = save_local_var_chain;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2325
2326
2327
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Calls that already carry a static chain are left alone.  */
      if (gimple_call_chain (stmt))
	break;
      /* Indirect calls cannot be resolved to a nested function here.  */
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Bit 0 records a chain into our own frame, bit 1 a chain
	     into an outer function's frame.  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Record which chains get added inside the region body, then make
	 sure the region's clause list shares/firstprivatizes the decls
	 those chains refer to.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions need no mapping clauses; just
	 process the body.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* For offloaded regions, map the frame/chain decls onto the
	 device via OMP_CLAUSE_MAP clauses.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2452
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      /* Iterate to a fixed point: stop once the number of functions that
	 need a static chain stops changing.  */
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
2522
/* Context passed through copy_body callbacks when remapping decls and
   types for debug variables: extends copy_body_data with the
   nesting_info whose var_map supplies the replacements.  */
struct nesting_copy_body_data
{
  copy_body_data cb;
  /* The nesting tree node driving the remapping.  */
  struct nesting_info *root;
};
2528
2529 /* A helper subroutine for debug_var_chain type remapping. */
2530
2531 static tree
2532 nesting_copy_decl (tree decl, copy_body_data *id)
2533 {
2534 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2535 tree *slot = nid->root->var_map->get (decl);
2536
2537 if (slot)
2538 return (tree) *slot;
2539
2540 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2541 {
2542 tree new_decl = copy_decl_no_change (decl, id);
2543 DECL_ORIGINAL_TYPE (new_decl)
2544 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2545 return new_decl;
2546 }
2547
2548 if (TREE_CODE (decl) == VAR_DECL
2549 || TREE_CODE (decl) == PARM_DECL
2550 || TREE_CODE (decl) == RESULT_DECL)
2551 return decl;
2552
2553 return copy_decl_no_change (decl, id);
2554 }
2555
2556 /* A helper function for remap_vla_decls. See if *TP contains
2557 some remapped variables. */
2558
2559 static tree
2560 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2561 {
2562 struct nesting_info *root = (struct nesting_info *) data;
2563 tree t = *tp;
2564
2565 if (DECL_P (t))
2566 {
2567 *walk_subtrees = 0;
2568 tree *slot = root->var_map->get (t);
2569
2570 if (slot)
2571 return *slot;
2572 }
2573 return NULL;
2574 }
2575
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Process nested scopes first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* First pass: look for a variably-modified variable whose value
     expression (*ptr form) involves a remapped variable.  If none is
     found, skip the remapping machinery below entirely.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  /* Set up a copy_body context whose copy_decl hook consults
     root->var_map.  */
  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Second pass: resume at the first affected variable found above and
     remap the type and value expression of each affected variable.  */
  for (; var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting_info of the function that declares VAR; if
	   it is not on ROOT's outer chain, nothing to do.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	/* Remap the variable's type ...  */
	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	/* ... then strip unnamed pointer layers from both old and new
	   type in parallel, and if the remapped type still shares its
	   TYPE_DECL name with the old one, remap that decl too.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	/* Finally remap the value expression itself.  */
	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
2673
2674 /* Fold the MEM_REF *E. */
2675 bool
2676 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2677 {
2678 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2679 *ref_p = fold (*ref_p);
2680 return true;
2681 }
2682
/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;
  gimple stmt;
  tree context = root->context;
  struct function *sf;

  /* Accumulates the frame/chain/trampoline initialization statements to
     be inserted at the head of the function body.  */
  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it.  */
      int save_warn_padded = warn_padded;
      tree *adjust;

      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Remove root->frame_decl from root->new_local_var_chain, so
	 that we can declare it also in the lexical blocks, which
	 helps ensure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs().  */
      for (adjust = &root->new_local_var_chain;
	   *adjust != root->frame_decl;
	   adjust = &DECL_CHAIN (*adjust))
	gcc_assert (DECL_CHAIN (*adjust));
      *adjust = DECL_CHAIN (*adjust);

      DECL_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Store either the parameter's address or its value into the
	     corresponding frame field.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  gcc_assert (DECL_STATIC_CHAIN (i->context));
	  /* __builtin_init_trampoline (&tramp_field, &nested_fn,
	     &frame) -- the frame address becomes the nested function's
	     static chain value.  */
	  arg3 = build_addr (root->frame_decl, context);

	  arg2 = build_addr (i->context, context);

	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      /* Prepend the collected statements to the outermost bind's body.  */
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      /* Check whether any debug decl has a variably-modified type.  */
      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost enclosing function whose decls
		   the type depends on.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		/* Remap the type, then strip unnamed pointer layers in
		   parallel and remap a still-shared TYPE_DECL name
		   (same scheme as in remap_vla_decls).  */
		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      /* Declare the debug variables in the outermost bind if it has a
	 block, otherwise chain them onto the function's own block.  */
      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
2907
2908 static void
2909 finalize_nesting_tree (struct nesting_info *root)
2910 {
2911 struct nesting_info *n;
2912 FOR_EACH_NEST_INFO (n, root)
2913 finalize_nesting_tree_1 (n);
2914 }
2915
2916 /* Unnest the nodes and pass them to cgraph. */
2917
2918 static void
2919 unnest_nesting_tree_1 (struct nesting_info *root)
2920 {
2921 struct cgraph_node *node = cgraph_node::get (root->context);
2922
2923 /* For nested functions update the cgraph to reflect unnesting.
2924 We also delay finalizing of these functions up to this point. */
2925 if (node->origin)
2926 {
2927 node->unnest ();
2928 cgraph_node::finalize_function (root->context, true);
2929 }
2930 }
2931
2932 static void
2933 unnest_nesting_tree (struct nesting_info *root)
2934 {
2935 struct nesting_info *n;
2936 FOR_EACH_NEST_INFO (n, root)
2937 unnest_nesting_tree_1 (n);
2938 }
2939
2940 /* Free the data structures allocated during this pass. */
2941
2942 static void
2943 free_nesting_tree (struct nesting_info *root)
2944 {
2945 struct nesting_info *node, *next;
2946
2947 node = iter_nestinfo_start (root);
2948 do
2949 {
2950 next = iter_nestinfo_next (node);
2951 delete node->var_map;
2952 delete node->field_map;
2953 delete node->mem_refs;
2954 free (node);
2955 node = next;
2956 }
2957 while (node);
2958 }
2959
2960 /* Gimplify a function and all its nested functions. */
2961 static void
2962 gimplify_all_functions (struct cgraph_node *root)
2963 {
2964 struct cgraph_node *iter;
2965 if (!gimple_body (root->decl))
2966 gimplify_function_tree (root->decl);
2967 for (iter = root->nested; iter; iter = iter->next_nested)
2968 gimplify_all_functions (iter);
2969 }
2970
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!cgn->nested)
    return;

  /* Make sure GIMPLE bodies exist for FNDECL and all nested functions.  */
  gimplify_all_functions (cgn);

  dump_file = dump_begin (TDI_nested, &dump_flags);
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  /* Build the nesting tree, then rewrite references in several passes:
     non-local variable references first, then references local to each
     frame, then non-local gotos and their receivers.  */
  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  walk_all_functions (convert_nonlocal_reference_stmt,
		      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  /* Rewrite trampoline references and call static chains, lay out the
     frames, and hand the now-independent functions to the callgraph.  */
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      dump_file = NULL;
    }
}
3017
3018 #include "gt-tree-nested.h"