tree-nested.c (convert_local_reference): Handle VIEW_CONVERT_EXPR.
[gcc.git] / gcc / tree-nested.c
1 /* Nested function decomposition for trees.
2 Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "function.h"
29 #include "tree-dump.h"
30 #include "tree-inline.h"
31 #include "tree-gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-flow.h"
34 #include "cgraph.h"
35 #include "expr.h"
36 #include "langhooks.h"
37 #include "pointer-set.h"
38 #include "ggc.h"
39
40
41 /* The object of this pass is to lower the representation of a set of nested
42 functions in order to expose all of the gory details of the various
43 nonlocal references. We want to do this sooner rather than later, in
44 order to give us more freedom in emitting all of the functions in question.
45
46 Back in olden times, when gcc was young, we developed an insanely
47 complicated scheme whereby variables which were referenced nonlocally
48 were forced to live in the stack of the declaring function, and then
49 the nested functions magically discovered where these variables were
50 placed. In order for this scheme to function properly, it required
51 that the outer function be partially expanded, then we switch to
52 compiling the inner function, and once done with those we switch back
53 to compiling the outer function. Such delicate ordering requirements
54 makes it difficult to do whole translation unit optimizations
55 involving such functions.
56
57 The implementation here is much more direct. Everything that can be
58 referenced by an inner function is a member of an explicitly created
59 structure herein called the "nonlocal frame struct". The incoming
60 static chain for a nested function is a pointer to this struct in
61 the parent. In this way, we settle on known offsets from a known
62 base, and so are decoupled from the logic that places objects in the
63 function's stack frame. More importantly, we don't have to wait for
64 that to happen -- since the compilation of the inner function is no
65 longer tied to a real stack frame, the nonlocal frame struct can be
66 allocated anywhere. Which means that the outer function is now
67 inlinable.
68
69 Theory of operation here is very simple. Iterate over all the
70 statements in all the functions (depth first) several times,
71 allocating structures and fields on demand. In general we want to
72 examine inner functions first, so that we can avoid making changes
73 to outer functions which are unnecessary.
74
75 The order of the passes matters a bit, in that later passes will be
76 skipped if it is discovered that the functions don't actually interact
77 at all. That is, they're nested in the lexical sense but could have
78 been written as independent functions without change. */
79
80
/* Per-function state for the nesting-lowering passes.  One of these is
   created for every function; the outer/inner/next links mirror the
   lexical nesting structure of the functions.  */

struct nesting_info
{
  struct nesting_info *outer;	/* Lexically enclosing function, or NULL.  */
  struct nesting_info *inner;	/* First directly nested function.  */
  struct nesting_info *next;	/* Next sibling at the same nesting depth.  */

  struct pointer_map_t *field_map;  /* Maps decl -> FIELD_DECL in frame_type.  */
  struct pointer_map_t *var_map;    /* Maps decl -> replacement decl/field.  */
  bitmap suppress_expansion;	    /* DECL_UIDs whose expansion is suppressed;
				       saved/restored around OMP constructs.  */

  tree context;			/* The FUNCTION_DECL this record describes.  */
  tree new_local_var_chain;	/* Chain of temporaries created in CONTEXT,
				   to be declared in a BIND_EXPR later.  */
  tree debug_var_chain;		/* Chain of debug decls carrying
				   DECL_VALUE_EXPR (see
				   get_nonlocal_debug_decl).  */
  tree frame_type;		/* RECORD_TYPE of the non-local frame.  */
  tree frame_decl;		/* The VAR_DECL instance of frame_type.  */
  tree chain_field;		/* FIELD_DECL "__chain" linking to the
				   outer frame.  */
  tree chain_decl;		/* PARM_DECL "CHAIN" holding the incoming
				   static chain.  */
  tree nl_goto_field;		/* FIELD_DECL for the non-local goto buffer.  */

  bool any_parm_remapped;	/* A PARM_DECL received a frame field.  */
  bool any_tramp_created;	/* A trampoline field was created.  */
  char static_chain_added;	/* NOTE(review): bit flags set by code outside
				   this chunk — confirm exact meaning there.  */
};
104
105
106 /* Obstack used for the bitmaps in the struct above. */
107 static struct bitmap_obstack nesting_info_bitmap_obstack;
108
109
110 /* We're working in so many different function contexts simultaneously,
111 that create_tmp_var is dangerous. Prevent mishap. */
112 #define create_tmp_var cant_use_create_tmp_var_here_dummy
113
114 /* Like create_tmp_var, except record the variable for registration at
115 the given nesting level. */
116
117 static tree
118 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
119 {
120 tree tmp_var;
121
122 /* If the type is of variable size or a type which must be created by the
123 frontend, something is wrong. Note that we explicitly allow
124 incomplete types here, since we create them ourselves here. */
125 gcc_assert (!TREE_ADDRESSABLE (type));
126 gcc_assert (!TYPE_SIZE_UNIT (type)
127 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
128
129 tmp_var = create_tmp_var_raw (type, prefix);
130 DECL_CONTEXT (tmp_var) = info->context;
131 TREE_CHAIN (tmp_var) = info->new_local_var_chain;
132 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
133 if (TREE_CODE (type) == COMPLEX_TYPE
134 || TREE_CODE (type) == VECTOR_TYPE)
135 DECL_GIMPLE_REG_P (tmp_var) = 1;
136
137 info->new_local_var_chain = tmp_var;
138
139 return tmp_var;
140 }
141
142 /* Take the address of EXP to be used within function CONTEXT.
143 Mark it for addressability as necessary. */
144
145 tree
146 build_addr (tree exp, tree context)
147 {
148 tree base = exp;
149 tree save_context;
150 tree retval;
151
152 while (handled_component_p (base))
153 base = TREE_OPERAND (base, 0);
154
155 if (DECL_P (base))
156 TREE_ADDRESSABLE (base) = 1;
157
158 /* Building the ADDR_EXPR will compute a set of properties for
159 that ADDR_EXPR. Those properties are unfortunately context
160 specific. ie, they are dependent on CURRENT_FUNCTION_DECL.
161
162 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
163 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
164 way the properties are for the ADDR_EXPR are computed properly. */
165 save_context = current_function_decl;
166 current_function_decl = context;
167 retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
168 current_function_decl = save_context;
169 return retval;
170 }
171
172 /* Insert FIELD into TYPE, sorted by alignment requirements. */
173
174 void
175 insert_field_into_struct (tree type, tree field)
176 {
177 tree *p;
178
179 DECL_CONTEXT (field) = type;
180
181 for (p = &TYPE_FIELDS (type); *p ; p = &TREE_CHAIN (*p))
182 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
183 break;
184
185 TREE_CHAIN (field) = *p;
186 *p = field;
187 }
188
/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name the record "FRAME.<function>" so it is recognizable in
	 debug output.  */
      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}
225
226 /* Return true if DECL should be referenced by pointer in the non-local
227 frame structure. */
228
229 static bool
230 use_pointer_in_frame (tree decl)
231 {
232 if (TREE_CODE (decl) == PARM_DECL)
233 {
234 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
235 sized decls, and inefficient to copy large aggregates. Don't bother
236 moving anything but scalar variables. */
237 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
238 }
239 else
240 {
241 /* Variable sized types make things "interesting" in the frame. */
242 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
243 }
244 }
245
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.
   With NO_INSERT, return NULL instead of creating a missing field.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  void **slot;

  if (insert == NO_INSERT)
    {
      slot = pointer_map_contains (info->field_map, decl);
      return slot ? *slot : NULL;
    }

  slot = pointer_map_insert (info->field_map, decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* The frame holds only a pointer; the object itself stays
	     wherever the declaring function placed it.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The object lives in the frame; mirror the decl's type,
	     alignment, addressability and volatility on the field.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  DECL_ALIGN (field) = DECL_ALIGN (decl);
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      /* Record that a parameter was remapped so its incoming value
	 can be copied into the frame.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
293
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;
  if (!decl)
    {
      tree type;

      /* The chain is a pointer to the enclosing function's frame.  */
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;
    }
  return decl;
}
329
330 /* Build or return the field within the non-local frame state that holds
331 the static chain for INFO->CONTEXT. This is the way to walk back up
332 multiple nesting levels. */
333
334 static tree
335 get_chain_field (struct nesting_info *info)
336 {
337 tree field = info->chain_field;
338 if (!field)
339 {
340 tree type = build_pointer_type (get_frame_type (info->outer));
341
342 field = make_node (FIELD_DECL);
343 DECL_NAME (field) = get_identifier ("__chain");
344 TREE_TYPE (field) = type;
345 DECL_ALIGN (field) = TYPE_ALIGN (type);
346 DECL_NONADDRESSABLE_P (field) = 1;
347
348 insert_field_into_struct (get_frame_type (info), field);
349
350 info->chain_field = field;
351 }
352 return field;
353 }
354
355 /* Copy EXP into a temporary. Allocate the temporary in the context of
356 INFO and insert the initialization statement before TSI. */
357
358 static tree
359 init_tmp_var (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
360 {
361 tree t, stmt;
362
363 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
364 stmt = build_gimple_modify_stmt (t, exp);
365 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
366 tsi_link_before (tsi, stmt, TSI_SAME_STMT);
367
368 return t;
369 }
370
371 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
372
373 static tree
374 tsi_gimplify_val (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
375 {
376 if (is_gimple_val (exp))
377 return exp;
378 else
379 return init_tmp_var (info, exp, tsi);
380 }
381
382 /* Similarly, but copy from the temporary and insert the statement
383 after the iterator. */
384
385 static tree
386 save_tmp_var (struct nesting_info *info, tree exp,
387 tree_stmt_iterator *tsi)
388 {
389 tree t, stmt;
390
391 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
392 stmt = build_gimple_modify_stmt (exp, t);
393 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
394 tsi_link_after (tsi, stmt, TSI_SAME_STMT);
395
396 return t;
397 }
398
/* Build or return the type used to represent a nested function trampoline.  */

/* Cached across calls; the trampoline layout is target-wide, not
   per-function.  */
static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (void)
{
  tree record, t;
  unsigned align, size;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* The type is a record containing one char array of SIZE bytes,
     aligned as computed above.  */
  t = build_index_type (build_int_cst (NULL_TREE, size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  record = make_node (RECORD_TYPE);
  TYPE_NAME (record) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (record) = t;
  layout_type (record);

  /* NOTE(review): the computed RECORD is returned but does not appear
     to be stored into trampoline_type here — confirm whether caching
     is performed elsewhere or is simply ineffective.  */
  return record;
}
436
437 /* Given DECL, a nested function, find or create a field in the non-local
438 frame structure for a trampoline for this function. */
439
440 static tree
441 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
442 enum insert_option insert)
443 {
444 void **slot;
445
446 if (insert == NO_INSERT)
447 {
448 slot = pointer_map_contains (info->var_map, decl);
449 return slot ? *slot : NULL;
450 }
451
452 slot = pointer_map_insert (info->var_map, decl);
453 if (!*slot)
454 {
455 tree field = make_node (FIELD_DECL);
456 DECL_NAME (field) = DECL_NAME (decl);
457 TREE_TYPE (field) = get_trampoline_type ();
458 TREE_ADDRESSABLE (field) = 1;
459
460 insert_field_into_struct (get_frame_type (info), field);
461 *slot = field;
462
463 info->any_tramp_created = true;
464 }
465
466 return *slot;
467 }
468
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Save area size in words, plus one for the frame pointer.  */
      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (build_int_cst (NULL_TREE, size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
511 \f
/* Helper function for walk_stmts.  Walk output operands of an ASM_EXPR,
   then input operands, invoking WI->CALLBACK on each with VAL_ONLY and
   IS_LHS set according to the operand's constraint.  */

static void
walk_asm_expr (struct walk_stmt_info *wi, tree stmt)
{
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  wi->is_lhs = true;
  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      /* Output constraints are collected so input constraints that
	 reference them by number can be parsed below.  */
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* A register-only output needs a plain value; a memory output
	 may keep a full reference expression.  */
      wi->val_only = (allows_reg || !allows_mem);
      walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      wi->val_only = (allows_reg || !allows_mem);
      /* Although input "m" is not really a LHS, we need a lvalue.  */
      wi->is_lhs = !wi->val_only;
      walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
    }

  /* Restore the default walking state for subsequent statements.  */
  wi->is_lhs = false;
  wi->val_only = true;
}
552
/* Iterate over all sub-statements of *TP calling walk_tree with
   WI->CALLBACK for every sub-expression in each statement found.  */

void
walk_stmts (struct walk_stmt_info *wi, tree *tp)
{
  tree t = *tp;
  int walk_subtrees;

  if (!t)
    return;

  if (wi->want_locations && EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  switch (TREE_CODE (t))
    {
    case STATEMENT_LIST:
      {
	/* Recurse on each statement, keeping WI->TSI current so the
	   callback can insert statements around the one being walked.  */
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  {
	    wi->tsi = i;
	    walk_stmts (wi, tsi_stmt_ptr (i));
	  }
      }
      break;

    case COND_EXPR:
      /* The condition is an expression; the arms are statements.  */
      walk_tree (&COND_EXPR_COND (t), wi->callback, wi, NULL);
      walk_stmts (wi, &COND_EXPR_THEN (t));
      walk_stmts (wi, &COND_EXPR_ELSE (t));
      break;
    case CATCH_EXPR:
      walk_stmts (wi, &CATCH_BODY (t));
      break;
    case EH_FILTER_EXPR:
      walk_stmts (wi, &EH_FILTER_FAILURE (t));
      break;
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
      walk_stmts (wi, &TREE_OPERAND (t, 0));
      walk_stmts (wi, &TREE_OPERAND (t, 1));
      break;

    case BIND_EXPR:
      /* Optionally let the callback see the BIND_EXPR itself before
	 descending into its body; it may suppress the descent.  */
      if (wi->want_bind_expr)
	{
	  walk_subtrees = 1;
	  wi->callback (tp, &walk_subtrees, wi);
	  if (!walk_subtrees)
	    break;
	}
      walk_stmts (wi, &BIND_EXPR_BODY (t));
      break;

    case RETURN_EXPR:
      /* Likewise for RETURN_EXPR and its operand.  */
      if (wi->want_return_expr)
	{
	  walk_subtrees = 1;
	  wi->callback (tp, &walk_subtrees, wi);
	  if (!walk_subtrees)
	    break;
	}
      walk_stmts (wi, &TREE_OPERAND (t, 0));
      break;

    case GIMPLE_MODIFY_STMT:
      /* A formal temporary lhs may use a COMPONENT_REF rhs.  */
      wi->val_only = !is_gimple_formal_tmp_var (GIMPLE_STMT_OPERAND (t, 0));
      walk_tree (&GIMPLE_STMT_OPERAND (t, 1), wi->callback, wi, NULL);

      /* If the rhs is appropriate for a memory, we may use a
	 COMPONENT_REF on the lhs.  */
      wi->val_only = !is_gimple_mem_rhs (GIMPLE_STMT_OPERAND (t, 1));
      wi->is_lhs = true;
      walk_tree (&GIMPLE_STMT_OPERAND (t, 0), wi->callback, wi, NULL);

      /* Restore the default walking state.  */
      wi->val_only = true;
      wi->is_lhs = false;
      break;

    case ASM_EXPR:
      /* Constraint-dependent walking; see walk_asm_expr.  */
      walk_asm_expr (wi, *tp);
      break;

    default:
      /* Any other statement is walked as a plain expression tree.  */
      wi->val_only = true;
      walk_tree (tp, wi->callback, wi, NULL);
      break;
    }
}
645
646 /* Invoke CALLBACK on all statements of *STMT_P. */
647
648 static void
649 walk_body (walk_tree_fn callback, struct nesting_info *info, tree *stmt_p)
650 {
651 struct walk_stmt_info wi;
652
653 memset (&wi, 0, sizeof (wi));
654 wi.callback = callback;
655 wi.info = info;
656 wi.val_only = true;
657
658 walk_stmts (&wi, stmt_p);
659 }
660
/* Invoke CALLBACK on all statements of INFO->CONTEXT, i.e. the saved
   body of the function this nesting_info describes.  */

static inline void
walk_function (walk_tree_fn callback, struct nesting_info *info)
{
  walk_body (callback, info, &DECL_SAVED_TREE (info->context));
}
668
669 /* Similarly for ROOT and all functions nested underneath, depth first. */
670
671 static void
672 walk_all_functions (walk_tree_fn callback, struct nesting_info *root)
673 {
674 do
675 {
676 if (root->inner)
677 walk_all_functions (callback, root->inner);
678 walk_function (callback, root);
679 root = root->next;
680 }
681 while (root);
682 }
683 \f
684 /* We have to check for a fairly pathological case. The operands of function
685 nested function are to be interpreted in the context of the enclosing
686 function. So if any are variably-sized, they will get remapped when the
687 enclosing function is inlined. But that remapping would also have to be
688 done in the types of the PARM_DECLs of the nested function, meaning the
689 argument types of that function will disagree with the arguments in the
690 calls to that function. So we'd either have to make a copy of the nested
691 function corresponding to each time the enclosing function was inlined or
692 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
693 function. The former is not practical. The latter would still require
694 detecting this case to know when to add the conversions. So, for now at
695 least, we don't inline such an enclosing function.
696
697 We have to do that check recursively, so here return indicating whether
698 FNDECL has such a nested function. ORIG_FN is the function we were
699 trying to inline to use for checking whether any argument is variably
700 modified by anything in it.
701
702 It would be better to do this in tree-inline.c so that we could give
703 the appropriate warning for why a function can't be inlined, but that's
704 too late since the nesting structure has already been flattened and
705 adding a flag just to record this fact seems a waste of a flag. */
706
707 static bool
708 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
709 {
710 struct cgraph_node *cgn = cgraph_node (fndecl);
711 tree arg;
712
713 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
714 {
715 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg))
716 if (variably_modified_type_p (TREE_TYPE (arg), 0), orig_fndecl)
717 return true;
718
719 if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
720 return true;
721 }
722
723 return false;
724 }
725
726 /* Construct our local datastructure describing the function nesting
727 tree rooted by CGN. */
728
729 static struct nesting_info *
730 create_nesting_tree (struct cgraph_node *cgn)
731 {
732 struct nesting_info *info = XCNEW (struct nesting_info);
733 info->field_map = pointer_map_create ();
734 info->var_map = pointer_map_create ();
735 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
736 info->context = cgn->decl;
737
738 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
739 {
740 struct nesting_info *sub = create_nesting_tree (cgn);
741 sub->outer = info;
742 sub->next = info->inner;
743 info->inner = sub;
744 }
745
746 /* See discussion at check_for_nested_with_variably_modified for a
747 discussion of why this has to be here. */
748 if (check_for_nested_with_variably_modified (info->context, info->context))
749 DECL_UNINLINABLE (info->context) = true;
750
751 return info;
752 }
753
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before TSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  tree_stmt_iterator *tsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The target is this very function: its chain is simply the
	 address of its own frame.  */
      x = build_addr (info->frame_decl, target_context);
    }
  else
    {
      /* Start from the incoming static chain and follow the __chain
	 field up one nesting level at a time until we reach the
	 frame of TARGET_CONTEXT, materializing each step into a
	 temporary.  */
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, tsi);
	}
    }

  return x;
}
784
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before TSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, tree_stmt_iterator *tsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
    }
  else
    {
      /* Walk the __chain links up to TARGET_CONTEXT's frame,
	 materializing each intermediate pointer into a temporary.  */
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, tsi);
	}

      /* Dereference the final chain pointer to get the frame itself.  */
      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
    }

  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
821
/* A subroutine of convert_nonlocal_reference.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OpenMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;
  void **slot;

  slot = pointer_map_insert (info->var_map, decl);

  /* Reuse a previously created debug decl for the same variable.  */
  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
    }

  /* Select the frame field for DECL and, for by-pointer decls, add the
     extra indirection to reach the object itself.  */
  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;

  /* The new decl is a pure alias: its value expression is the frame
     access built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  /* Cache it and chain it for later declaration.  */
  *slot = new_decl;
  TREE_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  return new_decl;
}
888
889 /* Called via walk_function+walk_tree, rewrite all references to VAR
890 and PARM_DECLs that belong to outer functions.
891
892 The rewrite will involve some number of structure accesses back up
893 the static chain. E.g. for a variable FOO up one nesting level it'll
894 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
895 indirections apply to decls for which use_pointer_in_frame is true. */
896
897 static bool convert_nonlocal_omp_clauses (tree *, struct walk_stmt_info *);
898
899 static tree
900 convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
901 {
902 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
903 struct nesting_info *info = wi->info;
904 tree t = *tp;
905 tree save_local_var_chain;
906 bitmap save_suppress;
907
908 *walk_subtrees = 0;
909 switch (TREE_CODE (t))
910 {
911 case VAR_DECL:
912 /* Non-automatic variables are never processed. */
913 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
914 break;
915 /* FALLTHRU */
916
917 case PARM_DECL:
918 if (decl_function_context (t) != info->context)
919 {
920 tree x;
921 wi->changed = true;
922
923 x = get_nonlocal_debug_decl (info, t);
924 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
925 {
926 tree target_context = decl_function_context (t);
927 struct nesting_info *i;
928 for (i = info->outer; i->context != target_context; i = i->outer)
929 continue;
930 x = lookup_field_for_decl (i, t, INSERT);
931 x = get_frame_field (info, target_context, x, &wi->tsi);
932 if (use_pointer_in_frame (t))
933 {
934 x = init_tmp_var (info, x, &wi->tsi);
935 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
936 }
937 }
938
939 if (wi->val_only)
940 {
941 if (wi->is_lhs)
942 x = save_tmp_var (info, x, &wi->tsi);
943 else
944 x = init_tmp_var (info, x, &wi->tsi);
945 }
946
947 *tp = x;
948 }
949 break;
950
951 case GOTO_EXPR:
952 /* Don't walk non-local gotos for now. */
953 if (TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL)
954 {
955 *walk_subtrees = 1;
956 wi->val_only = true;
957 wi->is_lhs = false;
958 }
959 break;
960
961 case LABEL_DECL:
962 /* We're taking the address of a label from a parent function, but
963 this is not itself a non-local goto. Mark the label such that it
964 will not be deleted, much as we would with a label address in
965 static storage. */
966 if (decl_function_context (t) != info->context)
967 FORCED_LABEL (t) = 1;
968 break;
969
970 case ADDR_EXPR:
971 {
972 bool save_val_only = wi->val_only;
973
974 wi->val_only = false;
975 wi->is_lhs = false;
976 wi->changed = false;
977 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference, wi, NULL);
978 wi->val_only = true;
979
980 if (wi->changed)
981 {
982 tree save_context;
983
984 /* If we changed anything, then TREE_INVARIANT is be wrong,
985 since we're no longer directly referencing a decl. */
986 save_context = current_function_decl;
987 current_function_decl = info->context;
988 recompute_tree_invariant_for_addr_expr (t);
989 current_function_decl = save_context;
990
991 /* If the callback converted the address argument in a context
992 where we only accept variables (and min_invariant, presumably),
993 then compute the address into a temporary. */
994 if (save_val_only)
995 *tp = tsi_gimplify_val (wi->info, t, &wi->tsi);
996 }
997 }
998 break;
999
1000 case REALPART_EXPR:
1001 case IMAGPART_EXPR:
1002 case COMPONENT_REF:
1003 case ARRAY_REF:
1004 case ARRAY_RANGE_REF:
1005 case BIT_FIELD_REF:
1006 /* Go down this entire nest and just look at the final prefix and
1007 anything that describes the references. Otherwise, we lose track
1008 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1009 wi->val_only = true;
1010 wi->is_lhs = false;
1011 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1012 {
1013 if (TREE_CODE (t) == COMPONENT_REF)
1014 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
1015 NULL);
1016 else if (TREE_CODE (t) == ARRAY_REF
1017 || TREE_CODE (t) == ARRAY_RANGE_REF)
1018 {
1019 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
1020 NULL);
1021 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
1022 NULL);
1023 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference, wi,
1024 NULL);
1025 }
1026 else if (TREE_CODE (t) == BIT_FIELD_REF)
1027 {
1028 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
1029 NULL);
1030 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
1031 NULL);
1032 }
1033 }
1034 wi->val_only = false;
1035 walk_tree (tp, convert_nonlocal_reference, wi, NULL);
1036 break;
1037
1038 case VIEW_CONVERT_EXPR:
1039 /* Just request to look at the subtrees, leaving val_only and lhs
1040 untouched. This might actually be for !val_only + lhs, in which
1041 case we don't want to force a replacement by a temporary. */
1042 *walk_subtrees = 1;
1043 break;
1044
1045 case OMP_PARALLEL:
1046 save_suppress = info->suppress_expansion;
1047 if (convert_nonlocal_omp_clauses (&OMP_PARALLEL_CLAUSES (t), wi))
1048 {
1049 tree c, decl;
1050 decl = get_chain_decl (info);
1051 c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
1052 OMP_CLAUSE_DECL (c) = decl;
1053 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
1054 OMP_PARALLEL_CLAUSES (t) = c;
1055 }
1056
1057 save_local_var_chain = info->new_local_var_chain;
1058 info->new_local_var_chain = NULL;
1059
1060 walk_body (convert_nonlocal_reference, info, &OMP_PARALLEL_BODY (t));
1061
1062 if (info->new_local_var_chain)
1063 declare_vars (info->new_local_var_chain, OMP_PARALLEL_BODY (t), false);
1064 info->new_local_var_chain = save_local_var_chain;
1065 info->suppress_expansion = save_suppress;
1066 break;
1067
1068 case OMP_FOR:
1069 case OMP_SECTIONS:
1070 case OMP_SINGLE:
1071 save_suppress = info->suppress_expansion;
1072 convert_nonlocal_omp_clauses (&OMP_CLAUSES (t), wi);
1073 walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
1074 info->suppress_expansion = save_suppress;
1075 break;
1076
1077 case OMP_SECTION:
1078 case OMP_MASTER:
1079 case OMP_ORDERED:
1080 walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
1081 break;
1082
1083 default:
1084 if (!IS_TYPE_OR_DECL_P (t))
1085 {
1086 *walk_subtrees = 1;
1087 wi->val_only = true;
1088 wi->is_lhs = false;
1089 }
1090 break;
1091 }
1092
1093 return NULL_TREE;
1094 }
1095
1096 static bool
1097 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1098 {
1099 struct nesting_info *info = wi->info;
1100 bool need_chain = false;
1101 tree clause, decl;
1102 int dummy;
1103 bitmap new_suppress;
1104
1105 new_suppress = BITMAP_GGC_ALLOC ();
1106 bitmap_copy (new_suppress, info->suppress_expansion);
1107
1108 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1109 {
1110 switch (OMP_CLAUSE_CODE (clause))
1111 {
1112 case OMP_CLAUSE_PRIVATE:
1113 case OMP_CLAUSE_FIRSTPRIVATE:
1114 case OMP_CLAUSE_LASTPRIVATE:
1115 case OMP_CLAUSE_REDUCTION:
1116 case OMP_CLAUSE_COPYPRIVATE:
1117 case OMP_CLAUSE_SHARED:
1118 decl = OMP_CLAUSE_DECL (clause);
1119 if (decl_function_context (decl) != info->context)
1120 {
1121 bitmap_set_bit (new_suppress, DECL_UID (decl));
1122 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1123 need_chain = true;
1124 }
1125 break;
1126
1127 case OMP_CLAUSE_SCHEDULE:
1128 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1129 break;
1130 /* FALLTHRU */
1131 case OMP_CLAUSE_IF:
1132 case OMP_CLAUSE_NUM_THREADS:
1133 wi->val_only = true;
1134 wi->is_lhs = false;
1135 convert_nonlocal_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1136 wi);
1137 break;
1138
1139 case OMP_CLAUSE_NOWAIT:
1140 case OMP_CLAUSE_ORDERED:
1141 case OMP_CLAUSE_DEFAULT:
1142 case OMP_CLAUSE_COPYIN:
1143 break;
1144
1145 default:
1146 gcc_unreachable ();
1147 }
1148 }
1149
1150 info->suppress_expansion = new_suppress;
1151
1152 return need_chain;
1153 }
1154
1155 /* A subroutine of convert_local_reference. Create a local variable
1156 in the parent function with DECL_VALUE_EXPR set to reference the
1157 field in FRAME. This is used both for debug info and in OpenMP
1158 lowering. */
1159
1160 static tree
1161 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1162 {
1163 tree x, new_decl;
1164 void **slot;
1165
1166 slot = pointer_map_insert (info->var_map, decl);
1167 if (*slot)
1168 return *slot;
1169
1170 /* Make sure frame_decl gets created. */
1171 (void) get_frame_type (info);
1172 x = info->frame_decl;
1173 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1174
1175 new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1176 DECL_CONTEXT (new_decl) = info->context;
1177 DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
1178 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1179 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1180 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1181 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1182 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1183 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1184 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1185
1186 SET_DECL_VALUE_EXPR (new_decl, x);
1187 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1188 *slot = new_decl;
1189
1190 TREE_CHAIN (new_decl) = info->debug_var_chain;
1191 info->debug_var_chain = new_decl;
1192
1193 /* Do not emit debug info twice. */
1194 DECL_IGNORED_P (decl) = 1;
1195
1196 return new_decl;
1197 }
1198
1199 /* Called via walk_function+walk_tree, rewrite all references to VAR
1200 and PARM_DECLs that were referenced by inner nested functions.
1201 The rewrite will be a structure reference to the local frame variable. */
1202
1203 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1204
static tree
convert_local_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info;
  tree t = *tp, field, x;
  bool save_val_only;
  tree save_local_var_chain;
  bitmap save_suppress;

  /* We handle each node explicitly; only cases that opt in set
     *walk_subtrees back to 1.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Default to the debug decl with the frame-field value expr;
	     if expansion was not suppressed (e.g. by an OMP clause),
	     emit an explicit load of the frame field instead.  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->tsi);

	  /* In a context that needs a simple value, go through a
	     temporary: a save for stores, an init for loads.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->tsi);
	      else
		x = init_tmp_var (info, x, &wi->tsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      /* Process the operand without forcing it into a temporary, then
	 restore the caller's val_only setting.  */
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = tsi_gimplify_val (wi->info, t, &wi->tsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  /* The auxiliary operands (offsets, sizes, indices) are all
	     rvalues; walk each kind's operands explicitly.  */
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference, wi,
			 NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
			 NULL);
	    }
	}
      /* The base object itself must stay a reference, not a value.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    case OMP_PARALLEL:
      /* If any clause needed the frame, make the frame object itself
	 shared with the parallel region.  */
      save_suppress = info->suppress_expansion;
      if (convert_local_omp_clauses (&OMP_PARALLEL_CLAUSES (t), wi))
	{
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (OMP_CLAUSE_SHARED);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
	  OMP_PARALLEL_CLAUSES (t) = c;
	}

      /* Collect temporaries created inside the parallel body separately
	 so they get declared in the body, not the enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference, info, &OMP_PARALLEL_BODY (t));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain, OMP_PARALLEL_BODY (t), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case OMP_FOR:
    case OMP_SECTIONS:
    case OMP_SINGLE:
      /* Clause-bearing constructs: rewrite clauses, then the body, with
	 the suppression bitmap scoped to this construct.  */
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (&OMP_CLAUSES (t), wi);
      walk_body (convert_local_reference, info, &OMP_BODY (t));
      info->suppress_expansion = save_suppress;
      break;

    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
      /* Clause-less constructs: just rewrite the body.  */
      walk_body (convert_local_reference, info, &OMP_BODY (t));
      break;

    default:
      /* Any other expression: recurse, expecting simple rvalues.  */
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1380
1381 static bool
1382 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1383 {
1384 struct nesting_info *info = wi->info;
1385 bool need_frame = false;
1386 tree clause, decl;
1387 int dummy;
1388 bitmap new_suppress;
1389
1390 new_suppress = BITMAP_GGC_ALLOC ();
1391 bitmap_copy (new_suppress, info->suppress_expansion);
1392
1393 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1394 {
1395 switch (OMP_CLAUSE_CODE (clause))
1396 {
1397 case OMP_CLAUSE_PRIVATE:
1398 case OMP_CLAUSE_FIRSTPRIVATE:
1399 case OMP_CLAUSE_LASTPRIVATE:
1400 case OMP_CLAUSE_REDUCTION:
1401 case OMP_CLAUSE_COPYPRIVATE:
1402 case OMP_CLAUSE_SHARED:
1403 decl = OMP_CLAUSE_DECL (clause);
1404 if (decl_function_context (decl) == info->context
1405 && !use_pointer_in_frame (decl))
1406 {
1407 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1408 if (field)
1409 {
1410 bitmap_set_bit (new_suppress, DECL_UID (decl));
1411 OMP_CLAUSE_DECL (clause)
1412 = get_local_debug_decl (info, decl, field);
1413 need_frame = true;
1414 }
1415 }
1416 break;
1417
1418 case OMP_CLAUSE_SCHEDULE:
1419 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1420 break;
1421 /* FALLTHRU */
1422 case OMP_CLAUSE_IF:
1423 case OMP_CLAUSE_NUM_THREADS:
1424 wi->val_only = true;
1425 wi->is_lhs = false;
1426 convert_local_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy, wi);
1427 break;
1428
1429 case OMP_CLAUSE_NOWAIT:
1430 case OMP_CLAUSE_ORDERED:
1431 case OMP_CLAUSE_DEFAULT:
1432 case OMP_CLAUSE_COPYIN:
1433 break;
1434
1435 default:
1436 gcc_unreachable ();
1437 }
1438 }
1439
1440 info->suppress_expansion = new_suppress;
1441
1442 return need_frame;
1443 }
1444
1445 /* Called via walk_function+walk_tree, rewrite all GOTO_EXPRs that
1446 reference labels from outer functions. The rewrite will be a
1447 call to __builtin_nonlocal_goto. */
1448
static tree
convert_nl_goto_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info, *i;
  tree t = *tp, label, new_label, target_context, x, field;
  void **slot;

  /* Only GOTO_EXPRs whose destination is a LABEL_DECL declared in a
     strictly enclosing function need rewriting; everything else is
     left alone.  */
  *walk_subtrees = 0;
  if (TREE_CODE (t) != GOTO_EXPR)
    return NULL_TREE;
  label = GOTO_DESTINATION (t);
  if (TREE_CODE (label) != LABEL_DECL)
    return NULL_TREE;
  target_context = decl_function_context (label);
  if (target_context == info->context)
    return NULL_TREE;

  /* Find the nesting_info of the function that declares LABEL.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  slot = pointer_map_insert (i->var_map, label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label ();
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  /* The second argument is the address of the save area inside the
     target function's frame, reached through the static chain.  */
  x = get_frame_field (info, target_context, field, &wi->tsi);
  x = build_addr (x, target_context);
  x = tsi_gimplify_val (info, x, &wi->tsi);
  x = build_call_expr (implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO], 2,
		       build_addr (new_label, target_context), x);

  /* Replace the goto statement in place, keeping its source location.  */
  SET_EXPR_LOCUS (x, EXPR_LOCUS (tsi_stmt (wi->tsi)));
  *tsi_stmt_ptr (wi->tsi) = x;

  return NULL_TREE;
}
1500
1501 /* Called via walk_function+walk_tree, rewrite all LABEL_EXPRs that
1502 are referenced via nonlocal goto from a nested function. The rewrite
1503 will involve installing a newly generated DECL_NONLOCAL label, and
1504 (potentially) a branch around the rtl gunk that is assumed to be
1505 attached to such a label. */
1506
static tree
convert_nl_goto_receiver (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info;
  tree t = *tp, label, new_label, x;
  tree_stmt_iterator tmp_tsi;
  void **slot;

  *walk_subtrees = 0;
  if (TREE_CODE (t) != LABEL_EXPR)
    return NULL_TREE;
  label = LABEL_EXPR_LABEL (t);

  /* var_map only has an entry for labels that are targets of a
     non-local goto (recorded by convert_nl_goto_reference); anything
     else is left untouched.  */
  slot = pointer_map_contains (info->var_map, label);
  if (!slot)
    return NULL_TREE;

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_tsi = wi->tsi;
  tsi_prev (&tmp_tsi);
  if (tsi_end_p (tmp_tsi) || block_may_fallthru (tsi_stmt (tmp_tsi)))
    {
      x = build1 (GOTO_EXPR, void_type_node, label);
      tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);
    }

  /* Insert the DECL_NONLOCAL receiver label immediately before the
     original user label, after the branch-around (if emitted).  */
  new_label = (tree) *slot;
  x = build1 (LABEL_EXPR, void_type_node, new_label);
  tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);

  return NULL_TREE;
}
1541
1542 /* Called via walk_function+walk_tree, rewrite all references to addresses
1543 of nested functions that require the use of trampolines. The rewrite
1544 will involve a reference a trampoline generated for the occasion. */
1545
static tree
convert_tramp_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info, *i;
  tree t = *tp, decl, target_context, x;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (DECL_NO_STATIC_CHAIN (decl))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      /* INSERT allocates the trampoline field on first use.  */
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->tsi);
      x = build_addr (x, target_context);
      x = tsi_gimplify_val (info, x, &wi->tsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      x = build_call_expr (implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE],
			   1, x);
      x = init_tmp_var (info, x, &wi->tsi);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->tsi);

      *tp = x;
      break;

    case CALL_EXPR:
      /* Only walk call arguments, lest we generate trampolines for
	 direct calls.  */
      {
	int nargs = call_expr_nargs (t);
	int i;
	for (i = 0; i < nargs; i++)
	  walk_tree (&CALL_EXPR_ARG (t, i), convert_tramp_reference, wi, NULL);
      }
      break;

    default:
      /* Recurse into anything else that may contain an ADDR_EXPR.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
1620
1621 /* Called via walk_function+walk_tree, rewrite all CALL_EXPRs that
1622 reference nested functions to make sure that the static chain is
1623 set up properly for the call. */
1624
static tree
convert_call_expr (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info;
  tree t = *tp, decl, target_context;
  char save_static_chain_added;
  int i;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case CALL_EXPR:
      /* Direct calls to a nested function that uses a static chain get
	 an explicit CALL_EXPR_STATIC_CHAIN operand.  Bit 0 of
	 static_chain_added records a same-context callee, bit 1 an
	 outer-context one.  */
      decl = get_callee_fndecl (t);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && !DECL_NO_STATIC_CHAIN (decl))
	{
	  CALL_EXPR_STATIC_CHAIN (t)
	    = get_static_chain (info, target_context, &wi->tsi);
	  info->static_chain_added
	    |= (1 << (info->context != target_context));
	}
      break;

    case RETURN_EXPR:
    case GIMPLE_MODIFY_STMT:
    case WITH_SIZE_EXPR:
      /* Only return modify and with_size_expr may contain calls.  */
      *walk_subtrees = 1;
      break;

    case OMP_PARALLEL:
      /* Track separately which chain/frame decls were needed inside the
	 parallel body, so they can be added as firstprivate clauses.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_call_expr, info, &OMP_PARALLEL_BODY (t));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  /* Bit 1 corresponds to the chain decl, bit 0 to the frame.  */
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = OMP_PARALLEL_CLAUSES (t); c; c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
	      OMP_PARALLEL_CLAUSES (t) = c;
	    }
	}
      /* Propagate the body's needs to the enclosing context as well.  */
      info->static_chain_added |= save_static_chain_added;
      break;

    case OMP_FOR:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      walk_body (convert_call_expr, info, &OMP_BODY (t));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
1701
1702 /* Walk the nesting tree starting with ROOT, depth first. Convert all
1703 trampolines and call expressions. On the way back up, determine if
1704 a nested function actually uses its static chain; if not, remember that. */
1705
1706 static void
1707 convert_all_function_calls (struct nesting_info *root)
1708 {
1709 do
1710 {
1711 if (root->inner)
1712 convert_all_function_calls (root->inner);
1713
1714 walk_function (convert_tramp_reference, root);
1715 walk_function (convert_call_expr, root);
1716
1717 /* If the function does not use a static chain, then remember that. */
1718 if (root->outer && !root->chain_decl && !root->chain_field)
1719 DECL_NO_STATIC_CHAIN (root->context) = 1;
1720 else
1721 gcc_assert (!DECL_NO_STATIC_CHAIN (root->context));
1722
1723 root = root->next;
1724 }
1725 while (root);
1726 }
1727
1728 /* Do "everything else" to clean up or complete state collected by the
1729 various walking passes -- lay out the types and decls, generate code
1730 to initialize the frame decl, store critical expressions in the
1731 struct function for rtl to find. */
1732
static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  tree stmt_list = NULL;
  tree context = root->context;
  struct function *sf;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = TREE_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Store either the parameter's address (pointer-in-frame case)
	     or its value into the corresponding frame field.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  x = build_gimple_modify_stmt (y, x);
	  append_to_statement_list (x, &stmt_list);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      x = build_gimple_modify_stmt (x, get_chain_decl (root));
      append_to_statement_list (x, &stmt_list);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  /* __builtin_init_trampoline (&frame.tramp_field, &nested_fn,
	     static_chain_value-or-NULL).  */
	  if (DECL_NO_STATIC_CHAIN (i->context))
	    arg3 = null_pointer_node;
	  else
	    arg3 = build_addr (root->frame_decl, context);

	  arg2 = build_addr (i->context, context);

	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
	  x = build_call_expr (x, 3, arg1, arg2, arg3);
	  append_to_statement_list (x, &stmt_list);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      /* Prepend the init code to the existing function body.  */
      annotate_all_with_locus (&stmt_list,
			       DECL_SOURCE_LOCATION (context));
      append_to_statement_list (BIND_EXPR_BODY (DECL_SAVED_TREE (context)),
				&stmt_list);
      BIND_EXPR_BODY (DECL_SAVED_TREE (context)) = stmt_list;
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain, DECL_SAVED_TREE (root->context),
		  false);
  if (root->debug_var_chain)
    declare_vars (root->debug_var_chain, DECL_SAVED_TREE (root->context),
		  true);

  /* Dump the translated tree function.  */
  dump_function (TDI_nested, root->context);
}
1854
1855 static void
1856 finalize_nesting_tree (struct nesting_info *root)
1857 {
1858 do
1859 {
1860 if (root->inner)
1861 finalize_nesting_tree (root->inner);
1862 finalize_nesting_tree_1 (root);
1863 root = root->next;
1864 }
1865 while (root);
1866 }
1867
1868 /* Unnest the nodes and pass them to cgraph. */
1869
1870 static void
1871 unnest_nesting_tree_1 (struct nesting_info *root)
1872 {
1873 struct cgraph_node *node = cgraph_node (root->context);
1874
1875 /* For nested functions update the cgraph to reflect unnesting.
1876 We also delay finalizing of these functions up to this point. */
1877 if (node->origin)
1878 {
1879 cgraph_unnest_node (cgraph_node (root->context));
1880 cgraph_finalize_function (root->context, true);
1881 }
1882 }
1883
1884 static void
1885 unnest_nesting_tree (struct nesting_info *root)
1886 {
1887 do
1888 {
1889 if (root->inner)
1890 unnest_nesting_tree (root->inner);
1891 unnest_nesting_tree_1 (root);
1892 root = root->next;
1893 }
1894 while (root);
1895 }
1896
1897 /* Free the data structures allocated during this pass. */
1898
1899 static void
1900 free_nesting_tree (struct nesting_info *root)
1901 {
1902 struct nesting_info *next;
1903 do
1904 {
1905 if (root->inner)
1906 free_nesting_tree (root->inner);
1907 pointer_map_destroy (root->var_map);
1908 pointer_map_destroy (root->field_map);
1909 next = root->next;
1910 free (root);
1911 root = next;
1912 }
1913 while (root);
1914 }
1915
1916 /* Main entry point for this pass. Process FNDECL and all of its nested
1917 subroutines and turn them into something less tightly bound. */
1918
void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node (fndecl);
  if (!cgn->nested)
    return;

  /* The passes below run in a fixed order: references must be rewritten
     before calls are converted, and everything must be converted before
     the trees are finalized and handed back to cgraph.  */
  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);
  walk_all_functions (convert_nonlocal_reference, root);
  walk_all_functions (convert_local_reference, root);
  walk_all_functions (convert_nl_goto_reference, root);
  walk_all_functions (convert_nl_goto_receiver, root);
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);
  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);
}
1942
1943 #include "gt-tree-nested.h"