[gcc.git] / gcc / cp / lambda.c
1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
5
6 Copyright (C) 1998-2020 Free Software Foundation, Inc.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
14
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33 #include "target.h"
34
35 /* Constructor for a lambda expression. */
36
37 tree
38 build_lambda_expr (void)
39 {
40 tree lambda = make_node (LAMBDA_EXPR);
41 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
42 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
43 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
44 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
45 LAMBDA_EXPR_MUTABLE_P (lambda) = false;
46 return lambda;
47 }
48
49 /* Create the closure object for a LAMBDA_EXPR. */
50
51 tree
52 build_lambda_object (tree lambda_expr)
53 {
54 /* Build aggregate constructor call.
55 - cp_parser_braced_list
56 - cp_parser_functional_cast */
57 vec<constructor_elt, va_gc> *elts = NULL;
58 tree node, expr, type;
59 location_t saved_loc;
60
61 if (processing_template_decl || lambda_expr == error_mark_node)
62 return lambda_expr;
63
64 /* Make sure any error messages refer to the lambda-introducer. */
65 saved_loc = input_location;
66 input_location = LAMBDA_EXPR_LOCATION (lambda_expr);
67
68 for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
69 node;
70 node = TREE_CHAIN (node))
71 {
72 tree field = TREE_PURPOSE (node);
73 tree val = TREE_VALUE (node);
74
75 if (field == error_mark_node)
76 {
77 expr = error_mark_node;
78 goto out;
79 }
80
81 if (TREE_CODE (val) == TREE_LIST)
82 val = build_x_compound_expr_from_list (val, ELK_INIT,
83 tf_warning_or_error);
84
85 if (DECL_P (val))
86 mark_used (val);
87
88 /* Mere mortals can't copy arrays with aggregate initialization, so
89 do some magic to make it work here. */
90 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
91 val = build_array_copy (val);
92 else if (DECL_NORMAL_CAPTURE_P (field)
93 && !DECL_VLA_CAPTURE_P (field)
94 && !TYPE_REF_P (TREE_TYPE (field)))
95 {
96 /* "the entities that are captured by copy are used to
97 direct-initialize each corresponding non-static data
98 member of the resulting closure object."
99
100 There's normally no way to express direct-initialization
101 from an element of a CONSTRUCTOR, so we build up a special
102 TARGET_EXPR to bypass the usual copy-initialization. */
103 val = force_rvalue (val, tf_warning_or_error);
104 if (TREE_CODE (val) == TARGET_EXPR)
105 TARGET_EXPR_DIRECT_INIT_P (val) = true;
106 }
107
108 CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
109 }
110
111 expr = build_constructor (init_list_type_node, elts);
112 CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;
113
114 /* N2927: "[The closure] class type is not an aggregate."
115 But we briefly treat it as an aggregate to make this simpler. */
116 type = LAMBDA_EXPR_CLOSURE (lambda_expr);
117 CLASSTYPE_NON_AGGREGATE (type) = 0;
118 expr = finish_compound_literal (type, expr, tf_warning_or_error);
119 CLASSTYPE_NON_AGGREGATE (type) = 1;
120
121 out:
122 input_location = saved_loc;
123 return expr;
124 }
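/* For example, a lambda written [x, &y] {...} reaches this point with a
   capture list of two fields; the {x, y} CONSTRUCTOR built above
   direct-initializes the corresponding members of the closure object. */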
125
126 /* Return an initialized RECORD_TYPE for LAMBDA.
127 LAMBDA must have its explicit captures already. */
128
129 tree
130 begin_lambda_type (tree lambda)
131 {
132 /* Lambda names are nearly but not quite anonymous. */
133 tree name = make_anon_name ();
134 IDENTIFIER_LAMBDA_P (name) = true;
135
136 /* Create the new RECORD_TYPE for this lambda. */
137 tree type = xref_tag (/*tag_code=*/record_type, name);
138 if (type == error_mark_node)
139 return error_mark_node;
140
141 /* Designate it as a struct so that we can use aggregate initialization. */
142 CLASSTYPE_DECLARED_CLASS (type) = false;
143
144 /* Cross-reference the expression and the type. */
145 LAMBDA_EXPR_CLOSURE (lambda) = type;
146 CLASSTYPE_LAMBDA_EXPR (type) = lambda;
147
148 /* In C++17, assume the closure is literal; we'll clear the flag later if
149 necessary. */
150 if (cxx_dialect >= cxx17)
151 CLASSTYPE_LITERAL_P (type) = true;
152
153 /* Clear base types. */
154 xref_basetypes (type, /*bases=*/NULL_TREE);
155
156 /* Start the class. */
157 type = begin_class_definition (type);
158
159 return type;
160 }
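/* Note that the closure is declared as a struct rather than a class only
   so that build_lambda_object above can use aggregate initialization; the
   closure type itself still behaves as a non-aggregate for user code. */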
161
162 /* Returns the type to use for the return type of the operator() of a
163 closure class. */
164
165 tree
166 lambda_return_type (tree expr)
167 {
168 if (expr == NULL_TREE)
169 return void_type_node;
170 if (type_unknown_p (expr)
171 || BRACE_ENCLOSED_INITIALIZER_P (expr))
172 {
173 cxx_incomplete_type_error (expr, TREE_TYPE (expr));
174 return error_mark_node;
175 }
176 gcc_checking_assert (!type_dependent_expression_p (expr));
177 return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
178 }
179
180 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
181 closure type. */
182
183 tree
184 lambda_function (tree lambda)
185 {
186 tree type;
187 if (TREE_CODE (lambda) == LAMBDA_EXPR)
188 type = LAMBDA_EXPR_CLOSURE (lambda);
189 else
190 type = lambda;
191 gcc_assert (LAMBDA_TYPE_P (type));
192 /* Don't let debug_tree cause instantiation. */
193 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
194 && !COMPLETE_OR_OPEN_TYPE_P (type))
195 return NULL_TREE;
196 lambda = lookup_member (type, call_op_identifier,
197 /*protect=*/0, /*want_type=*/false,
198 tf_warning_or_error);
199 if (lambda)
200 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
201 return lambda;
202 }
203
204 /* Returns the type to use for the FIELD_DECL corresponding to the
205 capture of EXPR. EXPLICIT_INIT_P indicates whether this is a
206 C++14 init capture, and BY_REFERENCE_P indicates whether we're
207 capturing by reference. */
208
209 tree
210 lambda_capture_field_type (tree expr, bool explicit_init_p,
211 bool by_reference_p)
212 {
213 tree type;
214 bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));
215
216 if (!is_this && explicit_init_p)
217 {
218 tree auto_node = make_auto ();
219
220 type = auto_node;
221 if (by_reference_p)
222 /* Add the reference now, so deduction doesn't lose
223 outermost CV qualifiers of EXPR. */
224 type = build_reference_type (type);
225 if (uses_parameter_packs (expr))
226 /* Stick with 'auto' even if the type could be deduced. */;
227 else
228 type = do_auto_deduction (type, expr, auto_node);
229 }
230 else if (!is_this && type_dependent_expression_p (expr))
231 {
232 type = cxx_make_type (DECLTYPE_TYPE);
233 DECLTYPE_TYPE_EXPR (type) = expr;
234 DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
235 DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
236 SET_TYPE_STRUCTURAL_EQUALITY (type);
237 }
238 else
239 {
240 type = non_reference (unlowered_expr_type (expr));
241
242 if (!is_this
243 && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
244 type = build_reference_type (type);
245 }
246
247 return type;
248 }
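/* So capturing an int variable by copy gives a field of type int,
   capturing it by reference gives int&, and an init-capture such as
   [i = expr] deduces the field type as if by 'auto' (with a reference
   wrapped around the auto first when the init-capture is by reference). */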
249
250 /* Returns true iff DECL is a lambda capture proxy variable created by
251 build_capture_proxy. */
252
253 bool
254 is_capture_proxy (tree decl)
255 {
256 return (VAR_P (decl)
257 && DECL_HAS_VALUE_EXPR_P (decl)
258 && !DECL_ANON_UNION_VAR_P (decl)
259 && !DECL_DECOMPOSITION_P (decl)
260 && !DECL_FNAME_P (decl)
261 && !(DECL_ARTIFICIAL (decl)
262 && DECL_LANG_SPECIFIC (decl)
263 && DECL_OMP_PRIVATIZED_MEMBER (decl))
264 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
265 }
266
267 /* Returns true iff DECL is a capture proxy for a normal capture
268 (i.e. without explicit initializer). */
269
270 bool
271 is_normal_capture_proxy (tree decl)
272 {
273 if (!is_capture_proxy (decl))
274 /* It's not a capture proxy. */
275 return false;
276
277 return (DECL_LANG_SPECIFIC (decl)
278 && DECL_CAPTURED_VARIABLE (decl));
279 }
280
281 /* Returns true iff DECL is a capture proxy for a normal capture
282 of a constant variable. */
283
284 bool
285 is_constant_capture_proxy (tree decl)
286 {
287 if (is_normal_capture_proxy (decl))
288 return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
289 return false;
290 }
291
292 /* VAR is a capture proxy created by build_capture_proxy; add it to the
293 current function, which is the operator() for the appropriate lambda. */
294
295 void
296 insert_capture_proxy (tree var)
297 {
298 if (is_normal_capture_proxy (var))
299 {
300 tree cap = DECL_CAPTURED_VARIABLE (var);
301 if (CHECKING_P)
302 {
303 gcc_assert (!is_normal_capture_proxy (cap));
304 tree old = retrieve_local_specialization (cap);
305 if (old)
306 gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
307 }
308 register_local_specialization (var, cap);
309 }
310
311 /* Put the capture proxy in the extra body block so that it won't clash
312 with a later local variable. */
313 pushdecl_outermost_localscope (var);
314
315 /* And put a DECL_EXPR in the STATEMENT_LIST for the same block. */
316 var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
317 tree stmt_list = (*stmt_list_stack)[1];
318 gcc_assert (stmt_list);
319 append_to_statement_list_force (var, &stmt_list);
320 }
321
322 /* We've just finished processing a lambda; if the containing scope is also
323 a lambda, insert any capture proxies that were created while processing
324 the nested lambda. */
325
326 void
327 insert_pending_capture_proxies (void)
328 {
329 tree lam;
330 vec<tree, va_gc> *proxies;
331 unsigned i;
332
333 if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
334 return;
335
336 lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
337 proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
338 for (i = 0; i < vec_safe_length (proxies); ++i)
339 {
340 tree var = (*proxies)[i];
341 insert_capture_proxy (var);
342 }
343 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
344 LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
345 }
346
347 /* Given REF, a COMPONENT_REF designating a field in the lambda closure,
348 return the type we want the proxy to have: the type of the field itself,
349 with added const-qualification if the lambda isn't mutable and the
350 capture is by value. */
351
352 tree
353 lambda_proxy_type (tree ref)
354 {
355 tree type;
356 if (ref == error_mark_node)
357 return error_mark_node;
358 if (REFERENCE_REF_P (ref))
359 ref = TREE_OPERAND (ref, 0);
360 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
361 type = TREE_TYPE (ref);
362 if (!type || WILDCARD_TYPE_P (non_reference (type)))
363 {
364 type = cxx_make_type (DECLTYPE_TYPE);
365 DECLTYPE_TYPE_EXPR (type) = ref;
366 DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
367 SET_TYPE_STRUCTURAL_EQUALITY (type);
368 }
369 if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
370 type = make_pack_expansion (type);
371 return type;
372 }
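/* Inside a template the field type may still be a wildcard such as auto;
   in that case the proxy gets a DECLTYPE_TYPE so the computation is
   redone at instantiation time. */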
373
374 /* MEMBER is a capture field in a lambda closure class. Now that we're
375 inside the operator(), build a placeholder var for future lookups and
376 debugging. */
377
378 static tree
379 build_capture_proxy (tree member, tree init)
380 {
381 tree var, object, fn, closure, name, lam, type;
382
383 if (PACK_EXPANSION_P (member))
384 member = PACK_EXPANSION_PATTERN (member);
385
386 closure = DECL_CONTEXT (member);
387 fn = lambda_function (closure);
388 lam = CLASSTYPE_LAMBDA_EXPR (closure);
389
390 /* The proxy variable forwards to the capture field. */
391 object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
392 object = finish_non_static_data_member (member, object, NULL_TREE);
393 if (REFERENCE_REF_P (object))
394 object = TREE_OPERAND (object, 0);
395
396 /* Remove the __ inserted by add_capture. */
397 name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);
398
399 type = lambda_proxy_type (object);
400
401 if (name == this_identifier && !INDIRECT_TYPE_P (type))
402 {
403 type = build_pointer_type (type);
404 type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
405 object = build_fold_addr_expr_with_type (object, type);
406 }
407
408 if (DECL_VLA_CAPTURE_P (member))
409 {
410 /* Rebuild the VLA type from the pointer and maxindex. */
411 tree field = next_initializable_field (TYPE_FIELDS (type));
412 tree ptr = build_simple_component_ref (object, field);
413 field = next_initializable_field (DECL_CHAIN (field));
414 tree max = build_simple_component_ref (object, field);
415 type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
416 build_index_type (max));
417 type = build_reference_type (type);
418 object = convert (type, ptr);
419 }
420
421 complete_type (type);
422
423 var = build_decl (input_location, VAR_DECL, name, type);
424 SET_DECL_VALUE_EXPR (var, object);
425 DECL_HAS_VALUE_EXPR_P (var) = 1;
426 DECL_ARTIFICIAL (var) = 1;
427 TREE_USED (var) = 1;
428 DECL_CONTEXT (var) = fn;
429
430 if (DECL_NORMAL_CAPTURE_P (member))
431 {
432 if (DECL_VLA_CAPTURE_P (member))
433 {
434 init = CONSTRUCTOR_ELT (init, 0)->value;
435 init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
436 init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
437 }
438 else
439 {
440 if (PACK_EXPANSION_P (init))
441 init = PACK_EXPANSION_PATTERN (init);
442 }
443
444 if (INDIRECT_REF_P (init))
445 init = TREE_OPERAND (init, 0);
446 STRIP_NOPS (init);
447
448 gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
449 while (is_normal_capture_proxy (init))
450 init = DECL_CAPTURED_VARIABLE (init);
451 retrofit_lang_decl (var);
452 DECL_CAPTURED_VARIABLE (var) = init;
453 }
454
455 if (name == this_identifier)
456 {
457 gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
458 LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
459 }
460
461 if (fn == current_function_decl)
462 insert_capture_proxy (var);
463 else
464 vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);
465
466 return var;
467 }
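/* The effect is that a use of the bare name in the lambda body, say 'x',
   is rewritten through DECL_VALUE_EXPR into an access to the capture
   field '__x' via the operator()'s 'this' pointer. */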
468
469 static GTY(()) tree ptr_id;
470 static GTY(()) tree max_id;
471
472 /* Return a struct containing a pointer and a length for lambda capture of
473 an array of runtime length. */
474
475 static tree
476 vla_capture_type (tree array_type)
477 {
478 tree type = xref_tag (record_type, make_anon_name ());
479 xref_basetypes (type, NULL_TREE);
480 type = begin_class_definition (type);
481 if (!ptr_id)
482 {
483 ptr_id = get_identifier ("ptr");
484 max_id = get_identifier ("max");
485 }
486 tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
487 tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
488 finish_member_declaration (field);
489 field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
490 finish_member_declaration (field);
491 return finish_struct (type, NULL_TREE);
492 }
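/* The record built here is roughly 'struct { T *ptr; size_t max; }',
   which is enough for build_capture_proxy to reconstruct a reference to
   the variable-length array. */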
493
494 /* From an ID and INITIALIZER, create a capture (by reference if
495 BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
496 and return it. If ID is `this', BY_REFERENCE_P says whether
497 `*this' is captured by reference. */
498
499 tree
500 add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
501 bool explicit_init_p)
502 {
503 char *buf;
504 tree type, member, name;
505 bool vla = false;
506 bool variadic = false;
507 tree initializer = orig_init;
508
509 if (PACK_EXPANSION_P (initializer))
510 {
511 initializer = PACK_EXPANSION_PATTERN (initializer);
512 variadic = true;
513 }
514
515 if (TREE_CODE (initializer) == TREE_LIST
516 /* A pack expansion might end up with multiple elements. */
517 && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
518 initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
519 tf_warning_or_error);
520 type = TREE_TYPE (initializer);
521 if (type == error_mark_node)
522 return error_mark_node;
523
524 if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
525 {
526 vla = true;
527 if (!by_reference_p)
528 error ("array of runtime bound cannot be captured by copy, "
529 "only by reference");
530
531 /* For a VLA, we capture the address of the first element and the
532 maximum index, and then reconstruct the VLA for the proxy. */
533 tree elt = cp_build_array_ref (input_location, initializer,
534 integer_zero_node, tf_warning_or_error);
535 initializer = build_constructor_va (init_list_type_node, 2,
536 NULL_TREE, build_address (elt),
537 NULL_TREE, array_type_nelts (type));
538 type = vla_capture_type (type);
539 }
540 else if (!dependent_type_p (type)
541 && variably_modified_type_p (type, NULL_TREE))
542 {
543 sorry ("capture of variably-modified type %qT that is not an N3639 array "
544 "of runtime bound", type);
545 if (TREE_CODE (type) == ARRAY_TYPE
546 && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
547 inform (input_location, "because the array element type %qT has "
548 "variable size", TREE_TYPE (type));
549 return error_mark_node;
550 }
551 else
552 {
553 type = lambda_capture_field_type (initializer, explicit_init_p,
554 by_reference_p);
555 if (type == error_mark_node)
556 return error_mark_node;
557
558 if (id == this_identifier && !by_reference_p)
559 {
560 gcc_assert (INDIRECT_TYPE_P (type));
561 type = TREE_TYPE (type);
562 initializer = cp_build_fold_indirect_ref (initializer);
563 }
564
565 if (dependent_type_p (type))
566 ;
567 else if (id != this_identifier && by_reference_p)
568 {
569 if (!lvalue_p (initializer))
570 {
571 error ("cannot capture %qE by reference", initializer);
572 return error_mark_node;
573 }
574 }
575 else
576 {
577 /* Capture by copy requires a complete type. */
578 type = complete_type (type);
579 if (!COMPLETE_TYPE_P (type))
580 {
581 error ("capture by copy of incomplete type %qT", type);
582 cxx_incomplete_type_inform (type);
583 return error_mark_node;
584 }
585 else if (!verify_type_context (input_location,
586 TCTX_CAPTURE_BY_COPY, type))
587 return error_mark_node;
588 }
589 }
590
591 /* Add __ to the beginning of the field name so that user code
592 won't find the field with name lookup. We can't just leave the name
593 unset because template instantiation uses the name to find
594 instantiated fields. */
595 buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
596 buf[1] = buf[0] = '_';
597 memcpy (buf + 2, IDENTIFIER_POINTER (id),
598 IDENTIFIER_LENGTH (id) + 1);
599 name = get_identifier (buf);
600
601 if (variadic)
602 {
603 type = make_pack_expansion (type);
604 if (explicit_init_p)
605 /* With an explicit initializer 'type' is auto, which isn't really a
606 parameter pack in this context. We will want as many fields as we
607 have elements in the expansion of the initializer, so use its packs
608 instead. */
609 PACK_EXPANSION_PARAMETER_PACKS (type)
610 = uses_parameter_packs (initializer);
611 }
612
613 /* Make member variable. */
614 member = build_decl (input_location, FIELD_DECL, name, type);
615 DECL_VLA_CAPTURE_P (member) = vla;
616
617 if (!explicit_init_p)
618 /* Normal captures are invisible to name lookup but uses are replaced
619 with references to the capture field; we implement this by only
620 really making them invisible in unevaluated context; see
621 qualify_lookup. For now, let's make explicitly initialized captures
622 always visible. */
623 DECL_NORMAL_CAPTURE_P (member) = true;
624
625 if (id == this_identifier)
626 LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;
627
628 /* Add it to the appropriate closure class if we've started it. */
629 if (current_class_type
630 && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
631 {
632 if (COMPLETE_TYPE_P (current_class_type))
633 internal_error ("trying to capture %qD in instantiation of "
634 "generic lambda", id);
635 finish_member_declaration (member);
636 }
637
638 tree listmem = member;
639 if (variadic)
640 {
641 listmem = make_pack_expansion (member);
642 initializer = orig_init;
643 }
644 LAMBDA_EXPR_CAPTURE_LIST (lambda)
645 = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));
646
647 if (LAMBDA_EXPR_CLOSURE (lambda))
648 return build_capture_proxy (member, initializer);
649 /* For explicit captures we haven't started the function yet, so we wait
650 and build the proxy from cp_parser_lambda_body. */
651 LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
652 return NULL_TREE;
653 }
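/* To illustrate: for [x, &y, z = f()] this creates fields __x (by copy),
   __y (of reference type) and __z (type deduced from f()), chains each
   onto LAMBDA_EXPR_CAPTURE_LIST, and builds the proxies x, y and z once
   the closure type and its operator() are available. */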
654
655 /* Register all the capture members on the list CAPTURES, which is the
656 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
657
658 void
659 register_capture_members (tree captures)
660 {
661 if (captures == NULL_TREE)
662 return;
663
664 register_capture_members (TREE_CHAIN (captures));
665
666 tree field = TREE_PURPOSE (captures);
667 if (PACK_EXPANSION_P (field))
668 field = PACK_EXPANSION_PATTERN (field);
669
670 finish_member_declaration (field);
671 }
672
673 /* Similar to add_capture, except this works on a stack of nested lambdas.
674 BY_REFERENCE_P in this case is derived from the default capture mode.
675 Returns the capture for the lambda at the bottom of the stack. */
676
677 tree
678 add_default_capture (tree lambda_stack, tree id, tree initializer)
679 {
680 bool this_capture_p = (id == this_identifier);
681 tree var = NULL_TREE;
682 tree saved_class_type = current_class_type;
683
684 for (tree node = lambda_stack;
685 node;
686 node = TREE_CHAIN (node))
687 {
688 tree lambda = TREE_VALUE (node);
689
690 current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
691 if (DECL_PACK_P (initializer))
692 initializer = make_pack_expansion (initializer);
693 var = add_capture (lambda,
694 id,
695 initializer,
696 /*by_reference_p=*/
697 (this_capture_p
698 || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
699 == CPLD_REFERENCE)),
700 /*explicit_init_p=*/false);
701 initializer = convert_from_reference (var);
702
703 /* Warn about deprecated implicit capture of this via [=]. */
704 if (cxx_dialect >= cxx20
705 && this_capture_p
706 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY)
707 {
708 if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
709 "implicit capture of %qE via %<[=]%> is deprecated "
710 "in C++20", this_identifier))
711 inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
712 "%<*this%> capture");
713 }
714 }
715
716 current_class_type = saved_class_type;
717
718 return var;
719 }
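/* So in nested lambdas like [&] { return [=] { return x; }; }, an
   implicit use of 'x' in the inner lambda adds a capture to each lambda
   on the stack, outermost first, with every inner capture initialized
   from the proxy of the enclosing one. */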
720
721 /* Return the capture pertaining to a use of 'this' in LAMBDA, in the
722 form of an INDIRECT_REF, possibly adding it through default
723 capturing, if ADD_CAPTURE_P is nonzero. If ADD_CAPTURE_P is negative,
724 try to capture but don't complain if we can't. */
725
726 tree
727 lambda_expr_this_capture (tree lambda, int add_capture_p)
728 {
729 tree result;
730
731 tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);
732
733 /* In unevaluated context this isn't an odr-use, so don't capture. */
734 if (cp_unevaluated_operand)
735 add_capture_p = false;
736
737 /* Try to default capture 'this' if we can. */
738 if (!this_capture)
739 {
740 tree lambda_stack = NULL_TREE;
741 tree init = NULL_TREE;
742
743 /* If we are in a lambda function, we can move out until we hit:
744 1. a non-lambda function or NSDMI,
745 2. a lambda function capturing 'this', or
746 3. a non-default capturing lambda function. */
747 for (tree tlambda = lambda; ;)
748 {
749 if (add_capture_p
750 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
751 /* tlambda won't let us capture 'this'. */
752 break;
753
754 if (add_capture_p)
755 lambda_stack = tree_cons (NULL_TREE,
756 tlambda,
757 lambda_stack);
758
759 tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
760 tree containing_function
761 = decl_function_context (TYPE_NAME (closure));
762
763 tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
764 if (ex && TREE_CODE (ex) == FIELD_DECL)
765 {
766 /* Lambda in an NSDMI. We don't have a function to look up
767 'this' in, but we can find (or rebuild) the fake one from
768 inject_this_parameter. */
769 if (!containing_function && !COMPLETE_TYPE_P (closure))
770 /* If we're parsing a lambda in a non-local class,
771 we can find the fake 'this' in scope_chain. */
772 init = scope_chain->x_current_class_ptr;
773 else
774 /* Otherwise it's either gone or buried in
775 function_context_stack, so make another. */
776 init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
777 TYPE_UNQUALIFIED);
778 gcc_checking_assert
779 (init && (TREE_TYPE (TREE_TYPE (init))
780 == current_nonlambda_class_type ()));
781 break;
782 }
783
784 if (containing_function == NULL_TREE)
785 /* We ran out of scopes; there's no 'this' to capture. */
786 break;
787
788 if (!LAMBDA_FUNCTION_P (containing_function))
789 {
790 /* We found a non-lambda function. */
791 if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
792 /* First parameter is 'this'. */
793 init = DECL_ARGUMENTS (containing_function);
794 break;
795 }
796
797 tlambda
798 = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));
799
800 if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
801 {
802 /* An outer lambda has already captured 'this'. */
803 init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
804 break;
805 }
806 }
807
808 if (init)
809 {
810 if (add_capture_p)
811 this_capture = add_default_capture (lambda_stack,
812 /*id=*/this_identifier,
813 init);
814 else
815 this_capture = init;
816 }
817 }
818
819 if (cp_unevaluated_operand)
820 result = this_capture;
821 else if (!this_capture)
822 {
823 if (add_capture_p == 1)
824 {
825 error ("%<this%> was not captured for this lambda function");
826 result = error_mark_node;
827 }
828 else
829 result = NULL_TREE;
830 }
831 else
832 {
833 /* To make sure that current_class_ref is for the lambda. */
834 gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
835 == LAMBDA_EXPR_CLOSURE (lambda));
836
837 result = this_capture;
838
839 /* If 'this' is captured, each use of 'this' is transformed into an
840 access to the corresponding unnamed data member of the closure
841 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
842 ensures that the transformed expression is an rvalue. ] */
843 result = rvalue (result);
844 }
845
846 return result;
847 }
848
849 /* Return the innermost LAMBDA_EXPR we're currently in, if any. */
850
851 tree
852 current_lambda_expr (void)
853 {
854 tree type = current_class_type;
855 while (type && !LAMBDA_TYPE_P (type))
856 type = decl_type_context (TYPE_NAME (type));
857 if (type)
858 return CLASSTYPE_LAMBDA_EXPR (type);
859 else
860 return NULL_TREE;
861 }
862
863 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
864    object, NULL_TREE otherwise. */
865
866 static tree
867 resolvable_dummy_lambda (tree object)
868 {
869 if (!is_dummy_object (object))
870 return NULL_TREE;
871
872 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
873 gcc_assert (!TYPE_PTR_P (type));
874
875 if (type != current_class_type
876 && current_class_type
877 && LAMBDA_TYPE_P (current_class_type)
878 && lambda_function (current_class_type)
879 && DERIVED_FROM_P (type, nonlambda_method_basetype()))
880 return CLASSTYPE_LAMBDA_EXPR (current_class_type);
881
882 return NULL_TREE;
883 }
884
885 /* We don't want to capture 'this' until we know we need it, i.e. after
886 overload resolution has chosen a non-static member function. At that
887 point we call this function to turn a dummy object into a use of the
888 'this' capture. */
889
890 tree
891 maybe_resolve_dummy (tree object, bool add_capture_p)
892 {
893 if (tree lam = resolvable_dummy_lambda (object))
894 if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
895 if (cap != error_mark_node)
896 object = build_fold_indirect_ref (cap);
897
898 return object;
899 }
900
901 /* When parsing a generic lambda containing an argument-dependent
902 member function call we defer overload resolution to instantiation
903  time. But we have to know now whether to capture 'this' or not.
904 Do that if FNS contains any non-static fns.
905 The std doesn't anticipate this case, but I expect this to be the
906 outcome of discussion. */
907
908 void
909 maybe_generic_this_capture (tree object, tree fns)
910 {
911 if (tree lam = resolvable_dummy_lambda (object))
912 if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
913 {
914 /* We've not yet captured, so look at the function set of
915 interest. */
916 if (BASELINK_P (fns))
917 fns = BASELINK_FUNCTIONS (fns);
918 bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
919 if (id_expr)
920 fns = TREE_OPERAND (fns, 0);
921
922 for (lkp_iterator iter (fns); iter; ++iter)
923 if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
924 || TREE_CODE (*iter) == TEMPLATE_DECL)
925 && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
926 {
927 /* Found a non-static member. Capture this. */
928 lambda_expr_this_capture (lam, /*maybe*/-1);
929 break;
930 }
931 }
932 }
933
934 /* Returns the innermost non-lambda function. */
935
936 tree
937 current_nonlambda_function (void)
938 {
939 tree fn = current_function_decl;
940 while (fn && LAMBDA_FUNCTION_P (fn))
941 fn = decl_function_context (fn);
942 return fn;
943 }
944
945 /* Returns the method basetype of the innermost non-lambda function, including
946 a hypothetical constructor if inside an NSDMI, or NULL_TREE if none. */
947
948 tree
949 nonlambda_method_basetype (void)
950 {
951 if (!current_class_ref)
952 return NULL_TREE;
953
954 tree type = current_class_type;
955 if (!type || !LAMBDA_TYPE_P (type))
956 return type;
957
958 while (true)
959 {
960 tree lam = CLASSTYPE_LAMBDA_EXPR (type);
961 tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
962 if (ex && TREE_CODE (ex) == FIELD_DECL)
963 /* Lambda in an NSDMI. */
964 return DECL_CONTEXT (ex);
965
966 tree fn = TYPE_CONTEXT (type);
967 if (!fn || TREE_CODE (fn) != FUNCTION_DECL
968 || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
969 /* No enclosing non-lambda method. */
970 return NULL_TREE;
971 if (!LAMBDA_FUNCTION_P (fn))
972 /* Found an enclosing non-lambda method. */
973 return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
974 type = DECL_CONTEXT (fn);
975 }
976 }
977
978 /* Like current_scope, but looking through lambdas. */
979
980 tree
981 current_nonlambda_scope (void)
982 {
983 tree scope = current_scope ();
984 for (;;)
985 {
986 if (TREE_CODE (scope) == FUNCTION_DECL
987 && LAMBDA_FUNCTION_P (scope))
988 {
989 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
990 continue;
991 }
992 else if (LAMBDA_TYPE_P (scope))
993 {
994 scope = CP_TYPE_CONTEXT (scope);
995 continue;
996 }
997 break;
998 }
999 return scope;
1000 }
1001
1002 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
1003 indicated FN and NARGS, but do not initialize the return type or any of the
1004 argument slots. */
1005
1006 static tree
1007 prepare_op_call (tree fn, int nargs)
1008 {
1009 tree t;
1010
1011 t = build_vl_exp (CALL_EXPR, nargs + 3);
1012 CALL_EXPR_FN (t) = fn;
1013 CALL_EXPR_STATIC_CHAIN (t) = NULL;
1014
1015 return t;
1016 }
1017
1018 /* Return true iff CALLOP is the op() for a generic lambda. */
1019
1020 bool
1021 generic_lambda_fn_p (tree callop)
1022 {
1023 return (LAMBDA_FUNCTION_P (callop)
1024 && DECL_TEMPLATE_INFO (callop)
1025 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
1026 }
1027
1028 /* If the closure TYPE could have a static op(), i.e. the lambda has no
1029    captures, also add a conversion to function pointer. */
1030
1031 void
1032 maybe_add_lambda_conv_op (tree type)
1033 {
1034 bool nested = (cfun != NULL);
1035 bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
1036 tree callop = lambda_function (type);
1037 tree lam = CLASSTYPE_LAMBDA_EXPR (type);
1038
1039 if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
1040 || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
1041 return;
1042
1043 if (processing_template_decl)
1044 return;
1045
1046 bool const generic_lambda_p = generic_lambda_fn_p (callop);
1047
1048 if (!generic_lambda_p && undeduced_auto_decl (callop))
1049 {
1050 /* If the op() wasn't deduced due to errors, give up. */
1051 gcc_assert (errorcount || sorrycount);
1052 return;
1053 }
1054
1055 /* Non-generic non-capturing lambdas only have a conversion function to
1056 pointer to function when the trailing requires-clause's constraints are
1057 satisfied. */
1058 if (!generic_lambda_p && !constraints_satisfied_p (callop))
1059 return;
1060
1061 /* Non-template conversion operators are defined directly with build_call_a
1062 and using DIRECT_ARGVEC for arguments (including 'this'). Templates are
1063 deferred and the CALL is built in-place. In the case of a deduced return
1064 call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
1065 the return type is also built in-place. The arguments of DECLTYPE_CALL in
1066 the return expression may differ in flags from those in the body CALL. In
1067 particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
1068 the body CALL, but not in DECLTYPE_CALL. */
1069
1070 vec<tree, va_gc> *direct_argvec = 0;
1071 tree decltype_call = 0, call = 0;
1072 tree optype = TREE_TYPE (callop);
1073 tree fn_result = TREE_TYPE (optype);
1074
1075 tree thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
1076 if (generic_lambda_p)
1077 {
1078 ++processing_template_decl;
1079
1080 /* Prepare the dependent member call for the static member function
1081 '_FUN' and, potentially, prepare another call to be used in a decltype
1082 return expression for a deduced return call op to allow for simple
1083 implementation of the conversion operator. */
1084
1085 tree instance = cp_build_fold_indirect_ref (thisarg);
1086 tree objfn = lookup_template_function (DECL_NAME (callop),
1087 DECL_TI_ARGS (callop));
1088 objfn = build_min (COMPONENT_REF, NULL_TREE,
1089 instance, objfn, NULL_TREE);
1090 int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;
1091
1092 call = prepare_op_call (objfn, nargs);
1093 if (type_uses_auto (fn_result))
1094 decltype_call = prepare_op_call (objfn, nargs);
1095 }
1096 else
1097 {
1098 direct_argvec = make_tree_vector ();
1099 direct_argvec->quick_push (thisarg);
1100 }
1101
1102 /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
1103 declare the static member function "_FUN" below. For each arg append to
1104 DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
1105 call args (for the template case). If a parameter pack is found, expand
1106 it, flagging it as PACK_EXPANSION_LOCAL_P for the body call. */
1107
1108 tree fn_args = NULL_TREE;
1109 {
1110 int ix = 0;
1111 tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
1112 tree tgt = NULL;
1113
1114 while (src)
1115 {
1116 tree new_node = copy_node (src);
1117
1118 /* Clear TREE_ADDRESSABLE on thunk arguments. */
1119 TREE_ADDRESSABLE (new_node) = 0;
1120
1121 if (!fn_args)
1122 fn_args = tgt = new_node;
1123 else
1124 {
1125 TREE_CHAIN (tgt) = new_node;
1126 tgt = new_node;
1127 }
1128
1129 mark_exp_read (tgt);
1130
1131 if (generic_lambda_p)
1132 {
1133 tree a = tgt;
1134 if (DECL_PACK_P (tgt))
1135 {
1136 a = make_pack_expansion (a);
1137 PACK_EXPANSION_LOCAL_P (a) = true;
1138 }
1139 CALL_EXPR_ARG (call, ix) = a;
1140
1141 if (decltype_call)
1142 {
1143 /* Avoid capturing variables in this context. */
1144 ++cp_unevaluated_operand;
1145 CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
1146 --cp_unevaluated_operand;
1147 }
1148
1149 ++ix;
1150 }
1151 else
1152 vec_safe_push (direct_argvec, tgt);
1153
1154 src = TREE_CHAIN (src);
1155 }
1156 }
1157
1158 if (generic_lambda_p)
1159 {
1160 if (decltype_call)
1161 {
1162 fn_result = finish_decltype_type
1163 (decltype_call, /*id_expression_or_member_access_p=*/false,
1164 tf_warning_or_error);
1165 }
1166 }
1167 else
1168 call = build_call_a (callop,
1169 direct_argvec->length (),
1170 direct_argvec->address ());
1171
1172 CALL_FROM_THUNK_P (call) = 1;
1173 SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
1174
1175 tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
1176 stattype = (cp_build_type_attribute_variant
1177 (stattype, TYPE_ATTRIBUTES (optype)));
1178 if (flag_noexcept_type
1179 && TYPE_NOTHROW_P (TREE_TYPE (callop)))
1180 stattype = build_exception_variant (stattype, noexcept_true_spec);
1181
1182 if (generic_lambda_p)
1183 --processing_template_decl;
1184
1185 /* First build up the conversion op. */
1186
1187 tree rettype = build_pointer_type (stattype);
1188 tree name = make_conv_op_name (rettype);
1189 tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
1190 tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
1191 /* DR 1722: The conversion function should be noexcept. */
1192 fntype = build_exception_variant (fntype, noexcept_true_spec);
1193 tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
1194 SET_DECL_LANGUAGE (convfn, lang_cplusplus);
1195 tree fn = convfn;
1196 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1197 SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
1198 grokclassfn (type, fn, NO_SPECIAL);
1199 set_linkage_according_to_type (type, fn);
1200 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1201 DECL_IN_AGGR_P (fn) = 1;
1202 DECL_ARTIFICIAL (fn) = 1;
1203 DECL_NOT_REALLY_EXTERN (fn) = 1;
1204 DECL_DECLARED_INLINE_P (fn) = 1;
1205 DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1206 if (DECL_IMMEDIATE_FUNCTION_P (callop))
1207 SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1208 DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);
1209
1210 if (nested_def)
1211 DECL_INTERFACE_KNOWN (fn) = 1;
1212
1213 if (generic_lambda_p)
1214 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1215
1216 add_method (type, fn, false);
1217
1218 /* Generic thunk code fails for varargs; we'll complain in mark_used if
1219 the conversion op is used. */
1220 if (varargs_function_p (callop))
1221 {
1222 DECL_DELETED_FN (fn) = 1;
1223 return;
1224 }
1225
1226 /* Now build up the thunk to be returned. */
1227
1228 tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
1229 SET_DECL_LANGUAGE (statfn, lang_cplusplus);
1230 fn = statfn;
1231 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1232 grokclassfn (type, fn, NO_SPECIAL);
1233 set_linkage_according_to_type (type, fn);
1234 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1235 DECL_IN_AGGR_P (fn) = 1;
1236 DECL_ARTIFICIAL (fn) = 1;
1237 DECL_NOT_REALLY_EXTERN (fn) = 1;
1238 DECL_DECLARED_INLINE_P (fn) = 1;
1239 DECL_STATIC_FUNCTION_P (fn) = 1;
1240 DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1241 if (DECL_IMMEDIATE_FUNCTION_P (callop))
1242 SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1243 DECL_ARGUMENTS (fn) = fn_args;
1244 for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
1245 {
1246 /* Avoid duplicate -Wshadow warnings. */
1247 DECL_NAME (arg) = NULL_TREE;
1248 DECL_CONTEXT (arg) = fn;
1249 }
1250 if (nested_def)
1251 DECL_INTERFACE_KNOWN (fn) = 1;
1252
1253 if (generic_lambda_p)
1254 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1255
1256 if (flag_sanitize & SANITIZE_NULL)
1257 /* Don't UBsan this function; we're deliberately calling op() with a null
1258 object argument. */
1259 add_no_sanitize_value (fn, SANITIZE_UNDEFINED);
1260
1261 add_method (type, fn, false);
1262
1263 if (nested)
1264 push_function_context ();
1265 else
1266 /* Still increment function_depth so that we don't GC in the
1267 middle of an expression. */
1268 ++function_depth;
1269
1270 /* Generate the body of the thunk. */
1271
1272 start_preparsed_function (statfn, NULL_TREE,
1273 SF_PRE_PARSED | SF_INCLASS_INLINE);
1274 tree body = begin_function_body ();
1275 tree compound_stmt = begin_compound_stmt (0);
1276 if (!generic_lambda_p)
1277 {
1278 set_flags_from_callee (call);
1279 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1280 call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
1281 }
1282 call = convert_from_reference (call);
1283 finish_return_stmt (call);
1284
1285 finish_compound_stmt (compound_stmt);
1286 finish_function_body (body);
1287
1288 fn = finish_function (/*inline_p=*/true);
1289 if (!generic_lambda_p)
1290 expand_or_defer_fn (fn);
1291
1292 /* Generate the body of the conversion op. */
1293
1294 start_preparsed_function (convfn, NULL_TREE,
1295 SF_PRE_PARSED | SF_INCLASS_INLINE);
1296 body = begin_function_body ();
1297 compound_stmt = begin_compound_stmt (0);
1298
1299 /* decl_needed_p needs to see that it's used. */
1300 TREE_USED (statfn) = 1;
1301 finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1302
1303 finish_compound_stmt (compound_stmt);
1304 finish_function_body (body);
1305
1306 fn = finish_function (/*inline_p=*/true);
1307 if (!generic_lambda_p)
1308 expand_or_defer_fn (fn);
1309
1310 if (nested)
1311 pop_function_context ();
1312 else
1313 --function_depth;
1314 }
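/* The net effect for a captureless lambda such as [](int i){ return i; }
   is two extra members: the static "_FUN", whose body calls operator()
   with a null object pointer, and a conversion function that returns
   &_FUN; the latter is what makes +[](int i){ return i; } yield a plain
   function pointer. */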
1315
1316 /* True if FN is the static function "_FUN" that gets returned from the lambda
1317 conversion operator. */
1318
1319 bool
1320 lambda_static_thunk_p (tree fn)
1321 {
1322 return (fn && TREE_CODE (fn) == FUNCTION_DECL
1323 && DECL_ARTIFICIAL (fn)
1324 && DECL_STATIC_FUNCTION_P (fn)
1325 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1326 }
1327
1328 bool
1329 call_from_lambda_thunk_p (tree call)
1330 {
1331 return (CALL_FROM_THUNK_P (call)
1332 && lambda_static_thunk_p (current_function_decl));
1333 }
1334
1335 /* Returns true iff VAL is a lambda-related declaration which should
1336 be ignored by unqualified lookup. */
1337
1338 bool
1339 is_lambda_ignored_entity (tree val)
1340 {
1341 /* Look past normal, non-VLA capture proxies. */
1342 if (is_normal_capture_proxy (val)
1343 && !variably_modified_type_p (TREE_TYPE (val), NULL_TREE))
1344 return true;
1345
1346 /* Always ignore lambda fields, their names are only for debugging. */
1347 if (TREE_CODE (val) == FIELD_DECL
1348 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1349 return true;
1350
1351 /* None of the lookups that use qualify_lookup want the op() from the
1352 lambda; they want the one from the enclosing class. */
1353 if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
1354 return true;
1355
1356 return false;
1357 }
1358
1359 /* Lambdas that appear in variable initializer or default argument scope
1360 get that in their mangling, so we need to record it. We might as well
1361 use the count for function and namespace scopes as well. */
1362 static GTY(()) tree lambda_scope;
1363 static GTY(()) int lambda_count;
1364 struct GTY(()) tree_int
1365 {
1366 tree t;
1367 int i;
1368 };
1369 static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
1370
1371 void
1372 start_lambda_scope (tree decl)
1373 {
1374 tree_int ti;
1375 gcc_assert (decl);
1376 /* Once we're inside a function, we ignore variable scope and just push
1377 the function again so that popping works properly. */
1378 if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
1379 decl = current_function_decl;
1380 ti.t = lambda_scope;
1381 ti.i = lambda_count;
1382 vec_safe_push (lambda_scope_stack, ti);
1383 if (lambda_scope != decl)
1384 {
1385 /* Don't reset the count if we're still in the same function. */
1386 lambda_scope = decl;
1387 lambda_count = 0;
1388 }
1389 }
1390
1391 void
1392 record_lambda_scope (tree lambda)
1393 {
1394 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
1395 LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
1396 }
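/* The scope and discriminator recorded here feed into mangling, so two
   lambdas appearing in the same variable initializer or default argument
   still get distinct closure type names. */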
1397
1398 /* This lambda is an instantiation of a lambda in a template default argument
1399 that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either. But we do
1400 need to use and increment the global count to avoid collisions. */
1401
1402 void
1403 record_null_lambda_scope (tree lambda)
1404 {
1405 if (vec_safe_is_empty (lambda_scope_stack))
1406 record_lambda_scope (lambda);
1407 else
1408 {
1409 tree_int *p = lambda_scope_stack->begin();
1410 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
1411 LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
1412 }
1413 gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
1414 }
1415
1416 void
1417 finish_lambda_scope (void)
1418 {
1419 tree_int *p = &lambda_scope_stack->last ();
1420 if (lambda_scope != p->t)
1421 {
1422 lambda_scope = p->t;
1423 lambda_count = p->i;
1424 }
1425 lambda_scope_stack->pop ();
1426 }
1427
1428 tree
1429 start_lambda_function (tree fco, tree lambda_expr)
1430 {
1431 /* Let the front end know that we are going to be defining this
1432 function. */
1433 start_preparsed_function (fco,
1434 NULL_TREE,
1435 SF_PRE_PARSED | SF_INCLASS_INLINE);
1436
1437 tree body = begin_function_body ();
1438
1439 /* Push the proxies for any explicit captures. */
1440 for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1441 cap = TREE_CHAIN (cap))
1442 build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
1443
1444 return body;
1445 }
1446
1447 /* Subroutine of prune_lambda_captures: CAP is a node in
1448 LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we
1449 might optimize away the capture, or NULL_TREE if there is no such
1450 variable. */
1451
1452 static tree
1453 var_to_maybe_prune (tree cap)
1454 {
1455 if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
1456 /* Don't prune explicit captures. */
1457 return NULL_TREE;
1458
1459 tree mem = TREE_PURPOSE (cap);
1460 if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
1461 /* Packs and init-captures aren't captures of constant vars. */
1462 return NULL_TREE;
1463
1464 tree init = TREE_VALUE (cap);
1465 if (is_normal_capture_proxy (init))
1466 init = DECL_CAPTURED_VARIABLE (init);
1467 if (decl_constant_var_p (init))
1468 return init;
1469
1470 return NULL_TREE;
1471 }
1472
1473 /* walk_tree helper for prune_lambda_captures: Remember which capture proxies
1474 for constant variables are actually used in the lambda body.
1475
1476 There will always be a DECL_EXPR for the capture proxy; remember it when we
1477 see it, but replace it with any other use. */
1478
1479 static tree
1480 mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
1481 {
1482 hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;
1483
1484 tree var = NULL_TREE;
1485 if (TREE_CODE (*t) == DECL_EXPR)
1486 {
1487 tree decl = DECL_EXPR_DECL (*t);
1488 if (is_constant_capture_proxy (decl))
1489 {
1490 var = DECL_CAPTURED_VARIABLE (decl);
1491 *walk_subtrees = 0;
1492 }
1493 }
1494 else if (is_constant_capture_proxy (*t))
1495 var = DECL_CAPTURED_VARIABLE (*t);
1496
1497 if (var)
1498 {
1499 tree *&slot = const_vars.get_or_insert (var);
1500 if (!slot || VAR_P (*t))
1501 slot = t;
1502 }
1503
1504 return NULL_TREE;
1505 }
1506
1507 /* We're at the end of processing a lambda; go back and remove any captures of
1508 constant variables for which we've folded away all uses. */
1509
1510 static void
1511 prune_lambda_captures (tree body)
1512 {
1513 tree lam = current_lambda_expr ();
1514 if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
1515 /* No uses were optimized away. */
1516 return;
1517 if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
1518 /* No default captures, and we don't prune explicit captures. */
1519 return;
1520
1521 hash_map<tree,tree*> const_vars;
1522
1523 cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);
1524
1525 tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
1526 for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
1527 {
1528 tree cap = *capp;
1529 if (tree var = var_to_maybe_prune (cap))
1530 {
1531 tree **use = const_vars.get (var);
1532 if (use && TREE_CODE (**use) == DECL_EXPR)
1533 {
1534 /* All uses of this capture were folded away, leaving only the
1535 proxy declaration. */
1536
1537 /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST. */
1538 *capp = TREE_CHAIN (cap);
1539
1540 /* And out of TYPE_FIELDS. */
1541 tree field = TREE_PURPOSE (cap);
1542 while (*fieldp != field)
1543 fieldp = &DECL_CHAIN (*fieldp);
1544 *fieldp = DECL_CHAIN (*fieldp);
1545
1546 /* And remove the capture proxy declaration. */
1547 **use = void_node;
1548 continue;
1549 }
1550 }
1551
1552 capp = &TREE_CHAIN (cap);
1553 }
1554 }
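/* For example, with 'const int x = 42;' a use of x inside [=]{ return x; }
   folds to the constant, leaving only the proxy's own DECL_EXPR; the
   loop above then drops the capture field, the capture list entry and
   the proxy declaration altogether. */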
1555
1556 void
1557 finish_lambda_function (tree body)
1558 {
1559 finish_function_body (body);
1560
1561 prune_lambda_captures (body);
1562
1563 /* Finish the function and generate code for it if necessary. */
1564 tree fn = finish_function (/*inline_p=*/true);
1565
1566 /* Only expand if the call op is not a template. */
1567 if (!DECL_TEMPLATE_INFO (fn))
1568 expand_or_defer_fn (fn);
1569 }
1570
1571 #include "gt-cp-lambda.h"