C++20 NB CA378 - Remove constrained non-template functions.
[gcc.git] / gcc / cp / lambda.c
1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
5
6 Copyright (C) 1998-2019 Free Software Foundation, Inc.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
14
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33
34 /* Constructor for a lambda expression. */
35
36 tree
37 build_lambda_expr (void)
38 {
39 tree lambda = make_node (LAMBDA_EXPR);
40 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
41 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
42 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
43 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
44 LAMBDA_EXPR_MUTABLE_P (lambda) = false;
45 return lambda;
46 }
47
/* Create the closure object for a LAMBDA_EXPR, i.e. the aggregate
   initialization of the closure from the capture initializers.
   While processing a template (or on error_mark_node) the LAMBDA_EXPR
   is returned unchanged; returns error_mark_node if any capture field
   is erroneous.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      /* Each list node pairs the capture FIELD_DECL with its
	 initializer expression.  */
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      if (TREE_CODE (val) == TREE_LIST)
	val = build_x_compound_expr_from_list (val, ELK_INIT,
					       tf_warning_or_error);

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && !TYPE_REF_P (TREE_TYPE (field)))
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}
124
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  Returns
   error_mark_node if the tag could not be created.  */

tree
begin_lambda_type (tree lambda)
{
  /* Lambda names are nearly but not quite anonymous.  */
  tree name = make_anon_name ();
  IDENTIFIER_LAMBDA_P (name) = true;

  /* Create the new RECORD_TYPE for this lambda.  */
  tree type = xref_tag (/*tag_code=*/record_type, name,
			/*scope=*/ts_lambda, /*template_header_p=*/false);
  if (type == error_mark_node)
    return error_mark_node;

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx17)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}
161
162 /* Returns the type to use for the return type of the operator() of a
163 closure class. */
164
165 tree
166 lambda_return_type (tree expr)
167 {
168 if (expr == NULL_TREE)
169 return void_type_node;
170 if (type_unknown_p (expr)
171 || BRACE_ENCLOSED_INITIALIZER_P (expr))
172 {
173 cxx_incomplete_type_error (expr, TREE_TYPE (expr));
174 return error_mark_node;
175 }
176 gcc_checking_assert (!type_dependent_expression_p (expr));
177 return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
178 }
179
/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type, or NULL_TREE if lookup finds nothing (or would require
   instantiating an incomplete template instantiation).  */

tree
lambda_function (tree lambda)
{
  tree type;
  /* Accept either the expression or its closure class.  */
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, call_op_identifier,
			  /*protect=*/0, /*want_type=*/false,
			  tf_warning_or_error);
  if (lambda)
    /* For a generic lambda the call op is a template; return the
       underlying FUNCTION_DECL of the first overload found.  */
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}
203
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
			   bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && explicit_init_p)
    {
      /* Init capture: deduce the field type from the initializer as if
	 by auto (or auto& for reference captures).  */
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
	/* Add the reference now, so deduction doesn't lose
	   outermost CV qualifiers of EXPR.  */
	type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else if (!is_this && type_dependent_expression_p (expr))
    {
      /* In a template, defer the computation with a DECLTYPE_TYPE
	 that records the capture kind for later substitution.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else
    {
      /* Ordinary capture: the non-reference type of the expression,
	 wrapped in a reference for by-reference captures.  Functions
	 are always captured by reference.  */
      type = non_reference (unlowered_expr_type (expr));

      if (!is_this
	  && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
	type = build_reference_type (type);
    }

  return type;
}
246
247 /* Returns true iff DECL is a lambda capture proxy variable created by
248 build_capture_proxy. */
249
250 bool
251 is_capture_proxy (tree decl)
252 {
253 return (VAR_P (decl)
254 && DECL_HAS_VALUE_EXPR_P (decl)
255 && !DECL_ANON_UNION_VAR_P (decl)
256 && !DECL_DECOMPOSITION_P (decl)
257 && !DECL_FNAME_P (decl)
258 && !(DECL_ARTIFICIAL (decl)
259 && DECL_LANG_SPECIFIC (decl)
260 && DECL_OMP_PRIVATIZED_MEMBER (decl))
261 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
262 }
263
264 /* Returns true iff DECL is a capture proxy for a normal capture
265 (i.e. without explicit initializer). */
266
267 bool
268 is_normal_capture_proxy (tree decl)
269 {
270 if (!is_capture_proxy (decl))
271 /* It's not a capture proxy. */
272 return false;
273
274 return (DECL_LANG_SPECIFIC (decl)
275 && DECL_CAPTURED_VARIABLE (decl));
276 }
277
278 /* Returns true iff DECL is a capture proxy for a normal capture
279 of a constant variable. */
280
281 bool
282 is_constant_capture_proxy (tree decl)
283 {
284 if (is_normal_capture_proxy (decl))
285 return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
286 return false;
287 }
288
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      /* Register the proxy as the local specialization of the captured
	 variable so that uses of the original inside the body are
	 redirected to the proxy.  */
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
	{
	  /* The captured variable must not itself be a proxy, and any
	     existing specialization must belong to a different op().  */
	  gcc_assert (!is_normal_capture_proxy (cap));
	  tree old = retrieve_local_specialization (cap);
	  if (old)
	    gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
	}
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
318
319 /* We've just finished processing a lambda; if the containing scope is also
320 a lambda, insert any capture proxies that were created while processing
321 the nested lambda. */
322
323 void
324 insert_pending_capture_proxies (void)
325 {
326 tree lam;
327 vec<tree, va_gc> *proxies;
328 unsigned i;
329
330 if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
331 return;
332
333 lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
334 proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
335 for (i = 0; i < vec_safe_length (proxies); ++i)
336 {
337 tree var = (*proxies)[i];
338 insert_capture_proxy (var);
339 }
340 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
341 LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
342 }
343
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  /* Look through the implicit dereference of a reference capture.  */
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      /* The field type is still dependent; defer with a DECLTYPE_TYPE
	 wrapping the member access.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  /* A pack capture gives the proxy a pack-expansion type.  */
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
370
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  INIT is the capture's initializer expression, used to
   record the captured variable for normal captures.  */

static tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !INDIRECT_TYPE_P (type))
    {
      /* A by-value 'this' capture stores *this; the proxy for 'this'
	 itself is a const pointer to that copy.  */
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      object = convert (type, ptr);
    }

  complete_type (type);

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      /* For a normal capture, dig the captured VAR_DECL or PARM_DECL
	 out of the initializer and record it on the proxy.  */
      if (DECL_VLA_CAPTURE_P (member))
	{
	  init = CONSTRUCTOR_ELT (init, 0)->value;
	  init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
	  init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
	}
      else
	{
	  if (PACK_EXPANSION_P (init))
	    init = PACK_EXPANSION_PATTERN (init);
	}

      if (INDIRECT_REF_P (init))
	init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);

      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      /* Chase through proxies of enclosing lambdas to the real entity.  */
      while (is_normal_capture_proxy (init))
	init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      /* Replace the field with the proxy as the 'this' capture.  */
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  /* Insert now if we're already inside the op(); otherwise queue it for
     insert_pending_capture_proxies.  */
  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
465
/* Lazily-created identifiers for the "ptr" and "max" fields of the
   record built by vla_capture_type; GTY so they survive GC.  */
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;
468
/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  ARRAY_TYPE is the captured VLA type; the
   returned record has a "ptr" field pointing at the element type and a
   "max" field (sizetype) holding the maximum index.  */

static tree
vla_capture_type (tree array_type)
{
  /* Build a fresh anonymous record and open its definition.  */
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
490
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  EXPLICIT_INIT_P is true for a
   C++14 init-capture.  Returns the capture proxy, NULL_TREE when the
   proxy is deferred to cp_parser_lambda_body, or error_mark_node.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* A pack capture works on the expansion's pattern.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      /* Other variably-modified types are not implemented.  */
      sorry ("capture of variably-modified type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      return error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
					by_reference_p);
      if (type == error_mark_node)
	return error_mark_node;

      /* Capturing 'this' by value captures '*this'.  */
      if (id == this_identifier && !by_reference_p)
	{
	  gcc_assert (INDIRECT_TYPE_P (type));
	  type = TREE_TYPE (type);
	  initializer = cp_build_fold_indirect_ref (initializer);
	}

      if (dependent_type_p (type))
	;
      else if (id != this_identifier && by_reference_p)
	{
	  /* Only lvalues can be captured by reference.  */
	  if (!lvalue_p (initializer))
	    {
	      error ("cannot capture %qE by reference", initializer);
	      return error_mark_node;
	    }
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  if (variadic)
    {
      type = make_pack_expansion (type);
      if (explicit_init_p)
	/* With an explicit initializer 'type' is auto, which isn't really a
	   parameter pack in this context.  We will want as many fields as we
	   have elements in the expansion of the initializer, so use its packs
	   instead.  */
	PACK_EXPANSION_PARAMETER_PACKS (type)
	  = uses_parameter_packs (initializer);
    }

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
	internal_error ("trying to capture %qD in instantiation of "
			"generic lambda", id);
      finish_member_declaration (member);
    }

  /* Record the capture on the lambda's capture list; a pack capture
     records the expansion of the field and the original initializer.  */
  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
  return NULL_TREE;
}
648
649 /* Register all the capture members on the list CAPTURES, which is the
650 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
651
652 void
653 register_capture_members (tree captures)
654 {
655 if (captures == NULL_TREE)
656 return;
657
658 register_capture_members (TREE_CHAIN (captures));
659
660 tree field = TREE_PURPOSE (captures);
661 if (PACK_EXPANSION_P (field))
662 field = PACK_EXPANSION_PATTERN (field);
663
664 finish_member_declaration (field);
665 }
666
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);
  tree var = NULL_TREE;
  tree saved_class_type = current_class_type;

  /* Capture into each lambda from the outermost inward; the capture of
     one level becomes the initializer for the next.  */
  for (tree node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      /* add_capture expects current_class_type to be the closure.  */
      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (this_capture_p
			  || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);

      /* Warn about deprecated implicit capture of this via [=].  */
      if (cxx_dialect >= cxx2a
	  && this_capture_p
	  && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY)
	{
	  if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
			  "implicit capture of %qE via %<[=]%> is deprecated "
			  "in C++20", this_identifier))
	    inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
		    "%<*this%> capture");
	}
    }

  current_class_type = saved_class_type;

  return var;
}
714
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is nonzero.  If ADD_CAPTURE_P is negative,
   try to capture but don't complain if we can't.  */

tree
lambda_expr_this_capture (tree lambda, int add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture)
    {
      /* LAMBDA_STACK accumulates the lambdas that need a new 'this'
	 capture; INIT is the expression the innermost capture will be
	 initialized from.  */
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
	   1. a non-lambda function or NSDMI,
	   2. a lambda function capturing 'this', or
	   3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  if (add_capture_p
	      && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* tlambda won't let us capture 'this'.  */
	    break;

	  if (add_capture_p)
	    lambda_stack = tree_cons (NULL_TREE,
				      tlambda,
				      lambda_stack);

	  tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
	  tree containing_function
	    = decl_function_context (TYPE_NAME (closure));

	  tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
	  if (ex && TREE_CODE (ex) == FIELD_DECL)
	    {
	      /* Lambda in an NSDMI.  We don't have a function to look up
		 'this' in, but we can find (or rebuild) the fake one from
		 inject_this_parameter.  */
	      if (!containing_function && !COMPLETE_TYPE_P (closure))
		/* If we're parsing a lambda in a non-local class,
		   we can find the fake 'this' in scope_chain.  */
		init = scope_chain->x_current_class_ptr;
	      else
		/* Otherwise it's either gone or buried in
		   function_context_stack, so make another.  */
		init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
					TYPE_UNQUALIFIED);
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  tlambda
	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }
	}

      if (init)
	{
	  if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
						/*id=*/this_identifier,
						init);
	  else
	    this_capture = init;
	}
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      /* Only complain when ADD_CAPTURE_P is strictly 1; negative means
	 best-effort capture.  */
      if (add_capture_p == 1)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
	 ensures that the transformed expression is an rvalue. ]  */
      result = rvalue (result);
    }

  return result;
}
842
843 /* Return the innermost LAMBDA_EXPR we're currently in, if any. */
844
845 tree
846 current_lambda_expr (void)
847 {
848 tree type = current_class_type;
849 while (type && !LAMBDA_TYPE_P (type))
850 type = decl_type_context (TYPE_NAME (type));
851 if (type)
852 return CLASSTYPE_LAMBDA_EXPR (type);
853 else
854 return NULL_TREE;
855 }
856
857 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
858 object. NULL otherwise.. */
859
860 static tree
861 resolvable_dummy_lambda (tree object)
862 {
863 if (!is_dummy_object (object))
864 return NULL_TREE;
865
866 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
867 gcc_assert (!TYPE_PTR_P (type));
868
869 if (type != current_class_type
870 && current_class_type
871 && LAMBDA_TYPE_P (current_class_type)
872 && lambda_function (current_class_type)
873 && DERIVED_FROM_P (type, nonlambda_method_basetype()))
874 return CLASSTYPE_LAMBDA_EXPR (current_class_type);
875
876 return NULL_TREE;
877 }
878
879 /* We don't want to capture 'this' until we know we need it, i.e. after
880 overload resolution has chosen a non-static member function. At that
881 point we call this function to turn a dummy object into a use of the
882 'this' capture. */
883
884 tree
885 maybe_resolve_dummy (tree object, bool add_capture_p)
886 {
887 if (tree lam = resolvable_dummy_lambda (object))
888 if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
889 if (cap != error_mark_node)
890 object = build_fold_indirect_ref (cap);
891
892 return object;
893 }
894
/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
	/* We've not yet captured, so look at the function set of
	   interest.  */
	if (BASELINK_P (fns))
	  fns = BASELINK_FUNCTIONS (fns);
	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
	if (id_expr)
	  fns = TREE_OPERAND (fns, 0);

	for (lkp_iterator iter (fns); iter; ++iter)
	  /* Skip using-declarations unless this was a template-id
	     (whose operand set may legitimately contain templates).  */
	  if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
	       || TREE_CODE (*iter) == TEMPLATE_DECL)
	      && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
	    {
	      /* Found a non-static member.  Capture this.  */
	      lambda_expr_this_capture (lam, /*maybe*/-1);
	      break;
	    }
      }
}
927
928 /* Returns the innermost non-lambda function. */
929
930 tree
931 current_nonlambda_function (void)
932 {
933 tree fn = current_function_decl;
934 while (fn && LAMBDA_FUNCTION_P (fn))
935 fn = decl_function_context (fn);
936 return fn;
937 }
938
/* Returns the method basetype of the innermost non-lambda function, including
   a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  if (!current_class_ref)
    return NULL_TREE;

  tree type = current_class_type;
  /* Not inside a lambda: the current class itself is the answer.  */
  if (!type || !LAMBDA_TYPE_P (type))
    return type;

  /* Walk outward through enclosing closure types.  */
  while (true)
    {
      tree lam = CLASSTYPE_LAMBDA_EXPR (type);
      tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
      if (ex && TREE_CODE (ex) == FIELD_DECL)
	/* Lambda in an NSDMI.  */
	return DECL_CONTEXT (ex);

      tree fn = TYPE_CONTEXT (type);
      if (!fn || TREE_CODE (fn) != FUNCTION_DECL
	  || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
	/* No enclosing non-lambda method.  */
	return NULL_TREE;
      if (!LAMBDA_FUNCTION_P (fn))
	/* Found an enclosing non-lambda method.  */
	return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
      /* FN is itself a lambda op(); continue from its closure.  */
      type = DECL_CONTEXT (fn);
    }
}
971
972 /* Like current_scope, but looking through lambdas. */
973
974 tree
975 current_nonlambda_scope (void)
976 {
977 tree scope = current_scope ();
978 for (;;)
979 {
980 if (TREE_CODE (scope) == FUNCTION_DECL
981 && LAMBDA_FUNCTION_P (scope))
982 {
983 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
984 continue;
985 }
986 else if (LAMBDA_TYPE_P (scope))
987 {
988 scope = CP_TYPE_CONTEXT (scope);
989 continue;
990 }
991 break;
992 }
993 return scope;
994 }
995
996 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
997 indicated FN and NARGS, but do not initialize the return type or any of the
998 argument slots. */
999
1000 static tree
1001 prepare_op_call (tree fn, int nargs)
1002 {
1003 tree t;
1004
1005 t = build_vl_exp (CALL_EXPR, nargs + 3);
1006 CALL_EXPR_FN (t) = fn;
1007 CALL_EXPR_STATIC_CHAIN (t) = NULL;
1008
1009 return t;
1010 }
1011
1012 /* Return true iff CALLOP is the op() for a generic lambda. */
1013
1014 bool
1015 generic_lambda_fn_p (tree callop)
1016 {
1017 return (LAMBDA_FUNCTION_P (callop)
1018 && DECL_TEMPLATE_INFO (callop)
1019 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
1020 }
1021
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  The conversion returns a static thunk "_FUN" whose body simply
   calls op() on a null closure object.  */

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  /* Only capture-less lambdas get the conversion to function pointer.  */
  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && undeduced_auto_decl (callop))
    {
      /* If the op() wasn't deduced due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-generic non-capturing lambdas only have a conversion function to
     pointer to function when the trailing requires-clause's constraints are
     satisfied.  */
  if (!generic_lambda_p && !constraints_satisfied_p (callop))
    return;

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  /* The object argument is a null pointer of the closure type; the thunk
     deliberately calls op() through it (see the no-sanitize note below).  */
  tree thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      tree instance = cp_build_fold_indirect_ref (thisarg);
      tree objfn = lookup_template_function (DECL_NAME (callop),
					     DECL_TI_ARGS (callop));
      objfn = build_min (COMPONENT_REF, NULL_TREE,
			 instance, objfn, NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt = NULL;

    while (src)
      {
	tree new_node = copy_node (src);

	/* Clear TREE_ADDRESSABLE on thunk arguments.  */
	TREE_ADDRESSABLE (new_node) = 0;

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    tree a = tgt;
	    if (DECL_PACK_P (tgt))
	      {
		a = make_pack_expansion (a);
		PACK_EXPANSION_LOCAL_P (a) = true;
	      }
	    CALL_EXPR_ARG (call, ix) = a;

	    if (decltype_call)
	      {
		/* Avoid capturing variables in this context.  */
		++cp_unevaluated_operand;
		CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
		--cp_unevaluated_operand;
	      }

	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }

  /* For a deduced return call op, compute the return type from the decltype
     expression built above; otherwise build the direct call now.  */
  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);

  /* The static thunk's type: CALLOP's signature minus 'this', with CALLOP's
     attributes and (with -fnoexcept-type) its noexcept carried over.  */
  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
	      (stattype, TYPE_ATTRIBUTES (optype)));
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  SET_DECL_LANGUAGE (convfn, lang_cplusplus);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  /* The conversion op is constexpr/consteval iff op() is.  */
  DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
  if (DECL_IMMEDIATE_FUNCTION_P (callop))
    SET_DECL_IMMEDIATE_FUNCTION_P (fn);
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
  SET_DECL_LANGUAGE (statfn, lang_cplusplus);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
  if (DECL_IMMEDIATE_FUNCTION_P (callop))
    SET_DECL_IMMEDIATE_FUNCTION_P (fn);
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    /* Don't UBsan this function; we're deliberately calling op() with a null
       object argument.  */
    add_no_sanitize_value (fn, SANITIZE_UNDEFINED);

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
1307
1308 /* True if FN is the static function "_FUN" that gets returned from the lambda
1309 conversion operator. */
1310
1311 bool
1312 lambda_static_thunk_p (tree fn)
1313 {
1314 return (fn && TREE_CODE (fn) == FUNCTION_DECL
1315 && DECL_ARTIFICIAL (fn)
1316 && DECL_STATIC_FUNCTION_P (fn)
1317 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1318 }
1319
1320 /* Returns true iff VAL is a lambda-related declaration which should
1321 be ignored by unqualified lookup. */
1322
1323 bool
1324 is_lambda_ignored_entity (tree val)
1325 {
1326 /* Look past normal capture proxies. */
1327 if (is_normal_capture_proxy (val))
1328 return true;
1329
1330 /* Always ignore lambda fields, their names are only for debugging. */
1331 if (TREE_CODE (val) == FIELD_DECL
1332 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1333 return true;
1334
1335 /* None of the lookups that use qualify_lookup want the op() from the
1336 lambda; they want the one from the enclosing class. */
1337 if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
1338 return true;
1339
1340 return false;
1341 }
1342
/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */
/* The current scope recorded on lambdas for mangling purposes.  */
static GTY(()) tree lambda_scope;
/* Discriminator within LAMBDA_SCOPE; bumped by record_lambda_scope.  */
static GTY(()) int lambda_count;
/* A saved (scope, count) pair for the stack below.  */
struct GTY(()) tree_int
{
  tree t;
  int i;
};
/* Saved scope/count pairs, pushed by start_lambda_scope and popped by
   finish_lambda_scope.  */
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
1354
1355 void
1356 start_lambda_scope (tree decl)
1357 {
1358 tree_int ti;
1359 gcc_assert (decl);
1360 /* Once we're inside a function, we ignore variable scope and just push
1361 the function again so that popping works properly. */
1362 if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
1363 decl = current_function_decl;
1364 ti.t = lambda_scope;
1365 ti.i = lambda_count;
1366 vec_safe_push (lambda_scope_stack, ti);
1367 if (lambda_scope != decl)
1368 {
1369 /* Don't reset the count if we're still in the same function. */
1370 lambda_scope = decl;
1371 lambda_count = 0;
1372 }
1373 }
1374
/* Record the current lambda scope and discriminator on LAMBDA, and bump
   the discriminator for the next lambda in this scope.  */

void
record_lambda_scope (tree lambda)
{
  LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
  LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
}
1381
/* This lambda is an instantiation of a lambda in a template default argument
   that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either.  But we do
   need to use and increment the global count to avoid collisions.  */

void
record_null_lambda_scope (tree lambda)
{
  if (vec_safe_is_empty (lambda_scope_stack))
    record_lambda_scope (lambda);
  else
    {
      /* Use the outermost saved scope/count pair; the assert below checks
	 that the saved scope at the bottom of the stack is null, so the
	 lambda still ends up with no extra scope.  */
      tree_int *p = lambda_scope_stack->begin();
      LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
      LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
    }
  gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
}
1399
1400 void
1401 finish_lambda_scope (void)
1402 {
1403 tree_int *p = &lambda_scope_stack->last ();
1404 if (lambda_scope != p->t)
1405 {
1406 lambda_scope = p->t;
1407 lambda_count = p->i;
1408 }
1409 lambda_scope_stack->pop ();
1410 }
1411
1412 tree
1413 start_lambda_function (tree fco, tree lambda_expr)
1414 {
1415 /* Let the front end know that we are going to be defining this
1416 function. */
1417 start_preparsed_function (fco,
1418 NULL_TREE,
1419 SF_PRE_PARSED | SF_INCLASS_INLINE);
1420
1421 tree body = begin_function_body ();
1422
1423 /* Push the proxies for any explicit captures. */
1424 for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1425 cap = TREE_CHAIN (cap))
1426 build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
1427
1428 return body;
1429 }
1430
1431 /* Subroutine of prune_lambda_captures: CAP is a node in
1432 LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we
1433 might optimize away the capture, or NULL_TREE if there is no such
1434 variable. */
1435
1436 static tree
1437 var_to_maybe_prune (tree cap)
1438 {
1439 if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
1440 /* Don't prune explicit captures. */
1441 return NULL_TREE;
1442
1443 tree mem = TREE_PURPOSE (cap);
1444 if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
1445 /* Packs and init-captures aren't captures of constant vars. */
1446 return NULL_TREE;
1447
1448 tree init = TREE_VALUE (cap);
1449 if (is_normal_capture_proxy (init))
1450 init = DECL_CAPTURED_VARIABLE (init);
1451 if (decl_constant_var_p (init))
1452 return init;
1453
1454 return NULL_TREE;
1455 }
1456
/* walk_tree helper for prune_lambda_captures: Remember which capture proxies
   for constant variables are actually used in the lambda body.

   There will always be a DECL_EXPR for the capture proxy; remember it when we
   see it, but replace it with any other use.  */

static tree
mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
{
  /* DATA maps each captured constant variable to a remembered use site.  */
  hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;

  tree var = NULL_TREE;
  if (TREE_CODE (*t) == DECL_EXPR)
    {
      /* The proxy's own declaration; don't walk into it.  */
      tree decl = DECL_EXPR_DECL (*t);
      if (is_constant_capture_proxy (decl))
	{
	  var = DECL_CAPTURED_VARIABLE (decl);
	  *walk_subtrees = 0;
	}
    }
  else if (is_constant_capture_proxy (*t))
    /* A use of the proxy in the body.  */
    var = DECL_CAPTURED_VARIABLE (*t);

  if (var)
    {
      /* A real use of the proxy (VAR_P) always overrides a remembered
	 DECL_EXPR, so after the walk the slot holds a DECL_EXPR only when
	 the proxy is otherwise unused — exactly what prune_lambda_captures
	 tests for.  */
      tree *&slot = const_vars.get_or_insert (var);
      if (!slot || VAR_P (*t))
	slot = t;
    }

  return NULL_TREE;
}
1490
/* We're at the end of processing a lambda; go back and remove any captures of
   constant variables for which we've folded away all uses.  */

static void
prune_lambda_captures (tree body)
{
  tree lam = current_lambda_expr ();
  if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
    /* No uses were optimized away.  */
    return;
  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
    /* No default captures, and we don't prune explicit captures.  */
    return;

  /* Maps each captured constant variable to its remaining interesting use:
     a DECL_EXPR when every real use was folded away (see mark_const_cap_r).  */
  hash_map<tree,tree*> const_vars;

  cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);

  /* FIELDP advances through the closure's field chain in step with the
     capture list, so each pruned field is spliced out without rescanning
     from the head.  */
  tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
  for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
    {
      tree cap = *capp;
      if (tree var = var_to_maybe_prune (cap))
	{
	  tree **use = const_vars.get (var);
	  if (use && TREE_CODE (**use) == DECL_EXPR)
	    {
	      /* All uses of this capture were folded away, leaving only the
		 proxy declaration.  */

	      /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
	      *capp = TREE_CHAIN (cap);

	      /* And out of TYPE_FIELDS.  */
	      tree field = TREE_PURPOSE (cap);
	      while (*fieldp != field)
		fieldp = &DECL_CHAIN (*fieldp);
	      *fieldp = DECL_CHAIN (*fieldp);

	      /* And remove the capture proxy declaration.  */
	      **use = void_node;
	      continue;
	    }
	}

      capp = &TREE_CHAIN (cap);
    }
}
1539
/* Finish the lambda call operator whose BODY was returned by
   start_lambda_function, pruning unused constant captures and emitting
   code when the op() is not a template.  */

void
finish_lambda_function (tree body)
{
  finish_function_body (body);

  /* Drop captures of constant variables whose uses were all folded away.  */
  prune_lambda_captures (body);

  /* Finish the function and generate code for it if necessary.  */
  tree fn = finish_function (/*inline_p=*/true);

  /* Only expand if the call op is not a template.  */
  if (!DECL_TEMPLATE_INFO (fn))
    expand_or_defer_fn (fn);
}
1554
1555 #include "gt-cp-lambda.h"