/* gcc/cp/lambda.c — from gcc.git
   (page header: "c++: Detect deduction guide redeclaration [PR97099]") */
1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
5
6 Copyright (C) 1998-2020 Free Software Foundation, Inc.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
14
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33 #include "target.h"
34
35 /* Constructor for a lambda expression. */
36
37 tree
38 build_lambda_expr (void)
39 {
40 tree lambda = make_node (LAMBDA_EXPR);
41 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
42 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
43 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
44 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
45 LAMBDA_EXPR_MUTABLE_P (lambda) = false;
46 return lambda;
47 }
48
/* Create the closure object for a LAMBDA_EXPR.  Builds an aggregate
   initializer from the capture list and applies it to the closure type
   via a compound literal.  In a template (or on error) the LAMBDA_EXPR
   itself is returned unchanged.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  /* Walk the capture list, producing one constructor element per
     capture field.  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      if (TREE_CODE (val) == TREE_LIST)
	val = build_x_compound_expr_from_list (val, ELK_INIT,
					       tf_warning_or_error);

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && !TYPE_REF_P (TREE_TYPE (field)))
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  /* Restore the location saved above, on success and error paths alike.  */
  input_location = saved_loc;
  return expr;
}
125
126 /* Return an initialized RECORD_TYPE for LAMBDA.
127 LAMBDA must have its explicit captures already. */
128
129 tree
130 begin_lambda_type (tree lambda)
131 {
132 /* Lambda names are nearly but not quite anonymous. */
133 tree name = make_anon_name ();
134 IDENTIFIER_LAMBDA_P (name) = true;
135
136 /* Create the new RECORD_TYPE for this lambda. */
137 tree type = xref_tag (/*tag_code=*/record_type, name,
138 /*scope=*/ts_lambda, /*template_header_p=*/false);
139 if (type == error_mark_node)
140 return error_mark_node;
141
142 /* Designate it as a struct so that we can use aggregate initialization. */
143 CLASSTYPE_DECLARED_CLASS (type) = false;
144
145 /* Cross-reference the expression and the type. */
146 LAMBDA_EXPR_CLOSURE (lambda) = type;
147 CLASSTYPE_LAMBDA_EXPR (type) = lambda;
148
149 /* In C++17, assume the closure is literal; we'll clear the flag later if
150 necessary. */
151 if (cxx_dialect >= cxx17)
152 CLASSTYPE_LITERAL_P (type) = true;
153
154 /* Clear base types. */
155 xref_basetypes (type, /*bases=*/NULL_TREE);
156
157 /* Start the class. */
158 type = begin_class_definition (type);
159
160 return type;
161 }
162
163 /* Returns the type to use for the return type of the operator() of a
164 closure class. */
165
166 tree
167 lambda_return_type (tree expr)
168 {
169 if (expr == NULL_TREE)
170 return void_type_node;
171 if (type_unknown_p (expr)
172 || BRACE_ENCLOSED_INITIALIZER_P (expr))
173 {
174 cxx_incomplete_type_error (expr, TREE_TYPE (expr));
175 return error_mark_node;
176 }
177 gcc_checking_assert (!type_dependent_expression_p (expr));
178 return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
179 }
180
181 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
182 closure type. */
183
184 tree
185 lambda_function (tree lambda)
186 {
187 tree type;
188 if (TREE_CODE (lambda) == LAMBDA_EXPR)
189 type = LAMBDA_EXPR_CLOSURE (lambda);
190 else
191 type = lambda;
192 gcc_assert (LAMBDA_TYPE_P (type));
193 /* Don't let debug_tree cause instantiation. */
194 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
195 && !COMPLETE_OR_OPEN_TYPE_P (type))
196 return NULL_TREE;
197 lambda = lookup_member (type, call_op_identifier,
198 /*protect=*/0, /*want_type=*/false,
199 tf_warning_or_error);
200 if (lambda)
201 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
202 return lambda;
203 }
204
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
			   bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && explicit_init_p)
    {
      /* Init capture: deduce the field type from the initializer, as if
	 by 'auto' (or 'auto&' for by-reference captures).  */
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
	/* Add the reference now, so deduction doesn't lose
	   outermost CV qualifiers of EXPR.  */
	type = build_reference_type (type);
      if (uses_parameter_packs (expr))
	/* Stick with 'auto' even if the type could be deduced.  */;
      else
	type = do_auto_deduction (type, expr, auto_node);
    }
  else if (!is_this && type_dependent_expression_p (expr))
    {
      /* Dependent expression: defer the computation behind a
	 DECLTYPE_TYPE that records the capture kind.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else
    {
      /* Plain capture: the (non-reference) type of the expression,
	 wrapped in a reference for by-reference captures and for
	 functions (which can't be captured by value).  */
      type = non_reference (unlowered_expr_type (expr));

      if (!is_this
	  && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
	type = build_reference_type (type);
    }

  return type;
}
250
251 /* Returns true iff DECL is a lambda capture proxy variable created by
252 build_capture_proxy. */
253
254 bool
255 is_capture_proxy (tree decl)
256 {
257 return (VAR_P (decl)
258 && DECL_HAS_VALUE_EXPR_P (decl)
259 && !DECL_ANON_UNION_VAR_P (decl)
260 && !DECL_DECOMPOSITION_P (decl)
261 && !DECL_FNAME_P (decl)
262 && !(DECL_ARTIFICIAL (decl)
263 && DECL_LANG_SPECIFIC (decl)
264 && DECL_OMP_PRIVATIZED_MEMBER (decl))
265 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
266 }
267
268 /* Returns true iff DECL is a capture proxy for a normal capture
269 (i.e. without explicit initializer). */
270
271 bool
272 is_normal_capture_proxy (tree decl)
273 {
274 if (!is_capture_proxy (decl))
275 /* It's not a capture proxy. */
276 return false;
277
278 return (DECL_LANG_SPECIFIC (decl)
279 && DECL_CAPTURED_VARIABLE (decl));
280 }
281
282 /* Returns true iff DECL is a capture proxy for a normal capture
283 of a constant variable. */
284
285 bool
286 is_constant_capture_proxy (tree decl)
287 {
288 if (is_normal_capture_proxy (decl))
289 return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
290 return false;
291 }
292
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      /* Register VAR as the local specialization of the captured
	 variable, so template instantiation maps uses of the captured
	 variable to the proxy.  */
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
	{
	  /* Sanity-check: the captured variable is not itself a proxy,
	     and any prior specialization belongs to a different
	     (enclosing) function.  */
	  gcc_assert (!is_normal_capture_proxy (cap));
	  tree old = retrieve_local_specialization (cap);
	  if (old)
	    gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
	}
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.
     Index 1 is the outermost (extra body) statement list on the stack.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
322
323 /* We've just finished processing a lambda; if the containing scope is also
324 a lambda, insert any capture proxies that were created while processing
325 the nested lambda. */
326
327 void
328 insert_pending_capture_proxies (void)
329 {
330 tree lam;
331 vec<tree, va_gc> *proxies;
332 unsigned i;
333
334 if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
335 return;
336
337 lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
338 proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
339 for (i = 0; i < vec_safe_length (proxies); ++i)
340 {
341 tree var = (*proxies)[i];
342 insert_capture_proxy (var);
343 }
344 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
345 LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
346 }
347
348 /* Given REF, a COMPONENT_REF designating a field in the lambda closure,
349 return the type we want the proxy to have: the type of the field itself,
350 with added const-qualification if the lambda isn't mutable and the
351 capture is by value. */
352
353 tree
354 lambda_proxy_type (tree ref)
355 {
356 tree type;
357 if (ref == error_mark_node)
358 return error_mark_node;
359 if (REFERENCE_REF_P (ref))
360 ref = TREE_OPERAND (ref, 0);
361 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
362 type = TREE_TYPE (ref);
363 if (!type || WILDCARD_TYPE_P (non_reference (type)))
364 {
365 type = cxx_make_type (DECLTYPE_TYPE);
366 DECLTYPE_TYPE_EXPR (type) = ref;
367 DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
368 SET_TYPE_STRUCTURAL_EQUALITY (type);
369 }
370 if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
371 type = make_pack_expansion (type);
372 return type;
373 }
374
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  INIT is the capture's initializer, used for normal captures
   to dig out the originally-captured variable.  */

static tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field, i.e. to
     (*this).member inside the operator().  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !INDIRECT_TYPE_P (type))
    {
      /* The field holds '*this' by copy (see add_capture); the proxy
	 is a const pointer to that copy.  */
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      object = convert (type, ptr);
    }

  complete_type (type);

  /* The proxy itself: an artificial VAR_DECL whose value-expr is the
     member access built above.  */
  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      /* For normal captures, dig the captured variable itself out of
	 INIT and record it in DECL_CAPTURED_VARIABLE.  */
      if (DECL_VLA_CAPTURE_P (member))
	{
	  init = CONSTRUCTOR_ELT (init, 0)->value;
	  init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
	  init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
	}
      else
	{
	  if (PACK_EXPANSION_P (init))
	    init = PACK_EXPANSION_PATTERN (init);
	}

      if (INDIRECT_REF_P (init))
	init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);

      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      /* A capture of an enclosing lambda's proxy chains through to the
	 underlying variable.  */
      while (is_normal_capture_proxy (init))
	init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  /* Insert now if we're already inside the operator(); otherwise queue
     the proxy for insert_pending_capture_proxies.  */
  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
469
/* Cached identifiers for the "ptr" and "max" fields of the record built
   by vla_capture_type; created lazily on first use and rooted for the
   garbage collector via GTY.  */
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;
472
473 /* Return a struct containing a pointer and a length for lambda capture of
474 an array of runtime length. */
475
476 static tree
477 vla_capture_type (tree array_type)
478 {
479 tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
480 xref_basetypes (type, NULL_TREE);
481 type = begin_class_definition (type);
482 if (!ptr_id)
483 {
484 ptr_id = get_identifier ("ptr");
485 max_id = get_identifier ("max");
486 }
487 tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
488 tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
489 finish_member_declaration (field);
490 field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
491 finish_member_declaration (field);
492 return finish_struct (type, NULL_TREE);
493 }
494
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  EXPLICIT_INIT_P is true for a
   C++14 init capture.  Returns the capture proxy, NULL_TREE if the
   closure hasn't been started yet, or error_mark_node on error.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* A pack-expansion initializer: work on the pattern, and remember to
     re-expand the member and capture-list entry below.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      /* Other variably-modified types aren't supported at all.  */
      sorry ("capture of variably-modified type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      return error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
					by_reference_p);
      if (type == error_mark_node)
	return error_mark_node;

      if (id == this_identifier && !by_reference_p)
	{
	  /* '*this' captured by copy: the field has the class type
	     itself, initialized from the dereferenced 'this'.  */
	  gcc_assert (INDIRECT_TYPE_P (type));
	  type = TREE_TYPE (type);
	  initializer = cp_build_fold_indirect_ref (initializer);
	}

      if (dependent_type_p (type))
	;
      else if (id != this_identifier && by_reference_p)
	{
	  /* Capture by reference requires an lvalue.  */
	  if (!lvalue_p (initializer))
	    {
	      error ("cannot capture %qE by reference", initializer);
	      return error_mark_node;
	    }
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	  else if (!verify_type_context (input_location,
					 TCTX_CAPTURE_BY_COPY, type))
	    return error_mark_node;
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  if (variadic)
    {
      type = make_pack_expansion (type);
      if (explicit_init_p)
	/* With an explicit initializer 'type' is auto, which isn't really a
	   parameter pack in this context.  We will want as many fields as we
	   have elements in the expansion of the initializer, so use its packs
	   instead.  */
	PACK_EXPANSION_PARAMETER_PACKS (type)
	  = uses_parameter_packs (initializer);
    }

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
	internal_error ("trying to capture %qD in instantiation of "
			"generic lambda", id);
      finish_member_declaration (member);
    }

  /* Record the capture on the (reverse-order) capture list; for a pack,
     record the expansion and the original initializer.  */
  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
  return NULL_TREE;
}
655
656 /* Register all the capture members on the list CAPTURES, which is the
657 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
658
659 void
660 register_capture_members (tree captures)
661 {
662 if (captures == NULL_TREE)
663 return;
664
665 register_capture_members (TREE_CHAIN (captures));
666
667 tree field = TREE_PURPOSE (captures);
668 if (PACK_EXPANSION_P (field))
669 field = PACK_EXPANSION_PATTERN (field);
670
671 finish_member_declaration (field);
672 }
673
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);
  tree var = NULL_TREE;
  /* add_capture is sensitive to current_class_type; save it so it can
     be restored after the loop.  */
  tree saved_class_type = current_class_type;

  /* LAMBDA_STACK has the outermost lambda first; capture into each in
     turn, so each inner lambda captures the outer lambda's proxy.  */
  for (tree node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (this_capture_p
			  || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false);
      /* The proxy just created becomes the initializer for the next
	 (inner) lambda on the stack.  */
      initializer = convert_from_reference (var);

      /* Warn about deprecated implicit capture of this via [=].  */
      if (cxx_dialect >= cxx20
	  && this_capture_p
	  && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY)
	{
	  if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
			  "implicit capture of %qE via %<[=]%> is deprecated "
			  "in C++20", this_identifier))
	    inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
		    "%<*this%> capture");
	}
    }

  current_class_type = saved_class_type;

  return var;
}
721
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is nonzero.  If ADD_CAPTURE_P is negative,
   try to capture but don't complain if we can't.  */

tree
lambda_expr_this_capture (tree lambda, int add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture)
    {
      /* LAMBDA_STACK collects the lambdas (innermost last) that will
	 each need a 'this' capture; INIT is what to initialize the
	 outermost such capture from.  */
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
	   1. a non-lambda function or NSDMI,
	   2. a lambda function capturing 'this', or
	   3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  if (add_capture_p
	      && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* tlambda won't let us capture 'this'.  */
	    break;

	  if (add_capture_p)
	    lambda_stack = tree_cons (NULL_TREE,
				      tlambda,
				      lambda_stack);

	  tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
	  tree containing_function
	    = decl_function_context (TYPE_NAME (closure));

	  tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
	  if (ex && TREE_CODE (ex) == FIELD_DECL)
	    {
	      /* Lambda in an NSDMI.  We don't have a function to look up
		 'this' in, but we can find (or rebuild) the fake one from
		 inject_this_parameter.  */
	      if (!containing_function && !COMPLETE_TYPE_P (closure))
		/* If we're parsing a lambda in a non-local class,
		   we can find the fake 'this' in scope_chain.  */
		init = scope_chain->x_current_class_ptr;
	      else
		/* Otherwise it's either gone or buried in
		   function_context_stack, so make another.  */
		init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
					TYPE_UNQUALIFIED);
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  /* Move out to the enclosing lambda.  */
	  tlambda
	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }
	}

      if (init)
	{
	  if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
						/*id=*/this_identifier,
						init);
	  else
	    this_capture = init;
	}
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      /* ADD_CAPTURE_P < 0 means don't complain on failure.  */
      if (add_capture_p == 1)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
	 ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}
849
850 /* Return the innermost LAMBDA_EXPR we're currently in, if any. */
851
852 tree
853 current_lambda_expr (void)
854 {
855 tree type = current_class_type;
856 while (type && !LAMBDA_TYPE_P (type))
857 type = decl_type_context (TYPE_NAME (type));
858 if (type)
859 return CLASSTYPE_LAMBDA_EXPR (type);
860 else
861 return NULL_TREE;
862 }
863
864 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
865 object. NULL otherwise.. */
866
867 static tree
868 resolvable_dummy_lambda (tree object)
869 {
870 if (!is_dummy_object (object))
871 return NULL_TREE;
872
873 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
874 gcc_assert (!TYPE_PTR_P (type));
875
876 if (type != current_class_type
877 && current_class_type
878 && LAMBDA_TYPE_P (current_class_type)
879 && lambda_function (current_class_type)
880 && DERIVED_FROM_P (type, nonlambda_method_basetype()))
881 return CLASSTYPE_LAMBDA_EXPR (current_class_type);
882
883 return NULL_TREE;
884 }
885
886 /* We don't want to capture 'this' until we know we need it, i.e. after
887 overload resolution has chosen a non-static member function. At that
888 point we call this function to turn a dummy object into a use of the
889 'this' capture. */
890
891 tree
892 maybe_resolve_dummy (tree object, bool add_capture_p)
893 {
894 if (tree lam = resolvable_dummy_lambda (object))
895 if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
896 if (cap != error_mark_node)
897 object = build_fold_indirect_ref (cap);
898
899 return object;
900 }
901
/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
	/* We've not yet captured, so look at the function set of
	   interest.  */
	if (BASELINK_P (fns))
	  fns = BASELINK_FUNCTIONS (fns);
	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
	if (id_expr)
	  fns = TREE_OPERAND (fns, 0);

	/* Scan the overload set for any non-static member function
	   (for a template-id, only template candidates count).  */
	for (lkp_iterator iter (fns); iter; ++iter)
	  if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
	       || TREE_CODE (*iter) == TEMPLATE_DECL)
	      && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
	    {
	      /* Found a non-static member.  Capture this.  */
	      lambda_expr_this_capture (lam, /*maybe*/-1);
	      break;
	    }
      }
}
934
935 /* Returns the innermost non-lambda function. */
936
937 tree
938 current_nonlambda_function (void)
939 {
940 tree fn = current_function_decl;
941 while (fn && LAMBDA_FUNCTION_P (fn))
942 fn = decl_function_context (fn);
943 return fn;
944 }
945
/* Returns the method basetype of the innermost non-lambda function, including
   a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  if (!current_class_ref)
    return NULL_TREE;

  tree type = current_class_type;
  if (!type || !LAMBDA_TYPE_P (type))
    return type;

  /* Walk outward through the enclosing closure types.  */
  while (true)
    {
      tree lam = CLASSTYPE_LAMBDA_EXPR (type);
      tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
      if (ex && TREE_CODE (ex) == FIELD_DECL)
	/* Lambda in an NSDMI.  */
	return DECL_CONTEXT (ex);

      tree fn = TYPE_CONTEXT (type);
      if (!fn || TREE_CODE (fn) != FUNCTION_DECL
	  || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
	/* No enclosing non-lambda method.  */
	return NULL_TREE;
      if (!LAMBDA_FUNCTION_P (fn))
	/* Found an enclosing non-lambda method.  */
	return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
      /* FN is itself a closure's operator(); keep walking outward.  */
      type = DECL_CONTEXT (fn);
    }
}
978
979 /* Like current_scope, but looking through lambdas. */
980
981 tree
982 current_nonlambda_scope (void)
983 {
984 tree scope = current_scope ();
985 for (;;)
986 {
987 if (TREE_CODE (scope) == FUNCTION_DECL
988 && LAMBDA_FUNCTION_P (scope))
989 {
990 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
991 continue;
992 }
993 else if (LAMBDA_TYPE_P (scope))
994 {
995 scope = CP_TYPE_CONTEXT (scope);
996 continue;
997 }
998 break;
999 }
1000 return scope;
1001 }
1002
1003 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
1004 indicated FN and NARGS, but do not initialize the return type or any of the
1005 argument slots. */
1006
1007 static tree
1008 prepare_op_call (tree fn, int nargs)
1009 {
1010 tree t;
1011
1012 t = build_vl_exp (CALL_EXPR, nargs + 3);
1013 CALL_EXPR_FN (t) = fn;
1014 CALL_EXPR_STATIC_CHAIN (t) = NULL;
1015
1016 return t;
1017 }
1018
1019 /* Return true iff CALLOP is the op() for a generic lambda. */
1020
1021 bool
1022 generic_lambda_fn_p (tree callop)
1023 {
1024 return (LAMBDA_FUNCTION_P (callop)
1025 && DECL_TEMPLATE_INFO (callop)
1026 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
1027 }
1028
1029 /* If the closure TYPE has a static op(), also add a conversion to function
1030 pointer. */
1031
1032 void
1033 maybe_add_lambda_conv_op (tree type)
1034 {
/* NESTED: we are currently inside a function, so function context must be
   pushed/popped around the synthesized bodies below.  NESTED_DEF: the
   closure itself was defined inside a function.  */
1035 bool nested = (cfun != NULL);
1036 bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
1037 tree callop = lambda_function (type);
1038 tree lam = CLASSTYPE_LAMBDA_EXPR (type);
1039
/* Only capture-less lambdas get the conversion to function pointer.  */
1040 if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
1041 || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
1042 return;
1043
1044 if (processing_template_decl)
1045 return;
1046
1047 bool const generic_lambda_p = generic_lambda_fn_p (callop);
1048
1049 if (!generic_lambda_p && undeduced_auto_decl (callop))
1050 {
1051 /* If the op() wasn't deduced due to errors, give up. */
1052 gcc_assert (errorcount || sorrycount);
1053 return;
1054 }
1055
1056 /* Non-generic non-capturing lambdas only have a conversion function to
1057 pointer to function when the trailing requires-clause's constraints are
1058 satisfied. */
1059 if (!generic_lambda_p && !constraints_satisfied_p (callop))
1060 return;
1061
1062 /* Non-template conversion operators are defined directly with build_call_a
1063 and using DIRECT_ARGVEC for arguments (including 'this'). Templates are
1064 deferred and the CALL is built in-place. In the case of a deduced return
1065 call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
1066 the return type is also built in-place. The arguments of DECLTYPE_CALL in
1067 the return expression may differ in flags from those in the body CALL. In
1068 particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
1069 the body CALL, but not in DECLTYPE_CALL. */
1070
1071 vec<tree, va_gc> *direct_argvec = 0;
1072 tree decltype_call = 0, call = 0;
1073 tree optype = TREE_TYPE (callop);
1074 tree fn_result = TREE_TYPE (optype);
1075
/* A null constant of the op()'s 'this' pointer type: the object argument is
   deliberately null, which is safe since the closure has no captures.  */
1076 tree thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
1077 if (generic_lambda_p)
1078 {
1079 ++processing_template_decl;
1080
1081 /* Prepare the dependent member call for the static member function
1082 '_FUN' and, potentially, prepare another call to be used in a decltype
1083 return expression for a deduced return call op to allow for simple
1084 implementation of the conversion operator. */
1085
1086 tree instance = cp_build_fold_indirect_ref (thisarg);
1087 tree objfn = lookup_template_function (DECL_NAME (callop),
1088 DECL_TI_ARGS (callop));
1089 objfn = build_min (COMPONENT_REF, NULL_TREE,
1090 instance, objfn, NULL_TREE);
/* -1: skip the implicit 'this' parameter.  */
1091 int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;
1092
1093 call = prepare_op_call (objfn, nargs);
1094 if (type_uses_auto (fn_result))
1095 decltype_call = prepare_op_call (objfn, nargs);
1096 }
1097 else
1098 {
1099 direct_argvec = make_tree_vector ();
1100 direct_argvec->quick_push (thisarg);
1101 }
1102
1103 /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
1104 declare the static member function "_FUN" below. For each arg append to
1105 DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
1106 call args (for the template case). If a parameter pack is found, expand
1107 it, flagging it as PACK_EXPANSION_LOCAL_P for the body call. */
1108
1109 tree fn_args = NULL_TREE;
1110 {
1111 int ix = 0;
1112 tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
1113 tree tgt = NULL;
1114
1115 while (src)
1116 {
1117 tree new_node = copy_node (src);
1118
1119 /* Clear TREE_ADDRESSABLE on thunk arguments. */
1120 TREE_ADDRESSABLE (new_node) = 0;
1121
/* Append NEW_NODE to the FN_ARGS chain, keeping TGT at the tail.  */
1122 if (!fn_args)
1123 fn_args = tgt = new_node;
1124 else
1125 {
1126 TREE_CHAIN (tgt) = new_node;
1127 tgt = new_node;
1128 }
1129
1130 mark_exp_read (tgt);
1131
1132 if (generic_lambda_p)
1133 {
1134 tree a = tgt;
1135 if (DECL_PACK_P (tgt))
1136 {
1137 a = make_pack_expansion (a);
1138 PACK_EXPANSION_LOCAL_P (a) = true;
1139 }
1140 CALL_EXPR_ARG (call, ix) = a;
1141
1142 if (decltype_call)
1143 {
1144 /* Avoid capturing variables in this context. */
1145 ++cp_unevaluated_operand;
1146 CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
1147 --cp_unevaluated_operand;
1148 }
1149
1150 ++ix;
1151 }
1152 else
1153 vec_safe_push (direct_argvec, tgt);
1154
1155 src = TREE_CHAIN (src);
1156 }
1157 }
1158
1159 if (generic_lambda_p)
1160 {
1161 if (decltype_call)
1162 {
/* Deduced return type: substitute decltype(op()(args...)).  */
1163 fn_result = finish_decltype_type
1164 (decltype_call, /*id_expression_or_member_access_p=*/false,
1165 tf_warning_or_error);
1166 }
1167 }
1168 else
1169 call = build_call_a (callop,
1170 direct_argvec->length (),
1171 direct_argvec->address ());
1172
1173 CALL_FROM_THUNK_P (call) = 1;
1174 SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
1175
/* STATTYPE is the type of the static "_FUN" thunk: same parameters as
   op() minus 'this', carrying over attributes and noexcept.  */
1176 tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
1177 stattype = (cp_build_type_attribute_variant
1178 (stattype, TYPE_ATTRIBUTES (optype)));
1179 if (flag_noexcept_type
1180 && TYPE_NOTHROW_P (TREE_TYPE (callop)))
1181 stattype = build_exception_variant (stattype, noexcept_true_spec);
1182
1183 if (generic_lambda_p)
1184 --processing_template_decl;
1185
1186 /* First build up the conversion op. */
1187
1188 tree rettype = build_pointer_type (stattype);
1189 tree name = make_conv_op_name (rettype);
1190 tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
1191 tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
1192 tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
1193 SET_DECL_LANGUAGE (convfn, lang_cplusplus);
1194 tree fn = convfn;
1195 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1196 SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
1197 grokclassfn (type, fn, NO_SPECIAL);
1198 set_linkage_according_to_type (type, fn);
1199 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1200 DECL_IN_AGGR_P (fn) = 1;
1201 DECL_ARTIFICIAL (fn) = 1;
1202 DECL_NOT_REALLY_EXTERN (fn) = 1;
1203 DECL_DECLARED_INLINE_P (fn) = 1;
/* The conversion mirrors op()'s constexpr/consteval-ness.  */
1204 DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1205 if (DECL_IMMEDIATE_FUNCTION_P (callop))
1206 SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1207 DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);
1208
1209 if (nested_def)
1210 DECL_INTERFACE_KNOWN (fn) = 1;
1211
1212 if (generic_lambda_p)
1213 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1214
1215 add_method (type, fn, false);
1216
1217 /* Generic thunk code fails for varargs; we'll complain in mark_used if
1218 the conversion op is used. */
1219 if (varargs_function_p (callop))
1220 {
1221 DECL_DELETED_FN (fn) = 1;
1222 return;
1223 }
1224
1225 /* Now build up the thunk to be returned. */
1226
1227 tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
1228 SET_DECL_LANGUAGE (statfn, lang_cplusplus);
1229 fn = statfn;
1230 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1231 grokclassfn (type, fn, NO_SPECIAL);
1232 set_linkage_according_to_type (type, fn);
1233 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1234 DECL_IN_AGGR_P (fn) = 1;
1235 DECL_ARTIFICIAL (fn) = 1;
1236 DECL_NOT_REALLY_EXTERN (fn) = 1;
1237 DECL_DECLARED_INLINE_P (fn) = 1;
1238 DECL_STATIC_FUNCTION_P (fn) = 1;
1239 DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1240 if (DECL_IMMEDIATE_FUNCTION_P (callop))
1241 SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1242 DECL_ARGUMENTS (fn) = fn_args;
1243 for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
1244 {
1245 /* Avoid duplicate -Wshadow warnings. */
1246 DECL_NAME (arg) = NULL_TREE;
1247 DECL_CONTEXT (arg) = fn;
1248 }
1249 if (nested_def)
1250 DECL_INTERFACE_KNOWN (fn) = 1;
1251
1252 if (generic_lambda_p)
1253 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1254
1255 if (flag_sanitize & SANITIZE_NULL)
1256 /* Don't UBsan this function; we're deliberately calling op() with a null
1257 object argument. */
1258 add_no_sanitize_value (fn, SANITIZE_UNDEFINED);
1259
1260 add_method (type, fn, false);
1261
1262 if (nested)
1263 push_function_context ();
1264 else
1265 /* Still increment function_depth so that we don't GC in the
1266 middle of an expression. */
1267 ++function_depth;
1268
1269 /* Generate the body of the thunk. */
1270
1271 start_preparsed_function (statfn, NULL_TREE,
1272 SF_PRE_PARSED | SF_INCLASS_INLINE);
1273 tree body = begin_function_body ();
1274 tree compound_stmt = begin_compound_stmt (0);
1275 if (!generic_lambda_p)
1276 {
1277 set_flags_from_callee (call);
1278 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1279 call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
1280 }
1281 call = convert_from_reference (call);
1282 finish_return_stmt (call);
1283
1284 finish_compound_stmt (compound_stmt);
1285 finish_function_body (body);
1286
1287 fn = finish_function (/*inline_p=*/true);
1288 if (!generic_lambda_p)
1289 expand_or_defer_fn (fn);
1290
1291 /* Generate the body of the conversion op. */
1292
1293 start_preparsed_function (convfn, NULL_TREE,
1294 SF_PRE_PARSED | SF_INCLASS_INLINE);
1295 body = begin_function_body ();
1296 compound_stmt = begin_compound_stmt (0);
1297
1298 /* decl_needed_p needs to see that it's used. */
1299 TREE_USED (statfn) = 1;
/* The conversion simply returns the address of "_FUN".  */
1300 finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1301
1302 finish_compound_stmt (compound_stmt);
1303 finish_function_body (body);
1304
1305 fn = finish_function (/*inline_p=*/true);
1306 if (!generic_lambda_p)
1307 expand_or_defer_fn (fn);
1308
1309 if (nested)
1310 pop_function_context ();
1311 else
1312 --function_depth;
1313 }
1314
1315 /* True if FN is the static function "_FUN" that gets returned from the lambda
1316 conversion operator. */
1317
1318 bool
1319 lambda_static_thunk_p (tree fn)
1320 {
1321 return (fn && TREE_CODE (fn) == FUNCTION_DECL
1322 && DECL_ARTIFICIAL (fn)
1323 && DECL_STATIC_FUNCTION_P (fn)
1324 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1325 }
1326
1327 /* Returns true iff VAL is a lambda-related declaration which should
1328 be ignored by unqualified lookup. */
1329
1330 bool
1331 is_lambda_ignored_entity (tree val)
1332 {
1333 /* Look past normal, non-VLA capture proxies. */
1334 if (is_normal_capture_proxy (val)
1335 && !variably_modified_type_p (TREE_TYPE (val), NULL_TREE))
1336 return true;
1337
1338 /* Always ignore lambda fields, their names are only for debugging. */
1339 if (TREE_CODE (val) == FIELD_DECL
1340 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1341 return true;
1342
1343 /* None of the lookups that use qualify_lookup want the op() from the
1344 lambda; they want the one from the enclosing class. */
1345 if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
1346 return true;
1347
1348 return false;
1349 }
1350
1351 /* Lambdas that appear in variable initializer or default argument scope
1352 get that in their mangling, so we need to record it. We might as well
1353 use the count for function and namespace scopes as well. */
/* Current extra-scope decl used for lambda mangling, and the number of
   lambdas seen so far in that scope (the mangling discriminator).  */
1354 static GTY(()) tree lambda_scope;
1355 static GTY(()) int lambda_count;
/* A saved (lambda_scope, lambda_count) pair.  */
1356 struct GTY(()) tree_int
1357 {
1358 tree t;
1359 int i;
1360 };
/* Stack of saved scope/count pairs, pushed by start_lambda_scope and
   popped by finish_lambda_scope.  */
1361 static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
1362
1363 void
1364 start_lambda_scope (tree decl)
1365 {
1366 tree_int ti;
1367 gcc_assert (decl);
1368 /* Once we're inside a function, we ignore variable scope and just push
1369 the function again so that popping works properly. */
1370 if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
1371 decl = current_function_decl;
1372 ti.t = lambda_scope;
1373 ti.i = lambda_count;
1374 vec_safe_push (lambda_scope_stack, ti);
1375 if (lambda_scope != decl)
1376 {
1377 /* Don't reset the count if we're still in the same function. */
1378 lambda_scope = decl;
1379 lambda_count = 0;
1380 }
1381 }
1382
1383 void
1384 record_lambda_scope (tree lambda)
1385 {
1386 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
1387 LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
1388 }
1389
1390 /* This lambda is an instantiation of a lambda in a template default argument
1391 that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either. But we do
1392 need to use and increment the global count to avoid collisions. */
1393
1394 void
1395 record_null_lambda_scope (tree lambda)
1396 {
1397 if (vec_safe_is_empty (lambda_scope_stack))
1398 record_lambda_scope (lambda);
1399 else
1400 {
/* Use the bottom of the scope stack: its saved scope is expected to be
   NULL_TREE here (checked by the assert below), and its count is bumped
   so discriminators stay unique across such lambdas.  */
1401 tree_int *p = lambda_scope_stack->begin();
1402 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
1403 LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
1404 }
/* Either way, the lambda must end up with no extra scope.  */
1405 gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
1406 }
1407
1408 void
1409 finish_lambda_scope (void)
1410 {
1411 tree_int *p = &lambda_scope_stack->last ();
1412 if (lambda_scope != p->t)
1413 {
1414 lambda_scope = p->t;
1415 lambda_count = p->i;
1416 }
1417 lambda_scope_stack->pop ();
1418 }
1419
1420 tree
1421 start_lambda_function (tree fco, tree lambda_expr)
1422 {
1423 /* Let the front end know that we are going to be defining this
1424 function. */
1425 start_preparsed_function (fco,
1426 NULL_TREE,
1427 SF_PRE_PARSED | SF_INCLASS_INLINE);
1428
1429 tree body = begin_function_body ();
1430
1431 /* Push the proxies for any explicit captures. */
1432 for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1433 cap = TREE_CHAIN (cap))
1434 build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
1435
1436 return body;
1437 }
1438
1439 /* Subroutine of prune_lambda_captures: CAP is a node in
1440 LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we
1441 might optimize away the capture, or NULL_TREE if there is no such
1442 variable. */
1443
1444 static tree
1445 var_to_maybe_prune (tree cap)
1446 {
1447 if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
1448 /* Don't prune explicit captures. */
1449 return NULL_TREE;
1450
1451 tree mem = TREE_PURPOSE (cap);
1452 if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
1453 /* Packs and init-captures aren't captures of constant vars. */
1454 return NULL_TREE;
1455
1456 tree init = TREE_VALUE (cap);
1457 if (is_normal_capture_proxy (init))
1458 init = DECL_CAPTURED_VARIABLE (init);
1459 if (decl_constant_var_p (init))
1460 return init;
1461
1462 return NULL_TREE;
1463 }
1464
1465 /* walk_tree helper for prune_lambda_captures: Remember which capture proxies
1466 for constant variables are actually used in the lambda body.
1467
1468 There will always be a DECL_EXPR for the capture proxy; remember it when we
1469 see it, but replace it with any other use. */
1470
1471 static tree
1472 mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
1473 {
/* DATA is the map from captured variable to the location of the use we've
   chosen to remember (its DECL_EXPR if that's all we've seen).  */
1474 hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;
1475
1476 tree var = NULL_TREE;
1477 if (TREE_CODE (*t) == DECL_EXPR)
1478 {
1479 tree decl = DECL_EXPR_DECL (*t);
1480 if (is_constant_capture_proxy (decl))
1481 {
1482 var = DECL_CAPTURED_VARIABLE (decl);
/* Don't walk into the DECL_EXPR, so the proxy VAR_DECL inside it isn't
   mistaken for a real use.  */
1483 *walk_subtrees = 0;
1484 }
1485 }
1486 else if (is_constant_capture_proxy (*t))
1487 var = DECL_CAPTURED_VARIABLE (*t);
1488
1489 if (var)
1490 {
/* A proxy use (a VAR_DECL) overrides a previously recorded DECL_EXPR;
   the DECL_EXPR only remains in the slot when no other use exists.  */
1491 tree *&slot = const_vars.get_or_insert (var);
1492 if (!slot || VAR_P (*t))
1493 slot = t;
1494 }
1495
1496 return NULL_TREE;
1497 }
1498
1499 /* We're at the end of processing a lambda; go back and remove any captures of
1500 constant variables for which we've folded away all uses. */
1501
1502 static void
1503 prune_lambda_captures (tree body)
1504 {
1505 tree lam = current_lambda_expr ();
1506 if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
1507 /* No uses were optimized away. */
1508 return;
1509 if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
1510 /* No default captures, and we don't prune explicit captures. */
1511 return;
1512
/* Map each captured constant variable to its remembered use in BODY,
   filled in by mark_const_cap_r.  */
1513 hash_map<tree,tree*> const_vars;
1514
1515 cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);
1516
/* Walk the capture list via pointer-to-pointer so nodes can be spliced
   out in place.  FIELDP tracks our position in TYPE_FIELDS; it is never
   rewound, so closure fields are assumed to appear in capture-list
   order — NOTE(review): confirm that invariant holds.  */
1517 tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
1518 for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
1519 {
1520 tree cap = *capp;
1521 if (tree var = var_to_maybe_prune (cap))
1522 {
1523 tree **use = const_vars.get (var);
1524 if (use && TREE_CODE (**use) == DECL_EXPR)
1525 {
1526 /* All uses of this capture were folded away, leaving only the
1527 proxy declaration. */
1528
1529 /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST. */
1530 *capp = TREE_CHAIN (cap);
1531
1532 /* And out of TYPE_FIELDS. */
1533 tree field = TREE_PURPOSE (cap);
1534 while (*fieldp != field)
1535 fieldp = &DECL_CHAIN (*fieldp);
1536 *fieldp = DECL_CHAIN (*fieldp);
1537
1538 /* And remove the capture proxy declaration. */
1539 **use = void_node;
/* CAPP already points at the next capture after the splice; don't
   advance it again.  */
1540 continue;
1541 }
1542 }
1543
1544 capp = &TREE_CHAIN (cap);
1545 }
1546 }
1547
1548 void
1549 finish_lambda_function (tree body)
1550 {
1551 finish_function_body (body);
1552
1553 prune_lambda_captures (body);
1554
1555 /* Finish the function and generate code for it if necessary. */
1556 tree fn = finish_function (/*inline_p=*/true);
1557
1558 /* Only expand if the call op is not a template. */
1559 if (!DECL_TEMPLATE_INFO (fn))
1560 expand_or_defer_fn (fn);
1561 }
1562
1563 #include "gt-cp-lambda.h"