/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL. These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2014 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3. If not see
   <http://www.gnu.org/licenses/>. */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "cp-tree.h"
#include "toplev.h"
#include "vec.h"

/* Constructor for a lambda expression. */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE;
  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
  return lambda;
}

/* Create the closure object for a LAMBDA_EXPR. */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer. */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
        {
          expr = error_mark_node;
          goto out;
        }

      if (DECL_P (val))
        mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
         do some magic to make it work here. */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
        val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
               && !DECL_VLA_CAPTURE_P (field)
               && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
        {
          /* "the entities that are captured by copy are used to
             direct-initialize each corresponding non-static data
             member of the resulting closure object."

             There's normally no way to express direct-initialization
             from an element of a CONSTRUCTOR, so we build up a special
             TARGET_EXPR to bypass the usual copy-initialization. */
          val = force_rvalue (val, tf_warning_or_error);
          if (TREE_CODE (val) == TARGET_EXPR)
            TARGET_EXPR_DIRECT_INIT_P (val) = true;
        }

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler. */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}

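/* For illustration, a sketch of the source-level construct handled here:
   given

     int i = 0;
     auto f = [i] { return i; };

   build_lambda_object builds the closure object as if the (briefly
   aggregate) closure type were initialized from { i }, with each capture
   field direct-initialized from the corresponding captured entity. */
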
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already. */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name. This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_ANONYMOUS_P. */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda. */
    type = xref_tag (/*tag_code=*/record_type,
                     name,
                     /*scope=*/ts_lambda,
                     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization. */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type. */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* Clear base types. */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class. */
  type = begin_class_definition (type);

  return type;
}

/* Returns the type to use for the return type of the operator() of a
   closure class. */

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return void_type_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}

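/* For illustration, a sketch of the deduction performed above: for

     int i = 0;
     auto f = [&i] { return i; };

   the return expression 'i' is an int lvalue, and the deduced return type
   comes out as plain 'int' after array/function decay and cv-stripping. */
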
/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type. */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation. */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, ansi_opname (CALL_EXPR),
                          /*protect=*/0, /*want_type=*/false,
                          tf_warning_or_error);
  if (lambda)
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}

/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.
   The caller should add REFERENCE_TYPE for capture by reference. */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p)
{
  tree type;
  if (explicit_init_p)
    {
      type = make_auto ();
      type = do_auto_deduction (type, expr, type);
    }
  else
    type = non_reference (unlowered_expr_type (expr));
  if (type_dependent_expression_p (expr)
      && !is_this_parameter (tree_strip_nop_conversions (expr)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  return type;
}

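/* For illustration, a sketch of how the field types above come out:

     int i = 0;
     [i] { };        // normal copy capture: field type is 'int'
     [x = 1.5] { };  // init-capture: type deduced via 'auto', here 'double'

   For a type-dependent capture inside a template, a DECLTYPE_TYPE
   placeholder is built instead and resolved at instantiation time. */
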
/* Returns true iff DECL is a lambda capture proxy variable created by
   build_capture_proxy. */

bool
is_capture_proxy (tree decl)
{
  return (VAR_P (decl)
          && DECL_HAS_VALUE_EXPR_P (decl)
          && !DECL_ANON_UNION_VAR_P (decl)
          && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer). */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy. */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture. */
    return true;

  /* It is a capture proxy, is it a normal capture? */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}

/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda. */

void
insert_capture_proxy (tree var)
{
  cp_binding_level *b;
  tree stmt_list;

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable. */
  b = current_binding_level;
  for (;;)
    {
      cp_binding_level *n = b->level_chain;
      if (n->kind == sk_function_parms)
        break;
      b = n;
    }
  pushdecl_with_scope (var, b, false);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block. */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}

/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda. */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}

/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value. */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}

/* MEMBER is a capture field in a lambda closure class. Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging. */

tree
build_capture_proxy (tree member)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field. */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture. */
  if (DECL_NORMAL_CAPTURE_P (member))
    name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);
  else
    name = DECL_NAME (member);

  type = lambda_proxy_type (object);

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex. */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
                                     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}

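/* For illustration, a sketch of what the proxy above accomplishes: for

     int i = 0;
     auto f = [i] { return i; };

   the capture is stored in a field named '__i', and inside operator() the
   name 'i' resolves to an artificial VAR_DECL whose DECL_VALUE_EXPR is the
   COMPONENT_REF for that field, so the body and the debugger both see 'i'
   while the generated code reads the closure member. */
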
/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length. */

static tree
vla_capture_type (tree array_type)
{
  static tree ptr_id, max_id;
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}

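/* For illustration, the capture record built above is morally equivalent to

     struct {
       T *ptr;      // address of the first element
       size_t max;  // maximum index, from array_type_nelts
     };

   for an array of runtime bound with element type T; build_capture_proxy
   later reconstitutes a reference to the VLA from these two fields. */
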
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it. */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
             bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST)
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
                                                   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
        error ("array of runtime bound cannot be captured by copy, "
               "only by reference");

      /* For a VLA, we capture the address of the first element and the
         maximum index, and then reconstruct the VLA for the proxy. */
      tree elt = cp_build_array_ref (input_location, initializer,
                                     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
                                          NULL_TREE, build_address (elt),
                                          NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (variably_modified_type_p (type, NULL_TREE))
    {
      error ("capture of variable-size type %qT that is not a C++1y array "
             "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
          && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
        inform (input_location, "because the array element type %qT has "
                "variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p);
      if (by_reference_p)
        {
          type = build_reference_type (type);
          if (!real_lvalue_p (initializer))
            error ("cannot capture %qE by reference", initializer);
        }
      else
        /* Capture by copy requires a complete type. */
        type = complete_type (type);
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup. We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields. */
  if (!explicit_init_p)
    {
      buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
      buf[1] = buf[0] = '_';
      memcpy (buf + 2, IDENTIFIER_POINTER (id),
              IDENTIFIER_LENGTH (id) + 1);
      name = get_identifier (buf);
    }
  else
    /* But captures with explicit initializers are named. */
    name = id;

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates. */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
        {
          pedwarn (input_location, 0,
                   "already captured %qD in lambda expression", id);
          return NULL_TREE;
        }
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable. */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup. For now, let's make explicitly initialized captures
       always visible. */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it. */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    finish_member_declaration (member);

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body. */
  return NULL_TREE;
}

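/* For illustration, a sketch of the field naming scheme used above:

     int n = 0;
     [n] { };          // normal capture: field is named '__n'
     [m = 2 * n] { };  // init-capture: field keeps the user's name 'm'

   build_capture_proxy strips the '__' again so that the body still refers
   to the capture as 'n'. */
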
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates. */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}

/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack. */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
        initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                         id,
                         initializer,
                         /*by_reference_p=*/
                         (!this_capture_p
                          && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                              == CPLD_REFERENCE)),
                         /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}

/* Return the capture pertaining to a use of 'this' in LAMBDA, in the form of an
   INDIRECT_REF, possibly adding it through default capturing. */

tree
lambda_expr_this_capture (tree lambda)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so just return the
     nearest 'this'. */
  if (cp_unevaluated_operand)
    {
      /* In an NSDMI the fake 'this' pointer that we're using for
         parsing is in scope_chain. */
      if (LAMBDA_EXPR_EXTRA_SCOPE (lambda)
          && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (lambda)) == FIELD_DECL)
        return scope_chain->x_current_class_ptr;
      return lookup_name (this_identifier);
    }

  /* Try to default capture 'this' if we can. */
  if (!this_capture
      && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE)
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
         1. a non-lambda function or NSDMI,
         2. a lambda function capturing 'this', or
         3. a non-default capturing lambda function. */
      for (tree tlambda = lambda; ;)
        {
          lambda_stack = tree_cons (NULL_TREE,
                                    tlambda,
                                    lambda_stack);

          if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
              && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
            {
              /* In an NSDMI, we don't have a function to look up the decl in,
                 but the fake 'this' pointer that we're using for parsing is
                 in scope_chain. */
              init = scope_chain->x_current_class_ptr;
              gcc_checking_assert
                (init && (TREE_TYPE (TREE_TYPE (init))
                          == current_nonlambda_class_type ()));
              break;
            }

          tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
          tree containing_function = decl_function_context (closure_decl);

          if (containing_function == NULL_TREE)
            /* We ran out of scopes; there's no 'this' to capture. */
            break;

          if (!LAMBDA_FUNCTION_P (containing_function))
            {
              /* We found a non-lambda function. */
              if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
                /* First parameter is 'this'. */
                init = DECL_ARGUMENTS (containing_function);
              break;
            }

          tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
            {
              /* An outer lambda has already captured 'this'. */
              init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
              break;
            }

          if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
            /* An outer lambda won't let us capture 'this'. */
            break;
        }

      if (init)
        this_capture = add_default_capture (lambda_stack,
                                            /*id=*/this_identifier,
                                            init);
    }

  if (!this_capture)
    {
      error ("%<this%> was not captured for this lambda function");
      result = error_mark_node;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda. */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
                  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
         access to the corresponding unnamed data member of the closure
         type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
         ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}

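/* For illustration, a sketch of the walk performed above:

     struct S {
       int m;
       void f () {
         [=] { [=] { return m; } (); } ();   // 'm' needs 'this'
       }
     };

   The use of 'm' in the innermost lambda requires 'this'; since neither
   lambda has captured it yet, the loop walks outward collecting both
   closures on lambda_stack, finds the enclosing non-lambda member function
   f(), and then add_default_capture captures 'this' in each lambda from
   the outside in. */
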
/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function. At that
   point we call this function to turn a dummy object into a use of the
   'this' capture. */

tree
maybe_resolve_dummy (tree object)
{
  if (!is_dummy_object (object))
    return object;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    {
      /* In a lambda, need to go through 'this' capture. */
      tree lam = CLASSTYPE_LAMBDA_EXPR (current_class_type);
      tree cap = lambda_expr_this_capture (lam);
      object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
                                     RO_NULL, tf_warning_or_error);
    }

  return object;
}

/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none. */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  if (!current_class_ref)
    return NULL_TREE;

  type = current_class_type;
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function. */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}

/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
   indicated FN and NARGS, but do not initialize the return type or any of the
   argument slots. */

static tree
prepare_op_call (tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}

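/* For illustration, the conversion added by maybe_add_lambda_conv_op below
   is what makes a captureless lambda usable as a plain function pointer:

     int (*fp) (int) = [] (int i) { return i + 1; };

   The conversion operator returns the address of a static member function
   "_FUN" whose body simply forwards to the closure's operator(). */
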
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer. */

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (current_function_decl != NULL_TREE);
  tree callop = lambda_function (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p
    = (DECL_TEMPLATE_INFO (callop)
       && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop)) == callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up. */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this'). Templates are
     deferred and the CALL is built in-place. In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place. The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL. In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL. */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree fn_result = TREE_TYPE (TREE_TYPE (callop));

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
         '_FUN' and, potentially, prepare another call to be used in a decltype
         return expression for a deduced return call op to allow for simple
         implementation of the conversion operator. */

      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
                              instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
        decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (build1 (NOP_EXPR,
                                         TREE_TYPE (DECL_ARGUMENTS (callop)),
                                         null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below. For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case). If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call. */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
        tree new_node = copy_node (src);

        if (!fn_args)
          fn_args = tgt = new_node;
        else
          {
            TREE_CHAIN (tgt) = new_node;
            tgt = new_node;
          }

        mark_exp_read (tgt);

        if (generic_lambda_p)
          {
            if (DECL_PACK_P (tgt))
              {
                tree a = make_pack_expansion (tgt);
                if (decltype_call)
                  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
                PACK_EXPANSION_LOCAL_P (a) = true;
                CALL_EXPR_ARG (call, ix) = a;
              }
            else
              {
                tree a = convert_from_reference (tgt);
                CALL_EXPR_ARG (call, ix) = a;
                if (decltype_call)
                  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
              }
            ++ix;
          }
        else
          vec_safe_push (direct_argvec, tgt);

        src = TREE_CHAIN (src);
      }
  }


  if (generic_lambda_p)
    {
      if (decltype_call)
        {
          ++processing_template_decl;
          fn_result = finish_decltype_type
            (decltype_call, /*id_expression_or_member_access_p=*/false,
             tf_warning_or_error);
          --processing_template_decl;
        }
    }
  else
    call = build_call_a (callop,
                         direct_argvec->length (),
                         direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));

  /* First build up the conversion op. */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);

  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;

  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
  if (nested)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used. */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned. */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings. */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression. */
    ++function_depth;

  /* Generate the body of the thunk. */

  start_preparsed_function (statfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op. */
      symtab_add_to_same_comdat_group
        (cgraph_get_create_node (statfn),
         cgraph_get_create_node (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
        call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op. */

  start_preparsed_function (convfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used. */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}

/* Returns true iff VAL is a lambda-related declaration which should
   be ignored by unqualified lookup. */

bool
is_lambda_ignored_entity (tree val)
{
  /* In unevaluated context, look past normal capture proxies. */
  if (cp_unevaluated_operand && is_normal_capture_proxy (val))
    return true;

  /* Always ignore lambda fields, their names are only for debugging. */
  if (TREE_CODE (val) == FIELD_DECL
      && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
    return true;

  /* None of the lookups that use qualify_lookup want the op() from the
     lambda; they want the one from the enclosing class. */
  if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
    return true;

  return false;
}