Introduce gcc_rich_location::add_fixit_misspelled_id
[gcc.git] / gcc / cp / lambda.c
1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
5
6 Copyright (C) 1998-2016 Free Software Foundation, Inc.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
14
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33 #include "cp-cilkplus.h"
34
35 /* Constructor for a lambda expression. */
36
37 tree
38 build_lambda_expr (void)
39 {
40 tree lambda = make_node (LAMBDA_EXPR);
41 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
42 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
43 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
44 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
45 LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE;
46 LAMBDA_EXPR_MUTABLE_P (lambda) = false;
47 return lambda;
48 }
49
/* Create the closure object for a LAMBDA_EXPR: build a CONSTRUCTOR whose
   elements initialize each capture field of the closure type from the
   corresponding entry in LAMBDA_EXPR_CAPTURE_LIST, then turn it into a
   compound literal of the closure type.  Returns the new object, or
   error_mark_node if any capture is erroneous.  Inside a template, the
   LAMBDA_EXPR itself is returned unchanged.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      /* Each list node pairs the capture's FIELD_DECL (TREE_PURPOSE)
	 with its initializer expression (TREE_VALUE).  */
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  /* Restore the caller's location on both the success and error paths.  */
  input_location = saved_loc;
  return expr;
}
122
123 /* Return an initialized RECORD_TYPE for LAMBDA.
124 LAMBDA must have its explicit captures already. */
125
126 tree
127 begin_lambda_type (tree lambda)
128 {
129 tree type;
130
131 {
132 /* Unique name. This is just like an unnamed class, but we cannot use
133 make_anon_name because of certain checks against TYPE_ANONYMOUS_P. */
134 tree name;
135 name = make_lambda_name ();
136
137 /* Create the new RECORD_TYPE for this lambda. */
138 type = xref_tag (/*tag_code=*/record_type,
139 name,
140 /*scope=*/ts_lambda,
141 /*template_header_p=*/false);
142 if (type == error_mark_node)
143 return error_mark_node;
144 }
145
146 /* Designate it as a struct so that we can use aggregate initialization. */
147 CLASSTYPE_DECLARED_CLASS (type) = false;
148
149 /* Cross-reference the expression and the type. */
150 LAMBDA_EXPR_CLOSURE (lambda) = type;
151 CLASSTYPE_LAMBDA_EXPR (type) = lambda;
152
153 /* Clear base types. */
154 xref_basetypes (type, /*bases=*/NULL_TREE);
155
156 /* Start the class. */
157 type = begin_class_definition (type);
158
159 return type;
160 }
161
162 /* Returns the type to use for the return type of the operator() of a
163 closure class. */
164
165 tree
166 lambda_return_type (tree expr)
167 {
168 if (expr == NULL_TREE)
169 return void_type_node;
170 if (type_unknown_p (expr)
171 || BRACE_ENCLOSED_INITIALIZER_P (expr))
172 {
173 cxx_incomplete_type_error (expr, TREE_TYPE (expr));
174 return error_mark_node;
175 }
176 gcc_checking_assert (!type_dependent_expression_p (expr));
177 return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
178 }
179
/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type, or NULL_TREE when it can't be found without forcing an
   instantiation.  */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  /* Look up operator() in the closure, skipping access checks
     (protect=0).  */
  lambda = lookup_member (type, ansi_opname (CALL_EXPR),
			  /*protect=*/0, /*want_type=*/false,
			  tf_warning_or_error);
  if (lambda)
    /* Reduce an overload set / template to the underlying FUNCTION_DECL.  */
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}
203
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P is true for an init-capture
   ([x = expr]), whose type is deduced as if by auto.
   The caller should add REFERENCE_TYPE for capture by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));
  if (!is_this && type_dependent_expression_p (expr))
    {
      /* Type-dependent capture: defer by wrapping the expression in a
	 DECLTYPE_TYPE to be resolved at instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      /* Init-capture: deduce the member's type from the initializer.  */
      type = make_auto ();
      type = do_auto_deduction (type, expr, type);
    }
  else
    /* Plain copy capture (or 'this'): the non-reference expression type.  */
    type = non_reference (unlowered_expr_type (expr));
  return type;
}
230
231 /* Returns true iff DECL is a lambda capture proxy variable created by
232 build_capture_proxy. */
233
234 bool
235 is_capture_proxy (tree decl)
236 {
237 return (VAR_P (decl)
238 && DECL_HAS_VALUE_EXPR_P (decl)
239 && !DECL_ANON_UNION_VAR_P (decl)
240 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
241 }
242
/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  /* The value expression is a COMPONENT_REF into the closure object;
     operand 1 is the capture's FIELD_DECL, which carries the
     normal-capture flag set by add_capture.  */
  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}
266
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  cp_binding_level *b;
  tree stmt_list;

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  b = current_binding_level;
  for (;;)
    {
      /* Walk outward until the next level is the function-parameter
	 scope; B then denotes the outermost body block.  */
      cp_binding_level *n = b->level_chain;
      if (n->kind == sk_function_parms)
	break;
      b = n;
    }
  pushdecl_with_scope (var, b, false);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  /* NOTE(review): index 1 of stmt_list_stack appears to be the statement
     list of that same extra body block — confirm this invariant against
     begin_function_body before relying on it elsewhere.  */
  stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
294
295 /* We've just finished processing a lambda; if the containing scope is also
296 a lambda, insert any capture proxies that were created while processing
297 the nested lambda. */
298
299 void
300 insert_pending_capture_proxies (void)
301 {
302 tree lam;
303 vec<tree, va_gc> *proxies;
304 unsigned i;
305
306 if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
307 return;
308
309 lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
310 proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
311 for (i = 0; i < vec_safe_length (proxies); ++i)
312 {
313 tree var = (*proxies)[i];
314 insert_capture_proxy (var);
315 }
316 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
317 LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
318 }
319
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  Returns error_mark_node if REF is erroneous.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      /* The field's type isn't known yet; defer via a DECLTYPE_TYPE
	 wrapping the reference, resolved at instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    /* The capture field is a pack; the proxy type expands with it.  */
    type = make_pack_expansion (type);
  return type;
}
346
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging: a VAR_DECL whose DECL_VALUE_EXPR forwards to the field.
   Returns the proxy variable.  */

tree
build_capture_proxy (tree member)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (name == this_identifier)
    {
      /* For a 'this' capture, record the proxy as the capture itself.  */
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  /* If we're already inside the op(), insert the proxy now; otherwise
     queue it for insert_pending_capture_proxies.  */
  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
408
/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length (see add_capture / build_capture_proxy for
   how the pair is packed and later unpacked).  */

static tree
vla_capture_type (tree array_type)
{
  /* The field identifiers are interned once and shared by every VLA
     capture type we create.  */
  static tree ptr_id, max_id;
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  /* 'ptr' holds the address of the first element; 'max' the maximum
     index, stored as sizetype.  */
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
431
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  EXPLICIT_INIT_P is true for an init-capture.
   Returns the capture proxy if the closure body has been started,
   NULL_TREE otherwise (or on a duplicate-capture diagnostic), and
   error_mark_node on error.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* A pack-expansion capture: work on the pattern, and remember to
     re-expand the member and initializer below.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST)
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      /* Other variably-modified types (not N3639 VLAs) can't be
	 captured at all.  */
      error ("capture of variable-size type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p);
      if (by_reference_p)
	{
	  type = build_reference_type (type);
	  if (!dependent_type_p (type) && !real_lvalue_p (initializer))
	    error ("cannot capture %qE by reference", initializer);
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!dependent_type_p (type) && !COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
	{
	  pedwarn (input_location, 0,
		   "already captured %qD in lambda expression", id);
	  return NULL_TREE;
	}
      /* Cleared again in register_capture_members.  */
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    finish_member_declaration (member);

  /* Record the capture on the list, prepending (so the list ends up in
     reverse source order; see register_capture_members).  */
  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}
569
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  /* Recurse before handling this node: add_capture builds the list by
     prepending, so recursing first declares members in source order.  */
  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}
589
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  /* add_capture consults current_class_type; save it so we can restore
     after walking the stack.  */
  tree saved_class_type = current_class_type;

  tree node;

  /* Walk from the outermost lambda inward, capturing ID in each one;
     the capture of one level becomes the initializer for the next.  */
  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (!this_capture_p
			  && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}
629
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  Returns error_mark_node when
   'this' is needed but was not (and cannot be) captured, and NULL_TREE
   when there is no capture and ADD_CAPTURE_P is false.  */

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
	  || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
	   1. a non-lambda function or NSDMI,
	   2. a lambda function capturing 'this', or
	   3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  /* Accumulate the chain of lambdas we walked through, so
	     add_default_capture can capture 'this' at every level.  */
	  lambda_stack = tree_cons (NULL_TREE,
				    tlambda,
				    lambda_stack);

	  if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
	      && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
	    {
	      /* In an NSDMI, we don't have a function to look up the decl in,
		 but the fake 'this' pointer that we're using for parsing is
		 in scope_chain.  */
	      init = scope_chain->x_current_class_ptr;
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
	  tree containing_function = decl_function_context (closure_decl);

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  /* Move out one lambda level and keep looking.  */
	  tlambda
	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }

	  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* An outer lambda won't let us capture 'this'.  */
	    break;
	}

      if (init)
	{
	  if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
						/*id=*/this_identifier,
						init);
	  else
	    this_capture = init;
	}
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
	 ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}
747
/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  Returns OBJECT unchanged when it isn't a dummy or
   when no capture applies.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (!is_dummy_object (object))
    return object;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  /* Only rewrite when we're inside a lambda's op() and the dummy's type
     is a base of the enclosing (non-lambda) class.  */
  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    {
      /* In a lambda, need to go through 'this' capture.  */
      tree lam = CLASSTYPE_LAMBDA_EXPR (current_class_type);
      tree cap = lambda_expr_this_capture (lam, add_capture_p);
      if (cap && cap != error_mark_node)
	object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
				       RO_NULL, tf_warning_or_error);
    }

  return object;
}
778
779 /* Returns the innermost non-lambda function. */
780
781 tree
782 current_nonlambda_function (void)
783 {
784 tree fn = current_function_decl;
785 while (fn && LAMBDA_FUNCTION_P (fn))
786 fn = decl_function_context (fn);
787 return fn;
788 }
789
/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none (no object context, or the enclosing function is not
   a non-static member function).  */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  if (!current_class_ref)
    return NULL_TREE;

  type = current_class_type;
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function.  */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  /* The 'this' type of that member function.  */
  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}
815
816 /* Like current_scope, but looking through lambdas. */
817
818 tree
819 current_nonlambda_scope (void)
820 {
821 tree scope = current_scope ();
822 for (;;)
823 {
824 if (TREE_CODE (scope) == FUNCTION_DECL
825 && LAMBDA_FUNCTION_P (scope))
826 {
827 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
828 continue;
829 }
830 else if (LAMBDA_TYPE_P (scope))
831 {
832 scope = CP_TYPE_CONTEXT (scope);
833 continue;
834 }
835 break;
836 }
837 return scope;
838 }
839
840 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
841 indicated FN and NARGS, but do not initialize the return type or any of the
842 argument slots. */
843
844 static tree
845 prepare_op_call (tree fn, int nargs)
846 {
847 tree t;
848
849 t = build_vl_exp (CALL_EXPR, nargs + 3);
850 CALL_EXPR_FN (t) = fn;
851 CALL_EXPR_STATIC_CHAIN (t) = NULL;
852
853 return t;
854 }
855
856 /* Return true iff CALLOP is the op() for a generic lambda. */
857
858 bool
859 generic_lambda_fn_p (tree callop)
860 {
861 return (LAMBDA_FUNCTION_P (callop)
862 && DECL_TEMPLATE_INFO (callop)
863 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
864 }
865
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  Only captureless lambdas qualify (the early return below);
   the conversion op returns the address of a static thunk "_FUN" whose
   body forwards to op() with a null object argument.  */

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  /* Only a lambda with no captures (and no capture-default) converts to
     a function pointer.  */
  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      /* Argument count excludes the implicit 'this'.  */
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      /* The object argument is a null pointer of the closure type.  */
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (build1 (NOP_EXPR,
					 TREE_TYPE (DECL_ARGUMENTS (callop)),
					 null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    /* Skip 'this'; start at the first real parameter.  */
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
	tree new_node = copy_node (src);

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    /* Build the 'std::forward'-style argument in template
	       context so dependent constructs are allowed.  */
	    ++processing_template_decl;
	    tree a = forward_parm (tgt);
	    --processing_template_decl;

	    CALL_EXPR_ARG (call, ix) = a;
	    if (decltype_call)
	      CALL_EXPR_ARG (decltype_call, ix) = unshare_expr (a);

	    if (PACK_EXPANSION_P (a))
	      /* Set this after unsharing so it's not in decltype_call.  */
	      PACK_EXPANSION_LOCAL_P (a) = true;

	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }


  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  /* Compute the deduced return type from the decltype call.  */
	  ++processing_template_decl;
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	  --processing_template_decl;
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  /* The static thunk has the op()'s signature minus 'this'.  */
  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
	      (stattype, TYPE_ATTRIBUTES (optype)));

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    {
      /* Don't UBsan this function; we're deliberately calling op() with a null
	 object argument.  */
      tree attrs = build_tree_list (get_identifier ("no_sanitize_undefined"),
				    NULL_TREE);
      cplus_decl_attributes (&fn, attrs, 0);
    }

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  /* The conversion op simply returns the address of _FUN.  */
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
1134
1135 /* Returns true iff VAL is a lambda-related declaration which should
1136 be ignored by unqualified lookup. */
1137
1138 bool
1139 is_lambda_ignored_entity (tree val)
1140 {
1141 /* In unevaluated context, look past normal capture proxies. */
1142 if (cp_unevaluated_operand && is_normal_capture_proxy (val))
1143 return true;
1144
1145 /* Always ignore lambda fields, their names are only for debugging. */
1146 if (TREE_CODE (val) == FIELD_DECL
1147 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1148 return true;
1149
1150 /* None of the lookups that use qualify_lookup want the op() from the
1151 lambda; they want the one from the enclosing class. */
1152 if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
1153 return true;
1154
1155 return false;
1156 }