re PR debug/66691 (ICE on valid code at -O3 with -g enabled in simplify_subreg, at...
[gcc.git] / gcc / cp / lambda.c
1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
5
6 Copyright (C) 1998-2015 Free Software Foundation, Inc.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
14
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "alias.h"
28 #include "symtab.h"
29 #include "options.h"
30 #include "tree.h"
31 #include "stringpool.h"
32 #include "tm.h"
33 #include "hard-reg-set.h"
34 #include "function.h"
35 #include "cgraph.h"
36 #include "tree-iterator.h"
37 #include "cp-tree.h"
38 #include "toplev.h"
39
40 /* Constructor for a lambda expression. */
41
42 tree
43 build_lambda_expr (void)
44 {
45 tree lambda = make_node (LAMBDA_EXPR);
46 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
47 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
48 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
49 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
50 LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE;
51 LAMBDA_EXPR_MUTABLE_P (lambda) = false;
52 return lambda;
53 }
54
/* Create the closure object for a LAMBDA_EXPR by aggregate-initializing
   the closure type from the capture initializers.  Returns the resulting
   expression, LAMBDA_EXPR itself inside a template, or error_mark_node
   on error.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  /* In a template we can't build the object yet; just hand back the
     LAMBDA_EXPR for later substitution.  */
  if (processing_template_decl)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  /* Walk the capture list, turning each (field, initializer) pair into a
     CONSTRUCTOR element for the closure object.  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      /* Capturing a variable odr-uses it.  */
      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  /* Restore the location saved above on every exit path.  */
  input_location = saved_loc;
  return expr;
}
127
128 /* Return an initialized RECORD_TYPE for LAMBDA.
129 LAMBDA must have its explicit captures already. */
130
131 tree
132 begin_lambda_type (tree lambda)
133 {
134 tree type;
135
136 {
137 /* Unique name. This is just like an unnamed class, but we cannot use
138 make_anon_name because of certain checks against TYPE_ANONYMOUS_P. */
139 tree name;
140 name = make_lambda_name ();
141
142 /* Create the new RECORD_TYPE for this lambda. */
143 type = xref_tag (/*tag_code=*/record_type,
144 name,
145 /*scope=*/ts_lambda,
146 /*template_header_p=*/false);
147 if (type == error_mark_node)
148 return error_mark_node;
149 }
150
151 /* Designate it as a struct so that we can use aggregate initialization. */
152 CLASSTYPE_DECLARED_CLASS (type) = false;
153
154 /* Cross-reference the expression and the type. */
155 LAMBDA_EXPR_CLOSURE (lambda) = type;
156 CLASSTYPE_LAMBDA_EXPR (type) = lambda;
157
158 /* Clear base types. */
159 xref_basetypes (type, /*bases=*/NULL_TREE);
160
161 /* Start the class. */
162 type = begin_class_definition (type);
163
164 return type;
165 }
166
/* Returns the type to use for the return type of the operator() of a
   closure class.  EXPR is the returned expression, or NULL_TREE for a
   lambda body with no return statement.  */

tree
lambda_return_type (tree expr)
{
  /* No return expression means a void operator().  */
  if (expr == NULL_TREE)
    return void_type_node;
  /* An overload set, unresolved member, or braced-init-list has no type
     to deduce from; diagnose and recover with void.  */
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return void_type_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  /* Deduce like 'auto': decay arrays/functions and strip cv-qualifiers.  */
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}
184
185 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
186 closure type. */
187
188 tree
189 lambda_function (tree lambda)
190 {
191 tree type;
192 if (TREE_CODE (lambda) == LAMBDA_EXPR)
193 type = LAMBDA_EXPR_CLOSURE (lambda);
194 else
195 type = lambda;
196 gcc_assert (LAMBDA_TYPE_P (type));
197 /* Don't let debug_tree cause instantiation. */
198 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
199 && !COMPLETE_OR_OPEN_TYPE_P (type))
200 return NULL_TREE;
201 lambda = lookup_member (type, ansi_opname (CALL_EXPR),
202 /*protect=*/0, /*want_type=*/false,
203 tf_warning_or_error);
204 if (lambda)
205 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
206 return lambda;
207 }
208
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P is true for an init-capture
   ([x = expr]), which deduces its type like 'auto'.
   The caller should add REFERENCE_TYPE for capture by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p)
{
  tree type;
  if (explicit_init_p)
    {
      /* Init-capture: deduce the member type from the initializer the
	 same way 'auto x = expr;' would.  */
      type = make_auto ();
      type = do_auto_deduction (type, expr, type);
    }
  else
    type = non_reference (unlowered_expr_type (expr));
  if (type_dependent_expression_p (expr)
      && !is_this_parameter (tree_strip_nop_conversions (expr)))
    {
      /* Inside a template the type can't be computed yet; represent it
	 as a DECLTYPE_TYPE wrapping the captured expression, resolved
	 at instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  return type;
}
235
236 /* Returns true iff DECL is a lambda capture proxy variable created by
237 build_capture_proxy. */
238
239 bool
240 is_capture_proxy (tree decl)
241 {
242 return (VAR_P (decl)
243 && DECL_HAS_VALUE_EXPR_P (decl)
244 && !DECL_ANON_UNION_VAR_P (decl)
245 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
246 }
247
/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  /* The value expression is a COMPONENT_REF of the capture field;
     check the normal-capture flag on that FIELD_DECL.  */
  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}
271
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  cp_binding_level *b;
  tree stmt_list;

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  b = current_binding_level;
  /* Walk outward until the next level is the parameter scope; B is then
     the outermost body block.  */
  for (;;)
    {
      cp_binding_level *n = b->level_chain;
      if (n->kind == sk_function_parms)
	break;
      b = n;
    }
  pushdecl_with_scope (var, b, false);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.
     Index 1 of stmt_list_stack is that outer body block's list.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
299
/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  /* Only relevant when we're back inside an enclosing lambda's op().  */
  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  /* The pending vector is no longer needed; return it to the pool.  */
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}
324
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  /* Look through the implicit dereference of a reference capture.  */
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      /* Dependent case: defer to instantiation via a DECLTYPE_TYPE
	 wrapping the member reference.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  /* A capture of a parameter pack yields a pack-expansion proxy type.  */
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
351
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  Returns the proxy VAR_DECL.  */

tree
build_capture_proxy (tree member)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field, accessed through
     the op()'s 'this' parameter (its first argument).  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex captured by
	 add_capture (see vla_capture_type).  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  /* The proxy is an artificial variable whose DECL_VALUE_EXPR is the
     member access built above.  */
  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (name == this_identifier)
    {
      /* Record the proxy as the 'this' capture from here on.  */
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  /* If we're already inside the op(), insert the proxy now; otherwise
     queue it for insert_pending_capture_proxies.  */
  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
413
/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  ARRAY_TYPE is the VLA type being captured;
   the struct has fields "ptr" (pointer to element) and "max" (max index).  */

static tree
vla_capture_type (tree array_type)
{
  /* Cache the field identifiers across calls.  */
  static tree ptr_id, max_id;
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
436
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  EXPLICIT_INIT_P marks an init-capture ([x = expr]).
   Returns the capture proxy, NULL_TREE if deferred or duplicate, or
   error_mark_node on error.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* Capturing a pack: work with the pattern, remember to re-expand.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST)
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      /* Other variably-modified types (not N3639 VLAs) can't be captured
	 at all; diagnose and poison the type.  */
      error ("capture of variable-size type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p);
      if (by_reference_p)
	{
	  type = build_reference_type (type);
	  if (!dependent_type_p (type) && !real_lvalue_p (initializer))
	    error ("cannot capture %qE by reference", initializer);
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!dependent_type_p (type) && !COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
	{
	  pedwarn (input_location, 0,
		   "already captured %qD in lambda expression", id);
	  return NULL_TREE;
	}
      /* Cleared again in register_capture_members.  */
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    finish_member_declaration (member);

  /* For a pack capture, record the expansion (and the original pack
     initializer) on the capture list, not the pattern.  */
  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}
574
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.
   Recursing before processing each node declares the members in the
   reverse of list order, i.e. in source order.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  /* Process the tail first so members are declared in source order.  */
  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}
594
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  /* Walk from the outermost lambda inward; each iteration captures in
     one lambda and feeds the resulting proxy to the next one in.  */
  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      /* add_capture needs the closure type to be current_class_type.  */
      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (!this_capture_p
			  && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false);
      /* The inner lambda captures this lambda's proxy.  */
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}
634
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  Returns error_mark_node when
   'this' cannot be captured, or NULL_TREE when not capturing and no
   capture exists.  */

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
	  || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      /* Lambdas traversed so far, outermost first once built.  */
      tree lambda_stack = NULL_TREE;
      /* The expression that ultimately supplies 'this', if any.  */
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
	   1. a non-lambda function or NSDMI,
	   2. a lambda function capturing 'this', or
	   3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  lambda_stack = tree_cons (NULL_TREE,
				    tlambda,
				    lambda_stack);

	  if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
	      && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
	    {
	      /* In an NSDMI, we don't have a function to look up the decl in,
		 but the fake 'this' pointer that we're using for parsing is
		 in scope_chain.  */
	      init = scope_chain->x_current_class_ptr;
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
	  tree containing_function = decl_function_context (closure_decl);

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  /* Step out to the enclosing lambda.  */
	  tlambda
	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }

	  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* An outer lambda won't let us capture 'this'.  */
	    break;
	}

      if (init)
	{
	  if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
						/*id=*/this_identifier,
						init);
	  else
	    this_capture = init;
	}
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
	 ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}
752
/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  Returns OBJECT unchanged if it isn't a dummy or
   resolution isn't possible.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (!is_dummy_object (object))
    return object;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  /* Only resolve when we're inside a lambda's op() and the target class
     is a base of the enclosing (non-lambda) class.  */
  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    {
      /* In a lambda, need to go through 'this' capture.  */
      tree lam = CLASSTYPE_LAMBDA_EXPR (current_class_type);
      tree cap = lambda_expr_this_capture (lam, add_capture_p);
      if (cap && cap != error_mark_node)
	object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
				       RO_NULL, tf_warning_or_error);
    }

  return object;
}
783
784 /* Returns the innermost non-lambda function. */
785
786 tree
787 current_nonlambda_function (void)
788 {
789 tree fn = current_function_decl;
790 while (fn && LAMBDA_FUNCTION_P (fn))
791 fn = decl_function_context (fn);
792 return fn;
793 }
794
/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  /* No object context at all.  */
  if (!current_class_ref)
    return NULL_TREE;

  type = current_class_type;
  /* Not inside a lambda: the current class is the answer.  */
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function.  */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}
820
821 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
822 indicated FN and NARGS, but do not initialize the return type or any of the
823 argument slots. */
824
825 static tree
826 prepare_op_call (tree fn, int nargs)
827 {
828 tree t;
829
830 t = build_vl_exp (CALL_EXPR, nargs + 3);
831 CALL_EXPR_FN (t) = fn;
832 CALL_EXPR_STATIC_CHAIN (t) = NULL;
833
834 return t;
835 }
836
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  Builds two members: a static "_FUN" thunk that forwards to the
   op(), and a conversion operator returning its address.  Only applies to
   captureless lambdas.  */

void
maybe_add_lambda_conv_op (tree type)
{
  /* Whether we're inside another function's body right now.  */
  bool nested = (cfun != NULL);
  /* Whether the lambda itself was defined at function scope.  */
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);

  /* Only captureless lambdas convert to function pointer.  */
  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p
    = (DECL_TEMPLATE_INFO (callop)
       && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop)) == callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree fn_result = TREE_TYPE (TREE_TYPE (callop));

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      /* Don't count the 'this' parameter.  */
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      /* Non-generic: the 'this' argument is a null closure pointer.  */
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (build1 (NOP_EXPR,
					 TREE_TYPE (DECL_ARGUMENTS (callop)),
					 null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    /* Skip the 'this' parameter of the op().  */
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
	tree new_node = copy_node (src);

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    if (DECL_PACK_P (tgt))
	      {
		tree a = make_pack_expansion (tgt);
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
		PACK_EXPANSION_LOCAL_P (a) = true;
		CALL_EXPR_ARG (call, ix) = a;
	      }
	    else
	      {
		tree a = convert_from_reference (tgt);
		CALL_EXPR_ARG (call, ix) = a;
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
	      }
	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }


  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  /* Compute the deduced return type from the decltype call,
	     temporarily back inside template processing.  */
	  ++processing_template_decl;
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	  --processing_template_decl;
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);

  /* Keep function pointers 2-aligned when the vbit lives in the pointer,
     so pointer-to-member-function representation stays valid.  */
  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;

  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
1103
1104 /* Returns true iff VAL is a lambda-related declaration which should
1105 be ignored by unqualified lookup. */
1106
1107 bool
1108 is_lambda_ignored_entity (tree val)
1109 {
1110 /* In unevaluated context, look past normal capture proxies. */
1111 if (cp_unevaluated_operand && is_normal_capture_proxy (val))
1112 return true;
1113
1114 /* Always ignore lambda fields, their names are only for debugging. */
1115 if (TREE_CODE (val) == FIELD_DECL
1116 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1117 return true;
1118
1119 /* None of the lookups that use qualify_lookup want the op() from the
1120 lambda; they want the one from the enclosing class. */
1121 if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
1122 return true;
1123
1124 return false;
1125 }