1 /* Perform the semantic phase of parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
5
6 Copyright (C) 1998-2019 Free Software Foundation, Inc.
7 Written by Mark Mitchell (mmitchell@usa.net) based on code found
8 formerly in parse.y and pt.c.
9
10 This file is part of GCC.
11
12 GCC is free software; you can redistribute it and/or modify it
13 under the terms of the GNU General Public License as published by
14 the Free Software Foundation; either version 3, or (at your option)
15 any later version.
16
17 GCC is distributed in the hope that it will be useful, but
18 WITHOUT ANY WARRANTY; without even the implied warranty of
19 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 General Public License for more details.
21
22 You should have received a copy of the GNU General Public License
23 along with GCC; see the file COPYING3. If not see
24 <http://www.gnu.org/licenses/>. */
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "target.h"
30 #include "bitmap.h"
31 #include "cp-tree.h"
32 #include "stringpool.h"
33 #include "cgraph.h"
34 #include "stmt.h"
35 #include "varasm.h"
36 #include "stor-layout.h"
37 #include "c-family/c-objc.h"
38 #include "tree-inline.h"
39 #include "intl.h"
40 #include "tree-iterator.h"
41 #include "omp-general.h"
42 #include "convert.h"
43 #include "stringpool.h"
44 #include "attribs.h"
45 #include "gomp-constants.h"
46 #include "predict.h"
47 #include "memmodel.h"
48
49 /* These routines provide a modular interface to perform many parsing
50 operations. They may therefore be used during actual parsing, or
51 during template instantiation, which may be regarded as a
52 degenerate form of parsing. */
53
54 static tree maybe_convert_cond (tree);
55 static tree finalize_nrv_r (tree *, int *, void *);
56 static tree capture_decltype (tree);
57
58 /* Used for OpenMP non-static data member privatization. */
59
60 static hash_map<tree, tree> *omp_private_member_map;
61 static vec<tree> omp_private_member_vec;
62 static bool omp_private_member_ignore_next;
63
64
65 /* Deferred Access Checking Overview
66 ---------------------------------
67
68 Most C++ expressions and declarations require access checking
69 to be performed during parsing. However, in several cases,
70 this has to be treated differently.
71
72 For member declarations, access checking has to be deferred
73 until more information about the declaration is known. For
74 example:
75
76 class A {
77 typedef int X;
78 public:
79 X f();
80 };
81
82 A::X A::f();
83 A::X g();
84
85 When we are parsing the function return type `A::X', we don't
86 really know if this is allowed until we parse the function name.
87
88 Furthermore, some contexts require that access checking is
89 never performed at all. These include class heads, and template
90 instantiations.
91
92 Typical use of access checking functions is described here:
93
94 1. When we enter a context that requires a certain access-checking
95 mode, the function `push_deferring_access_checks' is called with
96 a DEFERRING argument specifying the desired mode. Access checking
97 may be performed immediately (dk_no_deferred), deferred
98 (dk_deferred), or not performed (dk_no_check).
99
100 2. When a declaration such as a type, or a variable, is encountered,
101 the function `perform_or_defer_access_check' is called. It
102 maintains a vector of all deferred checks.
103
104 3. The global `current_class_type' or `current_function_decl' is then
105 set up by the parser. `enforce_access' relies on this information
106 to check access.
107
108 4. Upon exiting the context mentioned in step 1,
109 `perform_deferred_access_checks' is called to check all declarations
110 stored in the vector. `pop_deferring_access_checks' is then
111 called to restore the previous access checking mode.
112
113 In case of a parsing error, we simply call `pop_deferring_access_checks'
114 without `perform_deferred_access_checks'. */
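
/* As a hedged sketch of steps 1-4 above (the surrounding parser logic
   is only illustrative, not actual code from this file):

     push_deferring_access_checks (dk_deferred);
     ... parse the declaration, calling
         perform_or_defer_access_check (binfo, decl, decl, complain);
         for each name whose access must be validated ...
     perform_deferred_access_checks (tf_warning_or_error);
     pop_deferring_access_checks ();

   On a parse error the perform_deferred_access_checks call is simply
   skipped before popping, as noted above.  */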
115
116 struct GTY(()) deferred_access {
117 /* A vector representing name-lookups for which we have deferred
118 checking access controls. We cannot check the accessibility of
119 names used in a decl-specifier-seq until we know what is being
120 declared because code like:
121
122 class A {
123 class B {};
124 B* f();
125 }
126
127 A::B* A::f() { return 0; }
128
129 is valid, even though `A::B' is not generally accessible. */
130 vec<deferred_access_check, va_gc> * GTY(()) deferred_access_checks;
131
132 /* The current mode of access checks. */
133 enum deferring_kind deferring_access_checks_kind;
134
135 };
136
137 /* Data for deferred access checking. */
138 static GTY(()) vec<deferred_access, va_gc> *deferred_access_stack;
139 static GTY(()) unsigned deferred_access_no_check;
140
141 /* Save the current deferred access states and start a new round of
142 access checking in the mode given by DEFERRING. */
143
144 void
145 push_deferring_access_checks (deferring_kind deferring)
146 {
147 /* For contexts like template instantiation, disabling access
148 checking applies to all nested contexts. */
149 if (deferred_access_no_check || deferring == dk_no_check)
150 deferred_access_no_check++;
151 else
152 {
153 deferred_access e = {NULL, deferring};
154 vec_safe_push (deferred_access_stack, e);
155 }
156 }
157
158 /* Save the current deferred access states and start deferred access
159 checking, continuing the set of deferred checks in CHECKS. */
160
161 void
162 reopen_deferring_access_checks (vec<deferred_access_check, va_gc> * checks)
163 {
164 push_deferring_access_checks (dk_deferred);
165 if (!deferred_access_no_check)
166 deferred_access_stack->last().deferred_access_checks = checks;
167 }
168
169 /* Resume deferring access checks again after we stopped doing
170 this previously. */
171
172 void
173 resume_deferring_access_checks (void)
174 {
175 if (!deferred_access_no_check)
176 deferred_access_stack->last().deferring_access_checks_kind = dk_deferred;
177 }
178
179 /* Stop deferring access checks. */
180
181 void
182 stop_deferring_access_checks (void)
183 {
184 if (!deferred_access_no_check)
185 deferred_access_stack->last().deferring_access_checks_kind = dk_no_deferred;
186 }
187
188 /* Discard the current deferred access checks and restore the
189 previous states. */
190
191 void
192 pop_deferring_access_checks (void)
193 {
194 if (deferred_access_no_check)
195 deferred_access_no_check--;
196 else
197 deferred_access_stack->pop ();
198 }
199
200 /* Returns the vector of deferred access checks for the current
201 context, or NULL if checking is disabled. Each element records
202 the BINFO through which the access occurred and the declaration
203 named. */
204
205 vec<deferred_access_check, va_gc> *
206 get_deferred_access_checks (void)
207 {
208 if (deferred_access_no_check)
209 return NULL;
210 else
211 return (deferred_access_stack->last().deferred_access_checks);
212 }
213
214 /* Take the current deferred checks and merge them into the
215 enclosing state if it is also deferring checks.
216 Otherwise perform the checks now. */
217
218 void
219 pop_to_parent_deferring_access_checks (void)
220 {
221 if (deferred_access_no_check)
222 deferred_access_no_check--;
223 else
224 {
225 vec<deferred_access_check, va_gc> *checks;
226 deferred_access *ptr;
227
228 checks = (deferred_access_stack->last ().deferred_access_checks);
229
230 deferred_access_stack->pop ();
231 ptr = &deferred_access_stack->last ();
232 if (ptr->deferring_access_checks_kind == dk_no_deferred)
233 {
234 /* Check access. */
235 perform_access_checks (checks, tf_warning_or_error);
236 }
237 else
238 {
239 /* Merge with parent. */
240 int i, j;
241 deferred_access_check *chk, *probe;
242
243 FOR_EACH_VEC_SAFE_ELT (checks, i, chk)
244 {
245 FOR_EACH_VEC_SAFE_ELT (ptr->deferred_access_checks, j, probe)
246 {
247 if (probe->binfo == chk->binfo &&
248 probe->decl == chk->decl &&
249 probe->diag_decl == chk->diag_decl)
250 goto found;
251 }
252 /* Insert into parent's checks. */
253 vec_safe_push (ptr->deferred_access_checks, *chk);
254 found:;
255 }
256 }
257 }
258 }
259
260 /* Perform the access checks in CHECKS. Each check records the
261 BINFO indicating the qualifying scope used to access the DECL
262 it names. Return TRUE if CHECKS is empty, if we aren't in a
263 SFINAE context, or if all the checks succeed; otherwise return
264 FALSE. */
265
266 bool
267 perform_access_checks (vec<deferred_access_check, va_gc> *checks,
268 tsubst_flags_t complain)
269 {
270 int i;
271 deferred_access_check *chk;
272 location_t loc = input_location;
273 bool ok = true;
274
275 if (!checks)
276 return true;
277
278 FOR_EACH_VEC_SAFE_ELT (checks, i, chk)
279 {
280 input_location = chk->loc;
281 ok &= enforce_access (chk->binfo, chk->decl, chk->diag_decl, complain);
282 }
283
284 input_location = loc;
285 return (complain & tf_error) ? true : ok;
286 }
287
288 /* Perform the deferred access checks.
289
290 After performing the checks, we still have to keep the list
291 `deferred_access_stack->deferred_access_checks' since we may want
292 to check access for them again later in a different context.
293 For example:
294
295 class A {
296 typedef int X;
297 static X a;
298 };
299 A::X A::a, x; // No error for `A::a', error for `x'
300
301 We have to perform the deferred access check of `A::X', first for
302 `A::a', then for `x'. Return value is as for perform_access_checks. */
303
304 bool
305 perform_deferred_access_checks (tsubst_flags_t complain)
306 {
307 return perform_access_checks (get_deferred_access_checks (), complain);
308 }
309
310 /* Defer checking the accessibility of DECL, when looked up in
311 BINFO. DIAG_DECL is the declaration to use to print diagnostics.
312 Return value like perform_access_checks above.
313 If non-NULL, report failures to AFI. */
314
315 bool
316 perform_or_defer_access_check (tree binfo, tree decl, tree diag_decl,
317 tsubst_flags_t complain,
318 access_failure_info *afi)
319 {
320 int i;
321 deferred_access *ptr;
322 deferred_access_check *chk;
323
324
325 /* Exit if we are in a context in which no access checking is
326 performed. */
327 if (deferred_access_no_check)
328 return true;
329
330 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
331
332 ptr = &deferred_access_stack->last ();
333
334 /* If we are not supposed to defer access checks, just check now. */
335 if (ptr->deferring_access_checks_kind == dk_no_deferred)
336 {
337 bool ok = enforce_access (binfo, decl, diag_decl, complain, afi);
338 return (complain & tf_error) ? true : ok;
339 }
340
341 /* See if we are already going to perform this check. */
342 FOR_EACH_VEC_SAFE_ELT (ptr->deferred_access_checks, i, chk)
343 {
344 if (chk->decl == decl && chk->binfo == binfo &&
345 chk->diag_decl == diag_decl)
346 {
347 return true;
348 }
349 }
350 /* If not, record the check. */
351 deferred_access_check new_access = {binfo, decl, diag_decl, input_location};
352 vec_safe_push (ptr->deferred_access_checks, new_access);
353
354 return true;
355 }
356
357 /* Returns nonzero if the current statement is a full expression,
358 i.e. temporaries created during that statement should be destroyed
359 at the end of the statement. */
360
361 int
362 stmts_are_full_exprs_p (void)
363 {
364 return current_stmt_tree ()->stmts_are_full_exprs_p;
365 }
366
367 /* T is a statement. Add it to the statement-tree. This is the C++
368 version. The C/ObjC frontends have a slightly different version of
369 this function. */
370
371 tree
372 add_stmt (tree t)
373 {
374 enum tree_code code = TREE_CODE (t);
375
376 if (EXPR_P (t) && code != LABEL_EXPR)
377 {
378 if (!EXPR_HAS_LOCATION (t))
379 SET_EXPR_LOCATION (t, input_location);
380
381 /* When we expand a statement-tree, we must know whether or not the
382 statements are full-expressions. We record that fact here. */
383 STMT_IS_FULL_EXPR_P (t) = stmts_are_full_exprs_p ();
384 }
385
386 if (code == LABEL_EXPR || code == CASE_LABEL_EXPR)
387 STATEMENT_LIST_HAS_LABEL (cur_stmt_list) = 1;
388
389 /* Add T to the statement-tree. Non-side-effect statements need to be
390 recorded during statement expressions. */
391 gcc_checking_assert (!stmt_list_stack->is_empty ());
392 append_to_statement_list_force (t, &cur_stmt_list);
393
394 return t;
395 }
396
397 /* Returns the stmt_tree to which statements are currently being added. */
398
399 stmt_tree
400 current_stmt_tree (void)
401 {
402 return (cfun
403 ? &cfun->language->base.x_stmt_tree
404 : &scope_chain->x_stmt_tree);
405 }
406
407 /* If statements are full expressions, wrap STMT in a CLEANUP_POINT_EXPR. */
408
409 static tree
410 maybe_cleanup_point_expr (tree expr)
411 {
412 if (!processing_template_decl && stmts_are_full_exprs_p ())
413 expr = fold_build_cleanup_point_expr (TREE_TYPE (expr), expr);
414 return expr;
415 }
416
417 /* Like maybe_cleanup_point_expr, except give the new expression type
418 void so we don't need to create a temporary variable to hold the
419 inner expression. We do this because the original type might be
420 an aggregate, for which we cannot create a temporary variable. */
421
422 tree
423 maybe_cleanup_point_expr_void (tree expr)
424 {
425 if (!processing_template_decl && stmts_are_full_exprs_p ())
426 expr = fold_build_cleanup_point_expr (void_type_node, expr);
427 return expr;
428 }
429
430
431
432 /* Create a declaration statement for the declaration given by DECL. */
433
434 void
435 add_decl_expr (tree decl)
436 {
437 tree r = build_stmt (DECL_SOURCE_LOCATION (decl), DECL_EXPR, decl);
438 if (DECL_INITIAL (decl)
439 || (DECL_SIZE (decl) && TREE_SIDE_EFFECTS (DECL_SIZE (decl))))
440 r = maybe_cleanup_point_expr_void (r);
441 add_stmt (r);
442 }
443
444 /* Finish a scope. */
445
446 tree
447 do_poplevel (tree stmt_list)
448 {
449 tree block = NULL;
450
451 if (stmts_are_full_exprs_p ())
452 block = poplevel (kept_level_p (), 1, 0);
453
454 stmt_list = pop_stmt_list (stmt_list);
455
456 if (!processing_template_decl)
457 {
458 stmt_list = c_build_bind_expr (input_location, block, stmt_list);
459 /* ??? See c_end_compound_stmt re statement expressions. */
460 }
461
462 return stmt_list;
463 }
464
465 /* Begin a new scope. */
466
467 static tree
468 do_pushlevel (scope_kind sk)
469 {
470 tree ret = push_stmt_list ();
471 if (stmts_are_full_exprs_p ())
472 begin_scope (sk, NULL);
473 return ret;
474 }
475
476 /* Queue a cleanup. CLEANUP is an expression/statement to be executed
477 when the current scope is exited. EH_ONLY is true when this is not
478 meant to apply to normal control flow transfer. */
479
480 void
481 push_cleanup (tree decl, tree cleanup, bool eh_only)
482 {
483 tree stmt = build_stmt (input_location, CLEANUP_STMT, NULL, cleanup, decl);
484 CLEANUP_EH_ONLY (stmt) = eh_only;
485 add_stmt (stmt);
486 CLEANUP_BODY (stmt) = push_stmt_list ();
487 }
488
489 /* Simple infinite loop tracking for -Wreturn-type. We keep a stack of all
490 the current loops, represented by 'NULL_TREE' if we've seen a possible
491 exit, and 'error_mark_node' if not. This is currently used only to
492 suppress the warning about a function with no return statements, and
493 therefore we don't bother noting returns as possible exits. We also
494 don't bother with gotos. */
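
/* A hedged illustration (not from the sources; 'wait_for_event' is a
   placeholder):

     int f () { while (true) wait_for_event (); }

   The loop condition is trivially true and the body has no 'break',
   so current_function_infinite_loop is set and the -Wreturn-type
   warning about the missing return statement is not issued.  */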
495
496 static void
497 begin_maybe_infinite_loop (tree cond)
498 {
499 /* Only track this while parsing a function, not during instantiation. */
500 if (!cfun || (DECL_TEMPLATE_INSTANTIATION (current_function_decl)
501 && !processing_template_decl))
502 return;
503 bool maybe_infinite = true;
504 if (cond)
505 {
506 cond = fold_non_dependent_expr (cond);
507 maybe_infinite = integer_nonzerop (cond);
508 }
509 vec_safe_push (cp_function_chain->infinite_loops,
510 maybe_infinite ? error_mark_node : NULL_TREE);
511
512 }
513
514 /* A break is a possible exit for the current loop. */
515
516 void
517 break_maybe_infinite_loop (void)
518 {
519 if (!cfun)
520 return;
521 cp_function_chain->infinite_loops->last() = NULL_TREE;
522 }
523
524 /* If we reach the end of the loop without seeing a possible exit, we have
525 an infinite loop. */
526
527 static void
528 end_maybe_infinite_loop (tree cond)
529 {
530 if (!cfun || (DECL_TEMPLATE_INSTANTIATION (current_function_decl)
531 && !processing_template_decl))
532 return;
533 tree current = cp_function_chain->infinite_loops->pop();
534 if (current != NULL_TREE)
535 {
536 cond = fold_non_dependent_expr (cond);
537 if (integer_nonzerop (cond))
538 current_function_infinite_loop = 1;
539 }
540 }
541
542
543 /* Begin a conditional that might contain a declaration. When generating
544 normal code, we want the declaration to appear before the statement
545 containing the conditional. When generating template code, we want the
546 conditional to be rendered as the raw DECL_EXPR. */
547
548 static void
549 begin_cond (tree *cond_p)
550 {
551 if (processing_template_decl)
552 *cond_p = push_stmt_list ();
553 }
554
555 /* Finish such a conditional. */
556
557 static void
558 finish_cond (tree *cond_p, tree expr)
559 {
560 if (processing_template_decl)
561 {
562 tree cond = pop_stmt_list (*cond_p);
563
564 if (expr == NULL_TREE)
565 /* Empty condition in 'for'. */
566 gcc_assert (empty_expr_stmt_p (cond));
567 else if (check_for_bare_parameter_packs (expr))
568 expr = error_mark_node;
569 else if (!empty_expr_stmt_p (cond))
570 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), cond, expr);
571 }
572 *cond_p = expr;
573 }
574
575 /* If *COND_P specifies a conditional with a declaration, transform the
576 loop such that
577 while (A x = 42) { }
578 for (; A x = 42;) { }
579 becomes
580 while (true) { A x = 42; if (!x) break; }
581 for (;;) { A x = 42; if (!x) break; }
582 The statement list for BODY will be empty if the conditional did
583 not declare anything. */
584
585 static void
586 simplify_loop_decl_cond (tree *cond_p, tree body)
587 {
588 tree cond, if_stmt;
589
590 if (!TREE_SIDE_EFFECTS (body))
591 return;
592
593 cond = *cond_p;
594 *cond_p = boolean_true_node;
595
596 if_stmt = begin_if_stmt ();
597 cond = cp_build_unary_op (TRUTH_NOT_EXPR, cond, false, tf_warning_or_error);
598 finish_if_stmt_cond (cond, if_stmt);
599 finish_break_stmt ();
600 finish_then_clause (if_stmt);
601 finish_if_stmt (if_stmt);
602 }
603
604 /* Finish a goto-statement. */
605
606 tree
607 finish_goto_stmt (tree destination)
608 {
609 if (identifier_p (destination))
610 destination = lookup_label (destination);
611
612 /* We warn about unused labels with -Wunused. That means we have to
613 mark the used labels as used. */
614 if (TREE_CODE (destination) == LABEL_DECL)
615 TREE_USED (destination) = 1;
616 else
617 {
618 destination = mark_rvalue_use (destination);
619 if (!processing_template_decl)
620 {
621 destination = cp_convert (ptr_type_node, destination,
622 tf_warning_or_error);
623 if (error_operand_p (destination))
624 return NULL_TREE;
625 destination
626 = fold_build_cleanup_point_expr (TREE_TYPE (destination),
627 destination);
628 }
629 }
630
631 check_goto (destination);
632
633 add_stmt (build_predict_expr (PRED_GOTO, NOT_TAKEN));
634 return add_stmt (build_stmt (input_location, GOTO_EXPR, destination));
635 }
636
637 /* COND is the condition-expression for an if, while, etc.,
638 statement. Convert it to a boolean value, if appropriate.
639 In addition, verify sequence points if -Wsequence-point is enabled. */
640
641 static tree
642 maybe_convert_cond (tree cond)
643 {
644 /* Empty conditions remain empty. */
645 if (!cond)
646 return NULL_TREE;
647
648 /* Wait until we instantiate templates before doing conversion. */
649 if (type_dependent_expression_p (cond))
650 return cond;
651
652 if (warn_sequence_point && !processing_template_decl)
653 verify_sequence_points (cond);
654
655 /* Do the conversion. */
656 cond = convert_from_reference (cond);
657
658 if (TREE_CODE (cond) == MODIFY_EXPR
659 && !TREE_NO_WARNING (cond)
660 && warn_parentheses
661 && warning_at (cp_expr_loc_or_input_loc (cond),
662 OPT_Wparentheses, "suggest parentheses around "
663 "assignment used as truth value"))
664 TREE_NO_WARNING (cond) = 1;
665
666 return condition_conversion (cond);
667 }
668
669 /* Finish an expression-statement, whose EXPRESSION is as indicated. */
670
671 tree
672 finish_expr_stmt (tree expr)
673 {
674 tree r = NULL_TREE;
675 location_t loc = EXPR_LOCATION (expr);
676
677 if (expr != NULL_TREE)
678 {
679 /* If we ran into a problem, make sure we complained. */
680 gcc_assert (expr != error_mark_node || seen_error ());
681
682 if (!processing_template_decl)
683 {
684 if (warn_sequence_point)
685 verify_sequence_points (expr);
686 expr = convert_to_void (expr, ICV_STATEMENT, tf_warning_or_error);
687 }
688 else if (!type_dependent_expression_p (expr))
689 convert_to_void (build_non_dependent_expr (expr), ICV_STATEMENT,
690 tf_warning_or_error);
691
692 if (check_for_bare_parameter_packs (expr))
693 expr = error_mark_node;
694
695 /* Simplification of inner statement expressions, compound exprs,
696 etc. can result in us already having an EXPR_STMT. */
697 if (TREE_CODE (expr) != CLEANUP_POINT_EXPR)
698 {
699 if (TREE_CODE (expr) != EXPR_STMT)
700 expr = build_stmt (loc, EXPR_STMT, expr);
701 expr = maybe_cleanup_point_expr_void (expr);
702 }
703
704 r = add_stmt (expr);
705 }
706
707 return r;
708 }
709
710
711 /* Begin an if-statement. Returns a newly created IF_STMT if
712 appropriate. */
713
714 tree
715 begin_if_stmt (void)
716 {
717 tree r, scope;
718 scope = do_pushlevel (sk_cond);
719 r = build_stmt (input_location, IF_STMT, NULL_TREE,
720 NULL_TREE, NULL_TREE, scope);
721 current_binding_level->this_entity = r;
722 begin_cond (&IF_COND (r));
723 return r;
724 }
725
726 /* Returns true if FN, a CALL_EXPR, is a call to
727 std::is_constant_evaluated or __builtin_is_constant_evaluated. */
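
/* A hedged example of the pattern this helps diagnose:

     if constexpr (std::is_constant_evaluated ())   // always true here
       ...

   Recognizing the call lets finish_if_stmt_cond warn, under
   -Wtautological-compare, that the condition is always true inside
   an 'if constexpr'.  */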
728
729 static bool
730 is_std_constant_evaluated_p (tree fn)
731 {
732 /* std::is_constant_evaluated takes no arguments. */
733 if (call_expr_nargs (fn) != 0)
734 return false;
735
736 tree fndecl = cp_get_callee_fndecl_nofold (fn);
737 if (fndecl_built_in_p (fndecl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
738 BUILT_IN_FRONTEND))
739 return true;
740
741 if (!decl_in_std_namespace_p (fndecl))
742 return false;
743
744 tree name = DECL_NAME (fndecl);
745 return name && id_equal (name, "is_constant_evaluated");
746 }
747
748 /* Process the COND of an if-statement, which may be given by
749 IF_STMT. */
750
751 tree
752 finish_if_stmt_cond (tree cond, tree if_stmt)
753 {
754 cond = maybe_convert_cond (cond);
755 if (IF_STMT_CONSTEXPR_P (if_stmt)
756 && !type_dependent_expression_p (cond)
757 && require_constant_expression (cond)
758 && !instantiation_dependent_expression_p (cond)
759 /* Wait until instantiation time, since only then COND has been
760 converted to bool. */
761 && TYPE_MAIN_VARIANT (TREE_TYPE (cond)) == boolean_type_node)
762 {
763 /* if constexpr (std::is_constant_evaluated()) is always true,
764 so give the user a clue. */
765 if (warn_tautological_compare)
766 {
767 tree t = cond;
768 if (TREE_CODE (t) == CLEANUP_POINT_EXPR)
769 t = TREE_OPERAND (t, 0);
770 if (TREE_CODE (t) == CALL_EXPR
771 && is_std_constant_evaluated_p (t))
772 warning_at (EXPR_LOCATION (cond), OPT_Wtautological_compare,
773 "%qs always evaluates to true in %<if constexpr%>",
774 "std::is_constant_evaluated");
775 }
776
777 cond = instantiate_non_dependent_expr (cond);
778 cond = cxx_constant_value (cond, NULL_TREE);
779 }
780 finish_cond (&IF_COND (if_stmt), cond);
781 add_stmt (if_stmt);
782 THEN_CLAUSE (if_stmt) = push_stmt_list ();
783 return cond;
784 }
785
786 /* Finish the then-clause of an if-statement, which may be given by
787 IF_STMT. */
788
789 tree
790 finish_then_clause (tree if_stmt)
791 {
792 THEN_CLAUSE (if_stmt) = pop_stmt_list (THEN_CLAUSE (if_stmt));
793 return if_stmt;
794 }
795
796 /* Begin the else-clause of an if-statement. */
797
798 void
799 begin_else_clause (tree if_stmt)
800 {
801 ELSE_CLAUSE (if_stmt) = push_stmt_list ();
802 }
803
804 /* Finish the else-clause of an if-statement, which may be given by
805 IF_STMT. */
806
807 void
808 finish_else_clause (tree if_stmt)
809 {
810 ELSE_CLAUSE (if_stmt) = pop_stmt_list (ELSE_CLAUSE (if_stmt));
811 }
812
813 /* Callback for cp_walk_tree to mark all {VAR,PARM}_DECLs in a tree as
814 read. */
815
816 static tree
817 maybe_mark_exp_read_r (tree *tp, int *, void *)
818 {
819 tree t = *tp;
820 if (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
821 mark_exp_read (t);
822 return NULL_TREE;
823 }
824
825 /* Finish an if-statement. */
826
827 void
828 finish_if_stmt (tree if_stmt)
829 {
830 tree scope = IF_SCOPE (if_stmt);
831 IF_SCOPE (if_stmt) = NULL;
832 if (IF_STMT_CONSTEXPR_P (if_stmt))
833 {
834 /* Prevent various -Wunused warnings. We might not instantiate
835 either of these branches, so we would not mark the variables
836 used in that branch as read. */
837 cp_walk_tree_without_duplicates (&THEN_CLAUSE (if_stmt),
838 maybe_mark_exp_read_r, NULL);
839 cp_walk_tree_without_duplicates (&ELSE_CLAUSE (if_stmt),
840 maybe_mark_exp_read_r, NULL);
841 }
842 add_stmt (do_poplevel (scope));
843 }
844
845 /* Begin a while-statement. Returns a newly created WHILE_STMT if
846 appropriate. */
847
848 tree
849 begin_while_stmt (void)
850 {
851 tree r;
852 r = build_stmt (input_location, WHILE_STMT, NULL_TREE, NULL_TREE);
853 add_stmt (r);
854 WHILE_BODY (r) = do_pushlevel (sk_block);
855 begin_cond (&WHILE_COND (r));
856 return r;
857 }
858
859 /* Process the COND of a while-statement, which may be given by
860 WHILE_STMT. */
861
862 void
863 finish_while_stmt_cond (tree cond, tree while_stmt, bool ivdep,
864 unsigned short unroll)
865 {
866 cond = maybe_convert_cond (cond);
867 finish_cond (&WHILE_COND (while_stmt), cond);
868 begin_maybe_infinite_loop (cond);
869 if (ivdep && cond != error_mark_node)
870 WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR,
871 TREE_TYPE (WHILE_COND (while_stmt)),
872 WHILE_COND (while_stmt),
873 build_int_cst (integer_type_node,
874 annot_expr_ivdep_kind),
875 integer_zero_node);
876 if (unroll && cond != error_mark_node)
877 WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR,
878 TREE_TYPE (WHILE_COND (while_stmt)),
879 WHILE_COND (while_stmt),
880 build_int_cst (integer_type_node,
881 annot_expr_unroll_kind),
882 build_int_cst (integer_type_node,
883 unroll));
884 simplify_loop_decl_cond (&WHILE_COND (while_stmt), WHILE_BODY (while_stmt));
885 }
886
887 /* Finish a while-statement, which may be given by WHILE_STMT. */
888
889 void
890 finish_while_stmt (tree while_stmt)
891 {
892 end_maybe_infinite_loop (boolean_true_node);
893 WHILE_BODY (while_stmt) = do_poplevel (WHILE_BODY (while_stmt));
894 }
895
896 /* Begin a do-statement. Returns a newly created DO_STMT if
897 appropriate. */
898
899 tree
900 begin_do_stmt (void)
901 {
902 tree r = build_stmt (input_location, DO_STMT, NULL_TREE, NULL_TREE);
903 begin_maybe_infinite_loop (boolean_true_node);
904 add_stmt (r);
905 DO_BODY (r) = push_stmt_list ();
906 return r;
907 }
908
909 /* Finish the body of a do-statement, which may be given by DO_STMT. */
910
911 void
912 finish_do_body (tree do_stmt)
913 {
914 tree body = DO_BODY (do_stmt) = pop_stmt_list (DO_BODY (do_stmt));
915
916 if (TREE_CODE (body) == STATEMENT_LIST && STATEMENT_LIST_TAIL (body))
917 body = STATEMENT_LIST_TAIL (body)->stmt;
918
919 if (IS_EMPTY_STMT (body))
920 warning (OPT_Wempty_body,
921 "suggest explicit braces around empty body in %<do%> statement");
922 }
923
924 /* Finish a do-statement, which may be given by DO_STMT, and whose
925 COND is as indicated. */
926
927 void
928 finish_do_stmt (tree cond, tree do_stmt, bool ivdep, unsigned short unroll)
929 {
930 cond = maybe_convert_cond (cond);
931 end_maybe_infinite_loop (cond);
932 if (ivdep && cond != error_mark_node)
933 cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
934 build_int_cst (integer_type_node, annot_expr_ivdep_kind),
935 integer_zero_node);
936 if (unroll && cond != error_mark_node)
937 cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
938 build_int_cst (integer_type_node, annot_expr_unroll_kind),
939 build_int_cst (integer_type_node, unroll));
940 DO_COND (do_stmt) = cond;
941 }
942
943 /* Finish a return-statement. The EXPRESSION returned, if any, is as
944 indicated. */
945
946 tree
947 finish_return_stmt (tree expr)
948 {
949 tree r;
950 bool no_warning;
951
952 expr = check_return_expr (expr, &no_warning);
953
954 if (error_operand_p (expr)
955 || (flag_openmp && !check_omp_return ()))
956 {
957 /* Suppress -Wreturn-type for this function. */
958 if (warn_return_type)
959 TREE_NO_WARNING (current_function_decl) = true;
960 return error_mark_node;
961 }
962
963 if (!processing_template_decl)
964 {
965 if (warn_sequence_point)
966 verify_sequence_points (expr);
967
968 if (DECL_DESTRUCTOR_P (current_function_decl)
969 || (DECL_CONSTRUCTOR_P (current_function_decl)
970 && targetm.cxx.cdtor_returns_this ()))
971 {
972 /* Similarly, all destructors must run destructors for
973 base-classes before returning. So, all returns in a
974 destructor get sent to the DTOR_LABEL; finish_function emits
975 code to return a value there. */
976 return finish_goto_stmt (cdtor_label);
977 }
978 }
979
980 r = build_stmt (input_location, RETURN_EXPR, expr);
981 TREE_NO_WARNING (r) |= no_warning;
982 r = maybe_cleanup_point_expr_void (r);
983 r = add_stmt (r);
984
985 return r;
986 }
987
988 /* Begin the scope of a for-statement or a range-for-statement.
989 Both the returned scope and the tree stored in *INIT are to be
990 used in a call to begin_for_stmt or begin_range_for_stmt. */
991
992 tree
993 begin_for_scope (tree *init)
994 {
995 tree scope = do_pushlevel (sk_for);
996
997 if (processing_template_decl)
998 *init = push_stmt_list ();
999 else
1000 *init = NULL_TREE;
1001
1002 return scope;
1003 }
1004
1005 /* Begin a for-statement. Returns a new FOR_STMT.
1006 SCOPE and INIT should be the values returned by begin_for_scope,
1007 or both NULL_TREE. */
1008
1009 tree
1010 begin_for_stmt (tree scope, tree init)
1011 {
1012 tree r;
1013
1014 r = build_stmt (input_location, FOR_STMT, NULL_TREE, NULL_TREE,
1015 NULL_TREE, NULL_TREE, NULL_TREE);
1016
1017 if (scope == NULL_TREE)
1018 {
1019 gcc_assert (!init);
1020 scope = begin_for_scope (&init);
1021 }
1022
1023 FOR_INIT_STMT (r) = init;
1024 FOR_SCOPE (r) = scope;
1025
1026 return r;
1027 }
1028
1029 /* Finish the init-statement of a for-statement, which may be
1030 given by FOR_STMT. */
1031
1032 void
1033 finish_init_stmt (tree for_stmt)
1034 {
1035 if (processing_template_decl)
1036 FOR_INIT_STMT (for_stmt) = pop_stmt_list (FOR_INIT_STMT (for_stmt));
1037 add_stmt (for_stmt);
1038 FOR_BODY (for_stmt) = do_pushlevel (sk_block);
1039 begin_cond (&FOR_COND (for_stmt));
1040 }
1041
1042 /* Finish the COND of a for-statement, which may be given by
1043 FOR_STMT. */
1044
1045 void
1046 finish_for_cond (tree cond, tree for_stmt, bool ivdep, unsigned short unroll)
1047 {
1048 cond = maybe_convert_cond (cond);
1049 finish_cond (&FOR_COND (for_stmt), cond);
1050 begin_maybe_infinite_loop (cond);
1051 if (ivdep && cond != error_mark_node)
1052 FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR,
1053 TREE_TYPE (FOR_COND (for_stmt)),
1054 FOR_COND (for_stmt),
1055 build_int_cst (integer_type_node,
1056 annot_expr_ivdep_kind),
1057 integer_zero_node);
1058 if (unroll && cond != error_mark_node)
1059 FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR,
1060 TREE_TYPE (FOR_COND (for_stmt)),
1061 FOR_COND (for_stmt),
1062 build_int_cst (integer_type_node,
1063 annot_expr_unroll_kind),
1064 build_int_cst (integer_type_node,
1065 unroll));
1066 simplify_loop_decl_cond (&FOR_COND (for_stmt), FOR_BODY (for_stmt));
1067 }
1068
1069 /* Finish the increment-EXPRESSION in a for-statement, which may be
1070 given by FOR_STMT. */
1071
1072 void
1073 finish_for_expr (tree expr, tree for_stmt)
1074 {
1075 if (!expr)
1076 return;
1077 /* If EXPR is an overloaded function, issue an error; there is no
1078 context available to use to perform overload resolution. */
1079 if (type_unknown_p (expr))
1080 {
1081 cxx_incomplete_type_error (expr, TREE_TYPE (expr));
1082 expr = error_mark_node;
1083 }
1084 if (!processing_template_decl)
1085 {
1086 if (warn_sequence_point)
1087 verify_sequence_points (expr);
1088 expr = convert_to_void (expr, ICV_THIRD_IN_FOR,
1089 tf_warning_or_error);
1090 }
1091 else if (!type_dependent_expression_p (expr))
1092 convert_to_void (build_non_dependent_expr (expr), ICV_THIRD_IN_FOR,
1093 tf_warning_or_error);
1094 expr = maybe_cleanup_point_expr_void (expr);
1095 if (check_for_bare_parameter_packs (expr))
1096 expr = error_mark_node;
1097 FOR_EXPR (for_stmt) = expr;
1098 }
1099
1100 /* Finish the body of a for-statement, which may be given by
1101 FOR_STMT. The increment-EXPR for the loop must already have
1102 been provided via finish_for_expr.
1103 This can also finish a RANGE_FOR_STMT. */
1104
1105 void
1106 finish_for_stmt (tree for_stmt)
1107 {
1108 end_maybe_infinite_loop (boolean_true_node);
1109
1110 if (TREE_CODE (for_stmt) == RANGE_FOR_STMT)
1111 RANGE_FOR_BODY (for_stmt) = do_poplevel (RANGE_FOR_BODY (for_stmt));
1112 else
1113 FOR_BODY (for_stmt) = do_poplevel (FOR_BODY (for_stmt));
1114
1115 /* Pop the scope for the body of the loop. */
1116 tree *scope_ptr = (TREE_CODE (for_stmt) == RANGE_FOR_STMT
1117 ? &RANGE_FOR_SCOPE (for_stmt)
1118 : &FOR_SCOPE (for_stmt));
1119 tree scope = *scope_ptr;
1120 *scope_ptr = NULL;
1121
1122 /* During parsing of the body, a range for uses "__for_{range,begin,end} "
1123 decl names to make those inaccessible to code in the body.
1124 Change them to names with an underscore instead of a space, so that
1125 they can be inspected in the debugger. */
1126 tree range_for_decl[3] = { NULL_TREE, NULL_TREE, NULL_TREE };
1127 gcc_assert (CPTI_FOR_BEGIN__IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 1
1128 && CPTI_FOR_END__IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 2
1129 && CPTI_FOR_RANGE_IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 3
1130 && CPTI_FOR_BEGIN_IDENTIFIER == CPTI_FOR_BEGIN__IDENTIFIER + 3
1131 && CPTI_FOR_END_IDENTIFIER == CPTI_FOR_END__IDENTIFIER + 3);
1132 for (int i = 0; i < 3; i++)
1133 {
1134 tree id = cp_global_trees[CPTI_FOR_RANGE__IDENTIFIER + i];
1135 if (IDENTIFIER_BINDING (id)
1136 && IDENTIFIER_BINDING (id)->scope == current_binding_level)
1137 {
1138 range_for_decl[i] = IDENTIFIER_BINDING (id)->value;
1139 gcc_assert (VAR_P (range_for_decl[i])
1140 && DECL_ARTIFICIAL (range_for_decl[i]));
1141 }
1142 }
1143
1144 add_stmt (do_poplevel (scope));
1145
1146 for (int i = 0; i < 3; i++)
1147 if (range_for_decl[i])
1148 DECL_NAME (range_for_decl[i])
1149 = cp_global_trees[CPTI_FOR_RANGE_IDENTIFIER + i];
1150 }
1151
1152 /* Begin a range-for-statement. Returns a new RANGE_FOR_STMT.
1153 SCOPE and INIT should be the values returned by begin_for_scope,
1154 or both NULL_TREE.
1155 To finish it, call finish_for_stmt(). */
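
/* A hedged sketch of roughly what a range-based for stands for (the
   actual lowering is performed by the parser, not in this file; the
   container name 'cont' is purely illustrative):

     for (auto x : cont) body;

   behaves approximately like

     {
       auto &&__for_range = cont;
       auto __for_begin = begin-expr;   // e.g. __for_range.begin ()
       auto __for_end = end-expr;       // likewise for end
       for (; __for_begin != __for_end; ++__for_begin)
         { auto x = *__for_begin; body; }
     }

   These are the artificial "__for_*" variables whose names are
   adjusted in finish_for_stmt above.  */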
1156
1157 tree
1158 begin_range_for_stmt (tree scope, tree init)
1159 {
1160 begin_maybe_infinite_loop (boolean_false_node);
1161
1162 tree r = build_stmt (input_location, RANGE_FOR_STMT, NULL_TREE, NULL_TREE,
1163 NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE);
1164
1165 if (scope == NULL_TREE)
1166 {
1167 gcc_assert (!init);
1168 scope = begin_for_scope (&init);
1169 }
1170
1171 /* Since C++20, RANGE_FOR_STMTs can use the init tree, so save it. */
1172 RANGE_FOR_INIT_STMT (r) = init;
1173 RANGE_FOR_SCOPE (r) = scope;
1174
1175 return r;
1176 }
1177
1178 /* Finish the head of a range-based for statement, which may
1179 be given by RANGE_FOR_STMT. DECL must be the declaration
1180 and EXPR must be the loop expression. */
1181
1182 void
1183 finish_range_for_decl (tree range_for_stmt, tree decl, tree expr)
1184 {
1185 if (processing_template_decl)
1186 RANGE_FOR_INIT_STMT (range_for_stmt)
1187 = pop_stmt_list (RANGE_FOR_INIT_STMT (range_for_stmt));
1188 RANGE_FOR_DECL (range_for_stmt) = decl;
1189 RANGE_FOR_EXPR (range_for_stmt) = expr;
1190 add_stmt (range_for_stmt);
1191 RANGE_FOR_BODY (range_for_stmt) = do_pushlevel (sk_block);
1192 }
1193
1194 /* Finish a break-statement. */
1195
1196 tree
1197 finish_break_stmt (void)
1198 {
1199 /* In switch statements break is sometimes stylistically used after
1200 a return statement. This can lead to spurious warnings about
1201 control reaching the end of a non-void function when it is
1202 inlined. Note that we are calling block_may_fallthru with
1203 language specific tree nodes; this works because
1204 block_may_fallthru returns true when given something it does not
1205 understand. */
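
/* A hedged illustration of the tolerated pattern ('sel' and
   'compute' are just placeholders):

     switch (sel)
       {
       case 0:
         return compute ();
         break;   // stylistic; control cannot reach here
       }

   The BREAK_STMT after the return is simply not added, so it cannot
   later provoke the spurious warnings described above.  */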
1206 if (!block_may_fallthru (cur_stmt_list))
1207 return void_node;
1208 note_break_stmt ();
1209 return add_stmt (build_stmt (input_location, BREAK_STMT));
1210 }
1211
1212 /* Finish a continue-statement. */
1213
1214 tree
1215 finish_continue_stmt (void)
1216 {
1217 return add_stmt (build_stmt (input_location, CONTINUE_STMT));
1218 }
1219
1220 /* Begin a switch-statement. Returns a new SWITCH_STMT if
1221 appropriate. */
1222
1223 tree
1224 begin_switch_stmt (void)
1225 {
1226 tree r, scope;
1227
1228 scope = do_pushlevel (sk_cond);
1229 r = build_stmt (input_location, SWITCH_STMT, NULL_TREE, NULL_TREE, NULL_TREE, scope);
1230
1231 begin_cond (&SWITCH_STMT_COND (r));
1232
1233 return r;
1234 }
1235
1236 /* Finish the cond of a switch-statement. */
1237
1238 void
1239 finish_switch_cond (tree cond, tree switch_stmt)
1240 {
1241 tree orig_type = NULL;
1242
1243 if (!processing_template_decl)
1244 {
1245 /* Convert the condition to an integer or enumeration type. */
1246 tree orig_cond = cond;
1247 cond = build_expr_type_conversion (WANT_INT | WANT_ENUM, cond, true);
1248 if (cond == NULL_TREE)
1249 {
1250 error_at (cp_expr_loc_or_input_loc (orig_cond),
1251 "switch quantity not an integer");
1252 cond = error_mark_node;
1253 }
1254 /* We want unlowered type here to handle enum bit-fields. */
1255 orig_type = unlowered_expr_type (cond);
1256 if (TREE_CODE (orig_type) != ENUMERAL_TYPE)
1257 orig_type = TREE_TYPE (cond);
1258 if (cond != error_mark_node)
1259 {
1260 /* [stmt.switch]
1261
1262 Integral promotions are performed. */
1263 cond = perform_integral_promotions (cond);
1264 cond = maybe_cleanup_point_expr (cond);
1265 }
1266 }
1267 if (check_for_bare_parameter_packs (cond))
1268 cond = error_mark_node;
1269 else if (!processing_template_decl && warn_sequence_point)
1270 verify_sequence_points (cond);
1271
1272 finish_cond (&SWITCH_STMT_COND (switch_stmt), cond);
1273 SWITCH_STMT_TYPE (switch_stmt) = orig_type;
1274 add_stmt (switch_stmt);
1275 push_switch (switch_stmt);
1276 SWITCH_STMT_BODY (switch_stmt) = push_stmt_list ();
1277 }
1278
1279 /* Finish a switch-statement, which may be given by SWITCH_STMT.
1280 The COND switched on was set up earlier by finish_switch_cond. */
1281
1282 void
1283 finish_switch_stmt (tree switch_stmt)
1284 {
1285 tree scope;
1286
1287 SWITCH_STMT_BODY (switch_stmt) =
1288 pop_stmt_list (SWITCH_STMT_BODY (switch_stmt));
1289 pop_switch ();
1290
1291 scope = SWITCH_STMT_SCOPE (switch_stmt);
1292 SWITCH_STMT_SCOPE (switch_stmt) = NULL;
1293 add_stmt (do_poplevel (scope));
1294 }
1295
1296 /* Begin a try-block. Returns a newly-created TRY_BLOCK if
1297 appropriate. */
1298
1299 tree
1300 begin_try_block (void)
1301 {
1302 tree r = build_stmt (input_location, TRY_BLOCK, NULL_TREE, NULL_TREE);
1303 add_stmt (r);
1304 TRY_STMTS (r) = push_stmt_list ();
1305 return r;
1306 }
1307
1308 /* Likewise, for a function-try-block. The block returned in
1309 *COMPOUND_STMT is an artificial outer scope, containing the
1310 function-try-block. */
1311
1312 tree
1313 begin_function_try_block (tree *compound_stmt)
1314 {
1315 tree r;
1316 /* This outer scope does not exist in the C++ standard, but we need
1317 a place to put __FUNCTION__ and similar variables. */
1318 *compound_stmt = begin_compound_stmt (0);
1319 r = begin_try_block ();
1320 FN_TRY_BLOCK_P (r) = 1;
1321 return r;
1322 }
1323
1324 /* Finish a try-block, which may be given by TRY_BLOCK. */
1325
1326 void
1327 finish_try_block (tree try_block)
1328 {
1329 TRY_STMTS (try_block) = pop_stmt_list (TRY_STMTS (try_block));
1330 TRY_HANDLERS (try_block) = push_stmt_list ();
1331 }
1332
1333 /* Finish the body of a cleanup try-block, which may be given by
1334 TRY_BLOCK. */
1335
1336 void
1337 finish_cleanup_try_block (tree try_block)
1338 {
1339 TRY_STMTS (try_block) = pop_stmt_list (TRY_STMTS (try_block));
1340 }
1341
1342 /* Finish an implicitly generated try-block, whose cleanup is given
1343 by CLEANUP. */
1344
1345 void
1346 finish_cleanup (tree cleanup, tree try_block)
1347 {
1348 TRY_HANDLERS (try_block) = cleanup;
1349 CLEANUP_P (try_block) = 1;
1350 }
1351
1352 /* Likewise, for a function-try-block. */
1353
1354 void
1355 finish_function_try_block (tree try_block)
1356 {
1357 finish_try_block (try_block);
1358 /* FIXME : something queer about CTOR_INITIALIZER somehow following
1359 the try block, but moving it inside. */
1360 in_function_try_handler = 1;
1361 }
1362
1363 /* Finish a handler-sequence for a try-block, which may be given by
1364 TRY_BLOCK. */
1365
1366 void
1367 finish_handler_sequence (tree try_block)
1368 {
1369 TRY_HANDLERS (try_block) = pop_stmt_list (TRY_HANDLERS (try_block));
1370 check_handlers (TRY_HANDLERS (try_block));
1371 }
1372
1373 /* Finish the handler-seq for a function-try-block, given by
1374 TRY_BLOCK. COMPOUND_STMT is the outer block created by
1375 begin_function_try_block. */
1376
1377 void
1378 finish_function_handler_sequence (tree try_block, tree compound_stmt)
1379 {
1380 in_function_try_handler = 0;
1381 finish_handler_sequence (try_block);
1382 finish_compound_stmt (compound_stmt);
1383 }
1384
1385 /* Begin a handler. Returns a HANDLER if appropriate. */
1386
1387 tree
1388 begin_handler (void)
1389 {
1390 tree r;
1391
1392 r = build_stmt (input_location, HANDLER, NULL_TREE, NULL_TREE);
1393 add_stmt (r);
1394
1395 /* Create a binding level for the eh_info and the exception object
1396 cleanup. */
1397 HANDLER_BODY (r) = do_pushlevel (sk_catch);
1398
1399 return r;
1400 }
1401
1402 /* Finish the handler-parameters for a handler, which may be given by
1403 HANDLER. DECL is the declaration for the catch parameter, or NULL
1404 if this is a `catch (...)' clause. */
1405
1406 void
1407 finish_handler_parms (tree decl, tree handler)
1408 {
1409 tree type = NULL_TREE;
1410 if (processing_template_decl)
1411 {
1412 if (decl)
1413 {
1414 decl = pushdecl (decl);
1415 decl = push_template_decl (decl);
1416 HANDLER_PARMS (handler) = decl;
1417 type = TREE_TYPE (decl);
1418 }
1419 }
1420 else
1421 {
1422 type = expand_start_catch_block (decl);
1423 if (warn_catch_value
1424 && type != NULL_TREE
1425 && type != error_mark_node
1426 && !TYPE_REF_P (TREE_TYPE (decl)))
1427 {
1428 tree orig_type = TREE_TYPE (decl);
1429 if (CLASS_TYPE_P (orig_type))
1430 {
1431 if (TYPE_POLYMORPHIC_P (orig_type))
1432 warning (OPT_Wcatch_value_,
1433 "catching polymorphic type %q#T by value", orig_type);
1434 else if (warn_catch_value > 1)
1435 warning (OPT_Wcatch_value_,
1436 "catching type %q#T by value", orig_type);
1437 }
1438 else if (warn_catch_value > 2)
1439 warning (OPT_Wcatch_value_,
1440 "catching non-reference type %q#T", orig_type);
1441 }
1442 }
1443 HANDLER_TYPE (handler) = type;
1444 }
1445
1446 /* Finish a handler, which may be given by HANDLER. Its parameters
1447 were set up by the matching call to finish_handler_parms. */
1448
1449 void
1450 finish_handler (tree handler)
1451 {
1452 if (!processing_template_decl)
1453 expand_end_catch_block ();
1454 HANDLER_BODY (handler) = do_poplevel (HANDLER_BODY (handler));
1455 }
1456
1457 /* Begin a compound statement. FLAGS contains some bits that control the
1458 behavior and context. If BCS_NO_SCOPE is set, the compound statement
1459 does not define a scope. If BCS_FN_BODY is set, this is the outermost
1460 block of a function. If BCS_TRY_BLOCK is set, this is the block
1461 created on behalf of a TRY statement. Returns a token to be passed to
1462 finish_compound_stmt. */
1463
1464 tree
1465 begin_compound_stmt (unsigned int flags)
1466 {
1467 tree r;
1468
1469 if (flags & BCS_NO_SCOPE)
1470 {
1471 r = push_stmt_list ();
1472 STATEMENT_LIST_NO_SCOPE (r) = 1;
1473
1474 /* Normally, we try hard to keep the BLOCK for a statement-expression.
1475 But, if it's a statement-expression with a scopeless block, there's
1476 nothing to keep, and we don't want to accidentally keep a block
1477 *inside* the scopeless block. */
1478 keep_next_level (false);
1479 }
1480 else
1481 {
1482 scope_kind sk = sk_block;
1483 if (flags & BCS_TRY_BLOCK)
1484 sk = sk_try;
1485 else if (flags & BCS_TRANSACTION)
1486 sk = sk_transaction;
1487 r = do_pushlevel (sk);
1488 }
1489
1490 /* When processing a template, we need to remember where the braces were,
1491 so that we can set up identical scopes when instantiating the template
1492 later. BIND_EXPR is a handy candidate for this.
1493 Note that do_poplevel won't create a BIND_EXPR itself here (and thus
1494 result in nested BIND_EXPRs), since we don't build BLOCK nodes when
1495 processing templates. */
1496 if (processing_template_decl)
1497 {
1498 r = build3 (BIND_EXPR, NULL, NULL, r, NULL);
1499 BIND_EXPR_TRY_BLOCK (r) = (flags & BCS_TRY_BLOCK) != 0;
1500 BIND_EXPR_BODY_BLOCK (r) = (flags & BCS_FN_BODY) != 0;
1501 TREE_SIDE_EFFECTS (r) = 1;
1502 }
1503
1504 return r;
1505 }
1506
1507 /* Finish a compound-statement, which is given by STMT. */
1508
1509 void
1510 finish_compound_stmt (tree stmt)
1511 {
1512 if (TREE_CODE (stmt) == BIND_EXPR)
1513 {
1514 tree body = do_poplevel (BIND_EXPR_BODY (stmt));
1515 /* If the STATEMENT_LIST is empty and this BIND_EXPR isn't special,
1516 discard the BIND_EXPR so it can be merged with the containing
1517 STATEMENT_LIST. */
1518 if (TREE_CODE (body) == STATEMENT_LIST
1519 && STATEMENT_LIST_HEAD (body) == NULL
1520 && !BIND_EXPR_BODY_BLOCK (stmt)
1521 && !BIND_EXPR_TRY_BLOCK (stmt))
1522 stmt = body;
1523 else
1524 BIND_EXPR_BODY (stmt) = body;
1525 }
1526 else if (STATEMENT_LIST_NO_SCOPE (stmt))
1527 stmt = pop_stmt_list (stmt);
1528 else
1529 {
1530 /* Destroy any ObjC "super" receivers that may have been
1531 created. */
1532 objc_clear_super_receiver ();
1533
1534 stmt = do_poplevel (stmt);
1535 }
1536
1537 /* ??? See c_end_compound_stmt wrt statement expressions. */
1538 add_stmt (stmt);
1539 }
1540
1541 /* Finish an asm-statement, whose components are a STRING, some
1542 OUTPUT_OPERANDS, some INPUT_OPERANDS, some CLOBBERS and some
1543 LABELS. Also note whether the asm-statement should be
1544 considered volatile, and whether it is asm inline. */
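
/* A hedged illustration of the kind of extended asm this processes
   (x86 AT&T syntax; 'dst' and 'src' are just example variables):

     asm volatile ("mov %1, %0" : "=r" (dst) : "r" (src));

   Here "=r" (dst) is an output operand and "r" (src) an input
   operand; the constraint strings are what parse_output_constraint
   and parse_input_constraint examine below.  */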
1545
1546 tree
1547 finish_asm_stmt (location_t loc, int volatile_p, tree string,
1548 tree output_operands, tree input_operands, tree clobbers,
1549 tree labels, bool inline_p)
1550 {
1551 tree r;
1552 tree t;
1553 int ninputs = list_length (input_operands);
1554 int noutputs = list_length (output_operands);
1555
1556 if (!processing_template_decl)
1557 {
1558 const char *constraint;
1559 const char **oconstraints;
1560 bool allows_mem, allows_reg, is_inout;
1561 tree operand;
1562 int i;
1563
1564 oconstraints = XALLOCAVEC (const char *, noutputs);
1565
1566 string = resolve_asm_operand_names (string, output_operands,
1567 input_operands, labels);
1568
1569 for (i = 0, t = output_operands; t; t = TREE_CHAIN (t), ++i)
1570 {
1571 operand = TREE_VALUE (t);
1572
1573 /* ??? Really, this should not be here. Users should be using a
1574 proper lvalue, dammit. But there's a long history of using
1575 casts in the output operands. In cases like longlong.h, this
1576 becomes a primitive form of typechecking -- if the cast can be
1577 removed, then the output operand had a type of the proper width;
1578 otherwise we'll get an error. Gross, but ... */
1579 STRIP_NOPS (operand);
1580
1581 operand = mark_lvalue_use (operand);
1582
1583 if (!lvalue_or_else (operand, lv_asm, tf_warning_or_error))
1584 operand = error_mark_node;
1585
1586 if (operand != error_mark_node
1587 && (TREE_READONLY (operand)
1588 || CP_TYPE_CONST_P (TREE_TYPE (operand))
1589 /* Functions are not modifiable, even though they are
1590 lvalues. */
1591 || FUNC_OR_METHOD_TYPE_P (TREE_TYPE (operand))
1592 /* If it's an aggregate and any field is const, then it is
1593 effectively const. */
1594 || (CLASS_TYPE_P (TREE_TYPE (operand))
1595 && C_TYPE_FIELDS_READONLY (TREE_TYPE (operand)))))
1596 cxx_readonly_error (loc, operand, lv_asm);
1597
1598 tree *op = &operand;
1599 while (TREE_CODE (*op) == COMPOUND_EXPR)
1600 op = &TREE_OPERAND (*op, 1);
1601 switch (TREE_CODE (*op))
1602 {
1603 case PREINCREMENT_EXPR:
1604 case PREDECREMENT_EXPR:
1605 case MODIFY_EXPR:
1606 *op = genericize_compound_lvalue (*op);
1607 op = &TREE_OPERAND (*op, 1);
1608 break;
1609 default:
1610 break;
1611 }
1612
1613 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1614 oconstraints[i] = constraint;
1615
1616 if (parse_output_constraint (&constraint, i, ninputs, noutputs,
1617 &allows_mem, &allows_reg, &is_inout))
1618 {
1619 /* If the operand is going to end up in memory,
1620 mark it addressable. */
1621 if (!allows_reg && !cxx_mark_addressable (*op))
1622 operand = error_mark_node;
1623 }
1624 else
1625 operand = error_mark_node;
1626
1627 TREE_VALUE (t) = operand;
1628 }
1629
1630 for (i = 0, t = input_operands; t; ++i, t = TREE_CHAIN (t))
1631 {
1632 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1633 bool constraint_parsed
1634 = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
1635 oconstraints, &allows_mem, &allows_reg);
1636 /* If the operand is going to end up in memory, don't call
1637 decay_conversion. */
1638 if (constraint_parsed && !allows_reg && allows_mem)
1639 operand = mark_lvalue_use (TREE_VALUE (t));
1640 else
1641 operand = decay_conversion (TREE_VALUE (t), tf_warning_or_error);
1642
1643 /* If the type of the operand hasn't been determined (e.g.,
1644 because it involves an overloaded function), then issue
1645 an error message. There's no context available to
1646 resolve the overloading. */
1647 if (TREE_TYPE (operand) == unknown_type_node)
1648 {
1649 error_at (loc,
1650 "type of %<asm%> operand %qE could not be determined",
1651 TREE_VALUE (t));
1652 operand = error_mark_node;
1653 }
1654
1655 if (constraint_parsed)
1656 {
1657 /* If the operand is going to end up in memory,
1658 mark it addressable. */
1659 if (!allows_reg && allows_mem)
1660 {
1661 /* Strip the nops as we allow this case. FIXME, this really
1662 should be rejected or made deprecated. */
1663 STRIP_NOPS (operand);
1664
1665 tree *op = &operand;
1666 while (TREE_CODE (*op) == COMPOUND_EXPR)
1667 op = &TREE_OPERAND (*op, 1);
1668 switch (TREE_CODE (*op))
1669 {
1670 case PREINCREMENT_EXPR:
1671 case PREDECREMENT_EXPR:
1672 case MODIFY_EXPR:
1673 *op = genericize_compound_lvalue (*op);
1674 op = &TREE_OPERAND (*op, 1);
1675 break;
1676 default:
1677 break;
1678 }
1679
1680 if (!cxx_mark_addressable (*op))
1681 operand = error_mark_node;
1682 }
1683 else if (!allows_reg && !allows_mem)
1684 {
1685 /* If constraint allows neither register nor memory,
1686 try harder to get a constant. */
1687 tree constop = maybe_constant_value (operand);
1688 if (TREE_CONSTANT (constop))
1689 operand = constop;
1690 }
1691 }
1692 else
1693 operand = error_mark_node;
1694
1695 TREE_VALUE (t) = operand;
1696 }
1697 }
1698
1699 r = build_stmt (loc, ASM_EXPR, string,
1700 output_operands, input_operands,
1701 clobbers, labels);
1702 ASM_VOLATILE_P (r) = volatile_p || noutputs == 0;
1703 ASM_INLINE_P (r) = inline_p;
1704 r = maybe_cleanup_point_expr_void (r);
1705 return add_stmt (r);
1706 }
1707
1708 /* Finish a label with the indicated NAME. Returns the new label. */
1709
1710 tree
1711 finish_label_stmt (tree name)
1712 {
1713 tree decl = define_label (input_location, name);
1714
1715 if (decl == error_mark_node)
1716 return error_mark_node;
1717
1718 add_stmt (build_stmt (input_location, LABEL_EXPR, decl));
1719
1720 return decl;
1721 }
1722
1723 /* Finish a series of declarations for local labels. G++ allows users
1724 to declare "local" labels, i.e., labels with scope. This extension
1725 is useful when writing code involving statement-expressions. */
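
/* A hedged example of typical usage (the helpers named here are just
   placeholders):

     ({ __label__ done;
        if (!ready_p ()) goto done;
        process ();
      done:; })

   The label 'done' is local to the statement-expression and cannot
   clash with a label of the same name elsewhere in the function.  */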
1726
1727 void
1728 finish_label_decl (tree name)
1729 {
1730 if (!at_function_scope_p ())
1731 {
1732 error ("%<__label__%> declarations are only allowed in function scopes");
1733 return;
1734 }
1735
1736 add_decl_expr (declare_local_label (name));
1737 }
1738
1739 /* When DECL goes out of scope, make sure that CLEANUP is executed. */
1740
1741 void
1742 finish_decl_cleanup (tree decl, tree cleanup)
1743 {
1744 push_cleanup (decl, cleanup, false);
1745 }
1746
1747 /* If the current scope exits with an exception, run CLEANUP. */
1748
1749 void
1750 finish_eh_cleanup (tree cleanup)
1751 {
1752 push_cleanup (NULL, cleanup, true);
1753 }
1754
1755 /* The MEM_INITS is a list of mem-initializers, in reverse of the
1756 order they were written by the user. Each node is as for
1757 emit_mem_initializers. */
1758
1759 void
1760 finish_mem_initializers (tree mem_inits)
1761 {
1762 /* Reorder the MEM_INITS so that they are in the order they appeared
1763 in the source program. */
1764 mem_inits = nreverse (mem_inits);
1765
1766 if (processing_template_decl)
1767 {
1768 tree mem;
1769
1770 for (mem = mem_inits; mem; mem = TREE_CHAIN (mem))
1771 {
1772 /* If the TREE_PURPOSE is a TYPE_PACK_EXPANSION, skip the
1773 check for bare parameter packs in the TREE_VALUE, because
1774 any parameter packs in the TREE_VALUE have already been
1775 bound as part of the TREE_PURPOSE. See
1776 make_pack_expansion for more information. */
1777 if (TREE_CODE (TREE_PURPOSE (mem)) != TYPE_PACK_EXPANSION
1778 && check_for_bare_parameter_packs (TREE_VALUE (mem)))
1779 TREE_VALUE (mem) = error_mark_node;
1780 }
1781
1782 add_stmt (build_min_nt_loc (UNKNOWN_LOCATION,
1783 CTOR_INITIALIZER, mem_inits));
1784 }
1785 else
1786 emit_mem_initializers (mem_inits);
1787 }
1788
1789 /* Obfuscate EXPR if it looks like an id-expression or member access so
1790 that the call to finish_decltype in do_auto_deduction will give the
1791 right result. If EVEN_UNEVAL, do this even in unevaluated context. */
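
/* A hedged illustration of why the parentheses matter, based on the
   C++14 decltype(auto) rules rather than on code in this file:

     int x;
     decltype(auto) f () { return x; }    // deduces int
     decltype(auto) g () { return (x); }  // deduces int&

   Wrapping the operand here lets do_auto_deduction see that the
   id-expression was parenthesized.  */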
1792
1793 tree
1794 force_paren_expr (tree expr, bool even_uneval)
1795 {
1796 /* This is only needed for decltype(auto) in C++14. */
1797 if (cxx_dialect < cxx14)
1798 return expr;
1799
1800 /* If we're in unevaluated context, we can't be deducing a
1801 return/initializer type, so we don't need to mess with this. */
1802 if (cp_unevaluated_operand && !even_uneval)
1803 return expr;
1804
1805 if (!DECL_P (tree_strip_any_location_wrapper (expr))
1806 && TREE_CODE (expr) != COMPONENT_REF
1807 && TREE_CODE (expr) != SCOPE_REF)
1808 return expr;
1809
1810 location_t loc = cp_expr_location (expr);
1811
1812 if (TREE_CODE (expr) == COMPONENT_REF
1813 || TREE_CODE (expr) == SCOPE_REF)
1814 REF_PARENTHESIZED_P (expr) = true;
1815 else if (processing_template_decl)
1816 expr = build1_loc (loc, PAREN_EXPR, TREE_TYPE (expr), expr);
1817 else
1818 {
1819 expr = build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (expr), expr);
1820 REF_PARENTHESIZED_P (expr) = true;
1821 }
1822
1823 return expr;
1824 }
1825
1826 /* If T is an id-expression obfuscated by force_paren_expr, undo the
1827 obfuscation and return the underlying id-expression. Otherwise
1828 return T. */
1829
1830 tree
1831 maybe_undo_parenthesized_ref (tree t)
1832 {
1833 if (cxx_dialect < cxx14)
1834 return t;
1835
1836 if (INDIRECT_REF_P (t) && REF_PARENTHESIZED_P (t))
1837 {
1838 t = TREE_OPERAND (t, 0);
1839 while (TREE_CODE (t) == NON_LVALUE_EXPR
1840 || TREE_CODE (t) == NOP_EXPR)
1841 t = TREE_OPERAND (t, 0);
1842
1843 gcc_assert (TREE_CODE (t) == ADDR_EXPR
1844 || TREE_CODE (t) == STATIC_CAST_EXPR);
1845 t = TREE_OPERAND (t, 0);
1846 }
1847 else if (TREE_CODE (t) == PAREN_EXPR)
1848 t = TREE_OPERAND (t, 0);
1849 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR
1850 && REF_PARENTHESIZED_P (t))
1851 t = TREE_OPERAND (t, 0);
1852
1853 return t;
1854 }
1855
1856 /* Finish a parenthesized expression EXPR. */
1857
1858 cp_expr
1859 finish_parenthesized_expr (cp_expr expr)
1860 {
1861 if (EXPR_P (expr))
1862 /* This inhibits warnings in c_common_truthvalue_conversion. */
1863 TREE_NO_WARNING (expr) = 1;
1864
1865 if (TREE_CODE (expr) == OFFSET_REF
1866 || TREE_CODE (expr) == SCOPE_REF)
1867 /* [expr.unary.op]/3 The qualified id of a pointer-to-member must not be
1868 enclosed in parentheses. */
1869 PTRMEM_OK_P (expr) = 0;
1870
1871 tree stripped_expr = tree_strip_any_location_wrapper (expr);
1872 if (TREE_CODE (stripped_expr) == STRING_CST)
1873 PAREN_STRING_LITERAL_P (stripped_expr) = 1;
1874
1875 expr = cp_expr (force_paren_expr (expr), expr.get_location ());
1876
1877 return expr;
1878 }
1879
1880 /* Finish a reference to a non-static data member (DECL) that is not
1881 preceded by `.' or `->'. */
1882
1883 tree
1884 finish_non_static_data_member (tree decl, tree object, tree qualifying_scope)
1885 {
1886 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
1887 bool try_omp_private = !object && omp_private_member_map;
1888 tree ret;
1889
1890 if (!object)
1891 {
1892 tree scope = qualifying_scope;
1893 if (scope == NULL_TREE)
1894 {
1895 scope = context_for_name_lookup (decl);
1896 if (!TYPE_P (scope))
1897 {
1898 /* Can happen during error recovery (c++/85014). */
1899 gcc_assert (seen_error ());
1900 return error_mark_node;
1901 }
1902 }
1903 object = maybe_dummy_object (scope, NULL);
1904 }
1905
1906 object = maybe_resolve_dummy (object, true);
1907 if (object == error_mark_node)
1908 return error_mark_node;
1909
1910 /* DR 613/850: Can use non-static data members without an associated
1911 object in sizeof/decltype/alignof. */
1912 if (is_dummy_object (object) && cp_unevaluated_operand == 0
1913 && (!processing_template_decl || !current_class_ref))
1914 {
1915 if (current_function_decl
1916 && DECL_STATIC_FUNCTION_P (current_function_decl))
1917 error ("invalid use of member %qD in static member function", decl);
1918 else
1919 error ("invalid use of non-static data member %qD", decl);
1920 inform (DECL_SOURCE_LOCATION (decl), "declared here");
1921
1922 return error_mark_node;
1923 }
1924
1925 if (current_class_ptr)
1926 TREE_USED (current_class_ptr) = 1;
1927 if (processing_template_decl)
1928 {
1929 tree type = TREE_TYPE (decl);
1930
1931 if (TYPE_REF_P (type))
1932 /* Quals on the object don't matter. */;
1933 else if (PACK_EXPANSION_P (type))
1934 /* Don't bother trying to represent this. */
1935 type = NULL_TREE;
1936 else
1937 {
1938 /* Set the cv qualifiers. */
1939 int quals = cp_type_quals (TREE_TYPE (object));
1940
1941 if (DECL_MUTABLE_P (decl))
1942 quals &= ~TYPE_QUAL_CONST;
1943
1944 quals |= cp_type_quals (TREE_TYPE (decl));
1945 type = cp_build_qualified_type (type, quals);
1946 }
1947
1948 if (qualifying_scope)
1949 /* Wrap this in a SCOPE_REF for now. */
1950 ret = build_qualified_name (type, qualifying_scope, decl,
1951 /*template_p=*/false);
1952 else
1953 ret = (convert_from_reference
1954 (build_min (COMPONENT_REF, type, object, decl, NULL_TREE)));
1955 }
1956 /* If PROCESSING_TEMPLATE_DECL is nonzero here, then
1957 QUALIFYING_SCOPE is also non-null. */
1958 else
1959 {
1960 tree access_type = TREE_TYPE (object);
1961
1962 perform_or_defer_access_check (TYPE_BINFO (access_type), decl,
1963 decl, tf_warning_or_error);
1964
1965 /* If the data member was named `C::M', convert `*this' to `C'
1966 first. */
1967 if (qualifying_scope)
1968 {
1969 tree binfo = NULL_TREE;
1970 object = build_scoped_ref (object, qualifying_scope,
1971 &binfo);
1972 }
1973
1974 ret = build_class_member_access_expr (object, decl,
1975 /*access_path=*/NULL_TREE,
1976 /*preserve_reference=*/false,
1977 tf_warning_or_error);
1978 }
1979 if (try_omp_private)
1980 {
1981 tree *v = omp_private_member_map->get (decl);
1982 if (v)
1983 ret = convert_from_reference (*v);
1984 }
1985 return ret;
1986 }
1987
1988 /* If we are currently parsing a template and we encountered a typedef
1989 TYPEDEF_DECL that is being accessed through CONTEXT, this function
1990 adds the typedef to a list tied to the current template.
1991 At template instantiation time, that list is walked and an access
1992 check is performed for each typedef.
1993 LOCATION is the location of the usage point of TYPEDEF_DECL. */
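/* Illustrative sketch (hypothetical names, behavior described only
   roughly): the deferred check is what eventually rejects code like

     class C { typedef int type; };           // `type' is private
     template <typename T> struct D { C::type x; };
     D<int> d;   // diagnosed when D<int> is instantiated, not when the
                 // template is parsed
*/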
1994
1995 void
1996 add_typedef_to_current_template_for_access_check (tree typedef_decl,
1997 tree context,
1998 location_t location)
1999 {
2000 tree template_info = NULL;
2001 tree cs = current_scope ();
2002
2003 if (!is_typedef_decl (typedef_decl)
2004 || !context
2005 || !CLASS_TYPE_P (context)
2006 || !cs)
2007 return;
2008
2009 if (CLASS_TYPE_P (cs) || TREE_CODE (cs) == FUNCTION_DECL)
2010 template_info = get_template_info (cs);
2011
2012 if (template_info
2013 && TI_TEMPLATE (template_info)
2014 && !currently_open_class (context))
2015 append_type_to_template_for_access_check (cs, typedef_decl,
2016 context, location);
2017 }
2018
2019 /* DECL was the declaration to which a qualified-id resolved. Issue
2020 an error message if it is not accessible. If OBJECT_TYPE is
2021 non-NULL, we have just seen `x->' or `x.' and OBJECT_TYPE is the
2022 type of `*x', or `x', respectively. If the DECL was named as
2023 `A::B' then NESTED_NAME_SPECIFIER is `A'. */
2024
2025 void
2026 check_accessibility_of_qualified_id (tree decl,
2027 tree object_type,
2028 tree nested_name_specifier)
2029 {
2030 tree scope;
2031 tree qualifying_type = NULL_TREE;
2032
2033 /* If we are parsing a template declaration and if decl is a typedef,
2034 add it to a list tied to the template.
2035 At template instantiation time, that list will be walked and
2036 access checks performed. */
2037 add_typedef_to_current_template_for_access_check (decl,
2038 nested_name_specifier
2039 ? nested_name_specifier
2040 : DECL_CONTEXT (decl),
2041 input_location);
2042
2043 /* If we're not checking, return immediately. */
2044 if (deferred_access_no_check)
2045 return;
2046
2047 /* Determine the SCOPE of DECL. */
2048 scope = context_for_name_lookup (decl);
2049 /* If the SCOPE is not a type, then DECL is not a member. */
2050 if (!TYPE_P (scope))
2051 return;
2052 /* Compute the scope through which DECL is being accessed. */
2053 if (object_type
2054 /* OBJECT_TYPE might not be a class type; consider:
2055
2056 class A { typedef int I; };
2057 I *p;
2058 p->A::I::~I();
2059
2060 In this case, we will have "A::I" as the DECL, but "I" as the
2061 OBJECT_TYPE. */
2062 && CLASS_TYPE_P (object_type)
2063 && DERIVED_FROM_P (scope, object_type))
2064 /* If we are processing a `->' or `.' expression, use the type of the
2065 left-hand side. */
2066 qualifying_type = object_type;
2067 else if (nested_name_specifier)
2068 {
2069 /* If the reference is to a non-static member of the
2070 current class, treat it as if it were referenced through
2071 `this'. */
2072 tree ct;
2073 if (DECL_NONSTATIC_MEMBER_P (decl)
2074 && current_class_ptr
2075 && DERIVED_FROM_P (scope, ct = current_nonlambda_class_type ()))
2076 qualifying_type = ct;
2077 /* Otherwise, use the type indicated by the
2078 nested-name-specifier. */
2079 else
2080 qualifying_type = nested_name_specifier;
2081 }
2082 else
2083 /* Otherwise, the name must be from the current class or one of
2084 its bases. */
2085 qualifying_type = currently_open_derived_class (scope);
2086
2087 if (qualifying_type
2088 /* It is possible for qualifying type to be a TEMPLATE_TYPE_PARM
2089 or similar in a default argument value. */
2090 && CLASS_TYPE_P (qualifying_type)
2091 && !dependent_type_p (qualifying_type))
2092 perform_or_defer_access_check (TYPE_BINFO (qualifying_type), decl,
2093 decl, tf_warning_or_error);
2094 }
2095
2096 /* EXPR is the result of a qualified-id. The QUALIFYING_CLASS was the
2097 class named to the left of the "::" operator. DONE is true if this
2098 expression is a complete postfix-expression; it is false if this
2099 expression is followed by '->', '[', '(', etc. ADDRESS_P is true
2100 iff this expression is the operand of '&'. TEMPLATE_P is true iff
2101 the qualified-id was of the form "A::template B". TEMPLATE_ARG_P
2102 is true iff this qualified name appears as a template argument. */
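/* Illustrative sketch (hypothetical names) of two of the cases handled
   below:

     struct B { int i; };
     struct D : B
     {
       int g () { return B::i; }  // inside the class, B::i is turned
                                   // into (*this).B::i
     };
     int B::*pm = &B::i;           // with DONE and ADDRESS_P set, this
                                   // goes through build_offset_ref
*/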
2103
2104 tree
2105 finish_qualified_id_expr (tree qualifying_class,
2106 tree expr,
2107 bool done,
2108 bool address_p,
2109 bool template_p,
2110 bool template_arg_p,
2111 tsubst_flags_t complain)
2112 {
2113 gcc_assert (TYPE_P (qualifying_class));
2114
2115 if (error_operand_p (expr))
2116 return error_mark_node;
2117
2118 if ((DECL_P (expr) || BASELINK_P (expr))
2119 && !mark_used (expr, complain))
2120 return error_mark_node;
2121
2122 if (template_p)
2123 {
2124 if (TREE_CODE (expr) == UNBOUND_CLASS_TEMPLATE)
2125 {
2126 /* cp_parser_lookup_name thought we were looking for a type,
2127 but we're actually looking for a declaration. */
2128 qualifying_class = TYPE_CONTEXT (expr);
2129 expr = TYPE_IDENTIFIER (expr);
2130 }
2131 else
2132 check_template_keyword (expr);
2133 }
2134
2135 /* If EXPR occurs as the operand of '&', use special handling that
2136 permits a pointer-to-member. */
2137 if (address_p && done)
2138 {
2139 if (TREE_CODE (expr) == SCOPE_REF)
2140 expr = TREE_OPERAND (expr, 1);
2141 expr = build_offset_ref (qualifying_class, expr,
2142 /*address_p=*/true, complain);
2143 return expr;
2144 }
2145
2146 /* No need to check access within an enum. */
2147 if (TREE_CODE (qualifying_class) == ENUMERAL_TYPE
2148 && TREE_CODE (expr) != IDENTIFIER_NODE)
2149 return expr;
2150
2151 /* Within the scope of a class, turn references to non-static
2152 members into expressions of the form "this->...". */
2153 if (template_arg_p)
2154 /* But, within a template argument, we do not want to make the
2155 transformation, as there is no "this" pointer. */
2156 ;
2157 else if (TREE_CODE (expr) == FIELD_DECL)
2158 {
2159 push_deferring_access_checks (dk_no_check);
2160 expr = finish_non_static_data_member (expr, NULL_TREE,
2161 qualifying_class);
2162 pop_deferring_access_checks ();
2163 }
2164 else if (BASELINK_P (expr))
2165 {
2166 /* See if any of the functions are non-static members. */
2167 /* If so, the expression may be relative to 'this'. */
2168 if ((type_dependent_expression_p (expr)
2169 || !shared_member_p (expr))
2170 && current_class_ptr
2171 && DERIVED_FROM_P (qualifying_class,
2172 current_nonlambda_class_type ()))
2173 expr = (build_class_member_access_expr
2174 (maybe_dummy_object (qualifying_class, NULL),
2175 expr,
2176 BASELINK_ACCESS_BINFO (expr),
2177 /*preserve_reference=*/false,
2178 complain));
2179 else if (done)
2180 /* The expression is a qualified name whose address is not
2181 being taken. */
2182 expr = build_offset_ref (qualifying_class, expr, /*address_p=*/false,
2183 complain);
2184 }
2185 else if (!template_p
2186 && TREE_CODE (expr) == TEMPLATE_DECL
2187 && !DECL_FUNCTION_TEMPLATE_P (expr))
2188 {
2189 if (complain & tf_error)
2190 error ("%qE missing template arguments", expr);
2191 return error_mark_node;
2192 }
2193 else
2194 {
2195 /* In a template, return a SCOPE_REF for most qualified-ids
2196 so that we can check access at instantiation time. But if
2197 we're looking at a member of the current instantiation, we
2198 know we have access and building up the SCOPE_REF confuses
2199 non-type template argument handling. */
2200 if (processing_template_decl
2201 && (!currently_open_class (qualifying_class)
2202 || TREE_CODE (expr) == IDENTIFIER_NODE
2203 || TREE_CODE (expr) == TEMPLATE_ID_EXPR
2204 || TREE_CODE (expr) == BIT_NOT_EXPR))
2205 expr = build_qualified_name (TREE_TYPE (expr),
2206 qualifying_class, expr,
2207 template_p);
2208 else if (tree wrap = maybe_get_tls_wrapper_call (expr))
2209 expr = wrap;
2210
2211 expr = convert_from_reference (expr);
2212 }
2213
2214 return expr;
2215 }
2216
2217 /* Begin a statement-expression. The value returned must be passed to
2218 finish_stmt_expr. */
2219
2220 tree
2221 begin_stmt_expr (void)
2222 {
2223 return push_stmt_list ();
2224 }
2225
2226 /* Process the final expression of a statement expression. EXPR can be
2227 NULL, if the final expression is empty. Return a STATEMENT_LIST
2228 containing all the statements in the statement-expression, or
2229 ERROR_MARK_NODE if there was an error. */
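/* Illustrative sketch (GNU statement-expression extension, hypothetical
   names; `f' is assumed to be some int-returning function): the value
   of the last expression statement becomes the value of the whole
   construct, which is what the code below records in TREE_TYPE.

     int y = ({ int t = f ();
                t + 1; });     // value of the statement-expression
*/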
2230
2231 tree
2232 finish_stmt_expr_expr (tree expr, tree stmt_expr)
2233 {
2234 if (error_operand_p (expr))
2235 {
2236 /* The type of the statement-expression is the type of the last
2237 expression. */
2238 TREE_TYPE (stmt_expr) = error_mark_node;
2239 return error_mark_node;
2240 }
2241
2242 /* If the last statement does not have "void" type, then the value
2243 of the last statement is the value of the entire expression. */
2244 if (expr)
2245 {
2246 tree type = TREE_TYPE (expr);
2247
2248 if (type && type_unknown_p (type))
2249 {
2250 error ("a statement expression is an insufficient context"
2251 " for overload resolution");
2252 TREE_TYPE (stmt_expr) = error_mark_node;
2253 return error_mark_node;
2254 }
2255 else if (processing_template_decl)
2256 {
2257 expr = build_stmt (input_location, EXPR_STMT, expr);
2258 expr = add_stmt (expr);
2259 /* Mark the last statement so that we can recognize it as such at
2260 template-instantiation time. */
2261 EXPR_STMT_STMT_EXPR_RESULT (expr) = 1;
2262 }
2263 else if (VOID_TYPE_P (type))
2264 {
2265 /* Just treat this like an ordinary statement. */
2266 expr = finish_expr_stmt (expr);
2267 }
2268 else
2269 {
2270 /* It actually has a value we need to deal with. First, force it
2271 to be an rvalue so that we won't need to build up a copy
2272 constructor call later when we try to assign it to something. */
2273 expr = force_rvalue (expr, tf_warning_or_error);
2274 if (error_operand_p (expr))
2275 return error_mark_node;
2276
2277 /* Update for array-to-pointer decay. */
2278 type = TREE_TYPE (expr);
2279
2280 /* Wrap it in a CLEANUP_POINT_EXPR and add it to the list like a
2281 normal statement, but don't convert to void or actually add
2282 the EXPR_STMT. */
2283 if (TREE_CODE (expr) != CLEANUP_POINT_EXPR)
2284 expr = maybe_cleanup_point_expr (expr);
2285 add_stmt (expr);
2286 }
2287
2288 /* The type of the statement-expression is the type of the last
2289 expression. */
2290 TREE_TYPE (stmt_expr) = type;
2291 }
2292
2293 return stmt_expr;
2294 }
2295
2296 /* Finish a statement-expression. EXPR should be the value returned
2297 by the previous begin_stmt_expr. Returns an expression
2298 representing the statement-expression. */
2299
2300 tree
2301 finish_stmt_expr (tree stmt_expr, bool has_no_scope)
2302 {
2303 tree type;
2304 tree result;
2305
2306 if (error_operand_p (stmt_expr))
2307 {
2308 pop_stmt_list (stmt_expr);
2309 return error_mark_node;
2310 }
2311
2312 gcc_assert (TREE_CODE (stmt_expr) == STATEMENT_LIST);
2313
2314 type = TREE_TYPE (stmt_expr);
2315 result = pop_stmt_list (stmt_expr);
2316 TREE_TYPE (result) = type;
2317
2318 if (processing_template_decl)
2319 {
2320 result = build_min (STMT_EXPR, type, result);
2321 TREE_SIDE_EFFECTS (result) = 1;
2322 STMT_EXPR_NO_SCOPE (result) = has_no_scope;
2323 }
2324 else if (CLASS_TYPE_P (type))
2325 {
2326 /* Wrap the statement-expression in a TARGET_EXPR so that the
2327 temporary object created by the final expression is destroyed at
2328 the end of the full-expression containing the
2329 statement-expression. */
2330 result = force_target_expr (type, result, tf_warning_or_error);
2331 }
2332
2333 return result;
2334 }
2335
2336 /* Returns the expression which provides the value of STMT_EXPR. */
2337
2338 tree
2339 stmt_expr_value_expr (tree stmt_expr)
2340 {
2341 tree t = STMT_EXPR_STMT (stmt_expr);
2342
2343 if (TREE_CODE (t) == BIND_EXPR)
2344 t = BIND_EXPR_BODY (t);
2345
2346 if (TREE_CODE (t) == STATEMENT_LIST && STATEMENT_LIST_TAIL (t))
2347 t = STATEMENT_LIST_TAIL (t)->stmt;
2348
2349 if (TREE_CODE (t) == EXPR_STMT)
2350 t = EXPR_STMT_EXPR (t);
2351
2352 return t;
2353 }
2354
2355 /* Return TRUE iff EXPR_STMT is an empty list of
2356 expression statements. */
2357
2358 bool
2359 empty_expr_stmt_p (tree expr_stmt)
2360 {
2361 tree body = NULL_TREE;
2362
2363 if (expr_stmt == void_node)
2364 return true;
2365
2366 if (expr_stmt)
2367 {
2368 if (TREE_CODE (expr_stmt) == EXPR_STMT)
2369 body = EXPR_STMT_EXPR (expr_stmt);
2370 else if (TREE_CODE (expr_stmt) == STATEMENT_LIST)
2371 body = expr_stmt;
2372 }
2373
2374 if (body)
2375 {
2376 if (TREE_CODE (body) == STATEMENT_LIST)
2377 return tsi_end_p (tsi_start (body));
2378 else
2379 return empty_expr_stmt_p (body);
2380 }
2381 return false;
2382 }
2383
2384 /* Perform Koenig lookup. FN_EXPR is the postfix-expression representing
2385 the function (or functions) to call; ARGS are the arguments to the
2386 call. Returns the functions to be considered by overload resolution. */
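/* Illustrative sketch (hypothetical names): argument-dependent (Koenig)
   lookup finds candidates in the namespaces associated with the
   argument types.

     namespace N { struct A {}; void f (A); }
     N::A a;
     f (a);   // unqualified f is found in namespace N via ADL
*/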
2387
2388 cp_expr
2389 perform_koenig_lookup (cp_expr fn_expr, vec<tree, va_gc> *args,
2390 tsubst_flags_t complain)
2391 {
2392 tree identifier = NULL_TREE;
2393 tree functions = NULL_TREE;
2394 tree tmpl_args = NULL_TREE;
2395 bool template_id = false;
2396 location_t loc = fn_expr.get_location ();
2397 tree fn = fn_expr.get_value ();
2398
2399 STRIP_ANY_LOCATION_WRAPPER (fn);
2400
2401 if (TREE_CODE (fn) == TEMPLATE_ID_EXPR)
2402 {
2403 /* Use a separate flag to handle null args. */
2404 template_id = true;
2405 tmpl_args = TREE_OPERAND (fn, 1);
2406 fn = TREE_OPERAND (fn, 0);
2407 }
2408
2409 /* Find the name of the overloaded function. */
2410 if (identifier_p (fn))
2411 identifier = fn;
2412 else
2413 {
2414 functions = fn;
2415 identifier = OVL_NAME (functions);
2416 }
2417
2418 /* A call to a namespace-scope function using an unqualified name.
2419
2420 Do Koenig lookup -- unless any of the arguments are
2421 type-dependent. */
2422 if (!any_type_dependent_arguments_p (args)
2423 && !any_dependent_template_arguments_p (tmpl_args))
2424 {
2425 fn = lookup_arg_dependent (identifier, functions, args);
2426 if (!fn)
2427 {
2428 /* The unqualified name could not be resolved. */
2429 if (complain & tf_error)
2430 fn = unqualified_fn_lookup_error (cp_expr (identifier, loc));
2431 else
2432 fn = identifier;
2433 }
2434 }
2435
2436 if (fn && template_id && fn != error_mark_node)
2437 fn = build2 (TEMPLATE_ID_EXPR, unknown_type_node, fn, tmpl_args);
2438
2439 return cp_expr (fn, loc);
2440 }
2441
2442 /* Generate an expression for `FN (ARGS)'. This may change the
2443 contents of ARGS.
2444
2445 If DISALLOW_VIRTUAL is true, the call to FN will not be generated
2446 as a virtual call, even if FN is virtual. (This flag is set when
2447 encountering an expression where the function name is explicitly
2448 qualified. For example a call to `X::f' never generates a virtual
2449 call.)
2450
2451 Returns code for the call. */
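/* Illustrative sketch (hypothetical names): DISALLOW_VIRTUAL is set by
   the parser for explicitly qualified calls, so the second call below
   is bound statically even though f is virtual.

     struct B { virtual void f (); };
     struct D : B
     {
       void f ();
       void g () { f ();        // may dispatch virtually
                   B::f (); }   // never a virtual call
     };
*/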
2452
2453 tree
2454 finish_call_expr (tree fn, vec<tree, va_gc> **args, bool disallow_virtual,
2455 bool koenig_p, tsubst_flags_t complain)
2456 {
2457 tree result;
2458 tree orig_fn;
2459 vec<tree, va_gc> *orig_args = *args;
2460
2461 if (fn == error_mark_node)
2462 return error_mark_node;
2463
2464 gcc_assert (!TYPE_P (fn));
2465
2466 /* If FN may be a FUNCTION_DECL obfuscated by force_paren_expr, undo
2467 it so that we can tell this is a call to a known function. */
2468 fn = maybe_undo_parenthesized_ref (fn);
2469
2470 STRIP_ANY_LOCATION_WRAPPER (fn);
2471
2472 orig_fn = fn;
2473
2474 if (processing_template_decl)
2475 {
2476 /* If FN is a local extern declaration or set thereof, look them up
2477 again at instantiation time. */
2478 if (is_overloaded_fn (fn))
2479 {
2480 tree ifn = get_first_fn (fn);
2481 if (TREE_CODE (ifn) == FUNCTION_DECL
2482 && DECL_LOCAL_FUNCTION_P (ifn))
2483 orig_fn = DECL_NAME (ifn);
2484 }
2485
2486 /* If the call expression is dependent, build a CALL_EXPR node
2487 with no type; type_dependent_expression_p recognizes
2488 expressions with no type as being dependent. */
2489 if (type_dependent_expression_p (fn)
2490 || any_type_dependent_arguments_p (*args))
2491 {
2492 result = build_min_nt_call_vec (orig_fn, *args);
2493 SET_EXPR_LOCATION (result, cp_expr_loc_or_input_loc (fn));
2494 KOENIG_LOOKUP_P (result) = koenig_p;
2495 if (is_overloaded_fn (fn))
2496 fn = get_fns (fn);
2497
2498 if (cfun)
2499 {
2500 bool abnormal = true;
2501 for (lkp_iterator iter (fn); abnormal && iter; ++iter)
2502 {
2503 tree fndecl = *iter;
2504 if (TREE_CODE (fndecl) != FUNCTION_DECL
2505 || !TREE_THIS_VOLATILE (fndecl))
2506 abnormal = false;
2507 }
2508 /* FIXME: Stop warning about falling off end of non-void
2509 function. But this is wrong. Even if we only see
2510 no-return fns at this point, we could select a
2511 future-defined return fn during instantiation. Or
2512 vice-versa. */
2513 if (abnormal)
2514 current_function_returns_abnormally = 1;
2515 }
2516 return result;
2517 }
2518 orig_args = make_tree_vector_copy (*args);
2519 if (!BASELINK_P (fn)
2520 && TREE_CODE (fn) != PSEUDO_DTOR_EXPR
2521 && TREE_TYPE (fn) != unknown_type_node)
2522 fn = build_non_dependent_expr (fn);
2523 make_args_non_dependent (*args);
2524 }
2525
2526 if (TREE_CODE (fn) == COMPONENT_REF)
2527 {
2528 tree member = TREE_OPERAND (fn, 1);
2529 if (BASELINK_P (member))
2530 {
2531 tree object = TREE_OPERAND (fn, 0);
2532 return build_new_method_call (object, member,
2533 args, NULL_TREE,
2534 (disallow_virtual
2535 ? LOOKUP_NORMAL | LOOKUP_NONVIRTUAL
2536 : LOOKUP_NORMAL),
2537 /*fn_p=*/NULL,
2538 complain);
2539 }
2540 }
2541
2542 /* Per 13.3.1.1, '(&f)(...)' is the same as '(f)(...)'. */
2543 if (TREE_CODE (fn) == ADDR_EXPR
2544 && TREE_CODE (TREE_OPERAND (fn, 0)) == OVERLOAD)
2545 fn = TREE_OPERAND (fn, 0);
2546
2547 if (is_overloaded_fn (fn))
2548 fn = baselink_for_fns (fn);
2549
2550 result = NULL_TREE;
2551 if (BASELINK_P (fn))
2552 {
2553 tree object;
2554
2555 /* A call to a member function. From [over.call.func]:
2556
2557 If the keyword this is in scope and refers to the class of
2558 that member function, or a derived class thereof, then the
2559 function call is transformed into a qualified function call
2560 using (*this) as the postfix-expression to the left of the
2561 . operator.... [Otherwise] a contrived object of type T
2562 becomes the implied object argument.
2563
2564 In this situation:
2565
2566 struct A { void f(); };
2567 struct B : public A {};
2568 struct C : public A { void g() { B::f(); }};
2569
2570 "the class of that member function" refers to `A'. But 11.2
2571 [class.access.base] says that we need to convert 'this' to B* as
2572 part of the access, so we pass 'B' to maybe_dummy_object. */
2573
2574 if (DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (get_first_fn (fn)))
2575 {
2576 /* A constructor call always uses a dummy object. (This constructor
2577 call which has the form A::A () is actually invalid and we are
2578 going to reject it later in build_new_method_call.) */
2579 object = build_dummy_object (BINFO_TYPE (BASELINK_ACCESS_BINFO (fn)));
2580 }
2581 else
2582 object = maybe_dummy_object (BINFO_TYPE (BASELINK_ACCESS_BINFO (fn)),
2583 NULL);
2584
2585 result = build_new_method_call (object, fn, args, NULL_TREE,
2586 (disallow_virtual
2587 ? LOOKUP_NORMAL|LOOKUP_NONVIRTUAL
2588 : LOOKUP_NORMAL),
2589 /*fn_p=*/NULL,
2590 complain);
2591 }
2592 else if (concept_check_p (fn))
2593 {
2594 /* FN is actually a template-id referring to a concept definition. */
2595 tree id = unpack_concept_check (fn);
2596 tree tmpl = TREE_OPERAND (id, 0);
2597 tree args = TREE_OPERAND (id, 1);
2598
2599 if (!function_concept_p (tmpl))
2600 {
2601 error_at (EXPR_LOC_OR_LOC (fn, input_location),
2602 "cannot call a concept as a function");
2603 return error_mark_node;
2604 }
2605
2606 /* Ensure the result is wrapped as a call expression. */
2607 result = build_concept_check (tmpl, args, tf_warning_or_error);
2608 }
2609 else if (is_overloaded_fn (fn))
2610 {
2611 /* If the function is an overloaded builtin, resolve it. */
2612 if (TREE_CODE (fn) == FUNCTION_DECL
2613 && (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2614 || DECL_BUILT_IN_CLASS (fn) == BUILT_IN_MD))
2615 result = resolve_overloaded_builtin (input_location, fn, *args);
2616
2617 if (!result)
2618 {
2619 if (warn_sizeof_pointer_memaccess
2620 && (complain & tf_warning)
2621 && !vec_safe_is_empty (*args)
2622 && !processing_template_decl)
2623 {
2624 location_t sizeof_arg_loc[3];
2625 tree sizeof_arg[3];
2626 unsigned int i;
2627 for (i = 0; i < 3; i++)
2628 {
2629 tree t;
2630
2631 sizeof_arg_loc[i] = UNKNOWN_LOCATION;
2632 sizeof_arg[i] = NULL_TREE;
2633 if (i >= (*args)->length ())
2634 continue;
2635 t = (**args)[i];
2636 if (TREE_CODE (t) != SIZEOF_EXPR)
2637 continue;
2638 if (SIZEOF_EXPR_TYPE_P (t))
2639 sizeof_arg[i] = TREE_TYPE (TREE_OPERAND (t, 0));
2640 else
2641 sizeof_arg[i] = TREE_OPERAND (t, 0);
2642 sizeof_arg_loc[i] = EXPR_LOCATION (t);
2643 }
2644 sizeof_pointer_memaccess_warning
2645 (sizeof_arg_loc, fn, *args,
2646 sizeof_arg, same_type_ignoring_top_level_qualifiers_p);
2647 }
2648
2649 if ((complain & tf_warning)
2650 && TREE_CODE (fn) == FUNCTION_DECL
2651 && fndecl_built_in_p (fn, BUILT_IN_MEMSET)
2652 && vec_safe_length (*args) == 3
2653 && !any_type_dependent_arguments_p (*args))
2654 {
2655 tree arg0 = (*orig_args)[0];
2656 tree arg1 = (*orig_args)[1];
2657 tree arg2 = (*orig_args)[2];
2658 int literal_mask = ((literal_integer_zerop (arg1) << 1)
2659 | (literal_integer_zerop (arg2) << 2));
2660 arg2 = instantiate_non_dependent_expr (arg2);
2661 warn_for_memset (input_location, arg0, arg2, literal_mask);
2662 }
2663
2664 /* A call to a namespace-scope function. */
2665 result = build_new_function_call (fn, args, complain);
2666 }
2667 }
2668 else if (TREE_CODE (fn) == PSEUDO_DTOR_EXPR)
2669 {
2670 if (!vec_safe_is_empty (*args))
2671 error ("arguments to destructor are not allowed");
2672 /* Mark the pseudo-destructor call as having side-effects so
2673 that we do not issue warnings about its use. */
2674 result = build1 (NOP_EXPR,
2675 void_type_node,
2676 TREE_OPERAND (fn, 0));
2677 TREE_SIDE_EFFECTS (result) = 1;
2678 }
2679 else if (CLASS_TYPE_P (TREE_TYPE (fn)))
2680 /* If the "function" is really an object of class type, it might
2681 have an overloaded `operator ()'. */
2682 result = build_op_call (fn, args, complain);
2683
2684 if (!result)
2685 /* A call where the function is unknown. */
2686 result = cp_build_function_call_vec (fn, args, complain);
2687
2688 if (processing_template_decl && result != error_mark_node)
2689 {
2690 if (INDIRECT_REF_P (result))
2691 result = TREE_OPERAND (result, 0);
2692 result = build_call_vec (TREE_TYPE (result), orig_fn, orig_args);
2693 SET_EXPR_LOCATION (result, input_location);
2694 KOENIG_LOOKUP_P (result) = koenig_p;
2695 release_tree_vector (orig_args);
2696 result = convert_from_reference (result);
2697 }
2698
2699 return result;
2700 }
2701
2702 /* Finish a postfix increment or decrement of EXPR. (Which one is
2703 indicated by CODE, which should be POSTINCREMENT_EXPR or
2704 POSTDECREMENT_EXPR.) */
2705
2706 cp_expr
2707 finish_increment_expr (cp_expr expr, enum tree_code code)
2708 {
2709 /* input_location holds the location of the trailing operator token.
2710 Build a location of the form:
2711 expr++
2712 ~~~~^~
2713 with the caret at the operator token, ranging from the start
2714 of EXPR to the end of the operator token. */
2715 location_t combined_loc = make_location (input_location,
2716 expr.get_start (),
2717 get_finish (input_location));
2718 cp_expr result = build_x_unary_op (combined_loc, code, expr,
2719 tf_warning_or_error);
2720 /* TODO: build_x_unary_op doesn't honor the location, so set it here. */
2721 result.set_location (combined_loc);
2722 return result;
2723 }
2724
2725 /* Finish a use of `this'. Returns an expression for `this'. */
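/* Illustrative sketch (hypothetical names): inside a lambda, `this'
   denotes the captured pointer to the enclosing object, which is what
   lambda_expr_this_capture returns below.

     struct S
     {
       int i;
       int f () { return [this] { return i; } (); }  // `this' in the
                                                      // lambda is the
                                                      // captured S*
     };
*/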
2726
2727 tree
2728 finish_this_expr (void)
2729 {
2730 tree result = NULL_TREE;
2731
2732 if (current_class_ptr)
2733 {
2734 tree type = TREE_TYPE (current_class_ref);
2735
2736 /* In a lambda expression, 'this' refers to the captured 'this'. */
2737 if (LAMBDA_TYPE_P (type))
2738 result = lambda_expr_this_capture (CLASSTYPE_LAMBDA_EXPR (type), true);
2739 else
2740 result = current_class_ptr;
2741 }
2742
2743 if (result)
2744 /* The keyword 'this' is a prvalue expression. */
2745 return rvalue (result);
2746
2747 tree fn = current_nonlambda_function ();
2748 if (fn && DECL_STATIC_FUNCTION_P (fn))
2749 error ("%<this%> is unavailable for static member functions");
2750 else if (fn)
2751 error ("invalid use of %<this%> in non-member function");
2752 else
2753 error ("invalid use of %<this%> at top level");
2754 return error_mark_node;
2755 }
2756
2757 /* Finish a pseudo-destructor expression. If SCOPE is NULL, the
2758 expression was of the form `OBJECT.~DESTRUCTOR' where DESTRUCTOR is
2759 the TYPE for the type given. If SCOPE is non-NULL, the expression
2760 was of the form `OBJECT.SCOPE::~DESTRUCTOR'. */
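/* Illustrative sketch (hypothetical names): a pseudo-destructor call on
   a scalar type does nothing, but must still parse and type-check,
   which is why it is represented as a PSEUDO_DTOR_EXPR.

     typedef int I;
     void f (I *p)
     {
       p->~I ();      // OBJECT.~DESTRUCTOR form, SCOPE is null
       p->I::~I ();   // OBJECT.SCOPE::~DESTRUCTOR form
     }
*/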
2761
2762 tree
2763 finish_pseudo_destructor_expr (tree object, tree scope, tree destructor,
2764 location_t loc)
2765 {
2766 if (object == error_mark_node || destructor == error_mark_node)
2767 return error_mark_node;
2768
2769 gcc_assert (TYPE_P (destructor));
2770
2771 if (!processing_template_decl)
2772 {
2773 if (scope == error_mark_node)
2774 {
2775 error_at (loc, "invalid qualifying scope in pseudo-destructor name");
2776 return error_mark_node;
2777 }
2778 if (is_auto (destructor))
2779 destructor = TREE_TYPE (object);
2780 if (scope && TYPE_P (scope) && !check_dtor_name (scope, destructor))
2781 {
2782 error_at (loc,
2783 "qualified type %qT does not match destructor name ~%qT",
2784 scope, destructor);
2785 return error_mark_node;
2786 }
2787
2788
2789 /* [expr.pseudo] says both:
2790
2791 The type designated by the pseudo-destructor-name shall be
2792 the same as the object type.
2793
2794 and:
2795
2796 The cv-unqualified versions of the object type and of the
2797 type designated by the pseudo-destructor-name shall be the
2798 same type.
2799
2800 We implement the more generous second sentence, since that is
2801 what most other compilers do. */
2802 if (!same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (object),
2803 destructor))
2804 {
2805 error_at (loc, "%qE is not of type %qT", object, destructor);
2806 return error_mark_node;
2807 }
2808 }
2809
2810 return build3_loc (loc, PSEUDO_DTOR_EXPR, void_type_node, object,
2811 scope, destructor);
2812 }
2813
2814 /* Finish an expression of the form CODE EXPR. */
2815
2816 cp_expr
2817 finish_unary_op_expr (location_t op_loc, enum tree_code code, cp_expr expr,
2818 tsubst_flags_t complain)
2819 {
2820 /* Build a location of the form:
2821 ++expr
2822 ^~~~~~
2823 with the caret at the operator token, ranging from the start
2824 of the operator token to the end of EXPR. */
2825 location_t combined_loc = make_location (op_loc,
2826 op_loc, expr.get_finish ());
2827 cp_expr result = build_x_unary_op (combined_loc, code, expr, complain);
2828 /* TODO: build_x_unary_op doesn't always honor the location. */
2829 result.set_location (combined_loc);
2830
2831 if (result == error_mark_node)
2832 return result;
2833
2834 if (!(complain & tf_warning))
2835 return result;
2836
2837 tree result_ovl = result;
2838 tree expr_ovl = expr;
2839
2840 if (!processing_template_decl)
2841 expr_ovl = cp_fully_fold (expr_ovl);
2842
2843 if (!CONSTANT_CLASS_P (expr_ovl)
2844 || TREE_OVERFLOW_P (expr_ovl))
2845 return result;
2846
2847 if (!processing_template_decl)
2848 result_ovl = cp_fully_fold (result_ovl);
2849
2850 if (CONSTANT_CLASS_P (result_ovl) && TREE_OVERFLOW_P (result_ovl))
2851 overflow_warning (combined_loc, result_ovl);
2852
2853 return result;
2854 }
2855
2856 /* Finish a compound-literal expression or C++11 functional cast with aggregate
2857 initializer. TYPE is the type to which the CONSTRUCTOR in COMPOUND_LITERAL
2858 is being cast. */
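/* Illustrative sketch (hypothetical names): both forms below funnel
   through this function; the first is the C99-style compound literal
   accepted as a GNU extension, the second is a C++11 functional cast
   with a braced aggregate initializer.

     struct P { int x, y; };
     P a = (P) { 1, 2 };   // fcl_c99 context (GNU/C99 extension)
     P b = P { 1, 2 };     // fcl_functional context (C++11)
*/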
2859
2860 tree
2861 finish_compound_literal (tree type, tree compound_literal,
2862 tsubst_flags_t complain,
2863 fcl_t fcl_context)
2864 {
2865 if (type == error_mark_node)
2866 return error_mark_node;
2867
2868 if (TYPE_REF_P (type))
2869 {
2870 compound_literal
2871 = finish_compound_literal (TREE_TYPE (type), compound_literal,
2872 complain, fcl_context);
2873 /* The prvalue is then used to direct-initialize the reference. */
2874 tree r = (perform_implicit_conversion_flags
2875 (type, compound_literal, complain, LOOKUP_NORMAL));
2876 return convert_from_reference (r);
2877 }
2878
2879 if (!TYPE_OBJ_P (type))
2880 {
2881 if (complain & tf_error)
2882 error ("compound literal of non-object type %qT", type);
2883 return error_mark_node;
2884 }
2885
2886 if (tree anode = type_uses_auto (type))
2887 if (CLASS_PLACEHOLDER_TEMPLATE (anode))
2888 {
2889 type = do_auto_deduction (type, compound_literal, anode, complain,
2890 adc_variable_type);
2891 if (type == error_mark_node)
2892 return error_mark_node;
2893 }
2894
2895 /* Used to hold a copy of the compound literal in a template. */
2896 tree orig_cl = NULL_TREE;
2897
2898 if (processing_template_decl)
2899 {
2900 const bool dependent_p
2901 = (instantiation_dependent_expression_p (compound_literal)
2902 || dependent_type_p (type));
2903 if (dependent_p)
2904 /* We're about to return, no need to copy. */
2905 orig_cl = compound_literal;
2906 else
2907 /* We're going to need a copy. */
2908 orig_cl = unshare_constructor (compound_literal);
2909 TREE_TYPE (orig_cl) = type;
2910 /* Mark the expression as a compound literal. */
2911 TREE_HAS_CONSTRUCTOR (orig_cl) = 1;
2912 /* And as instantiation-dependent. */
2913 CONSTRUCTOR_IS_DEPENDENT (orig_cl) = dependent_p;
2914 if (fcl_context == fcl_c99)
2915 CONSTRUCTOR_C99_COMPOUND_LITERAL (orig_cl) = 1;
2916 /* If the compound literal is dependent, we're done for now. */
2917 if (dependent_p)
2918 return orig_cl;
2919 /* Otherwise, do go on to e.g. check narrowing. */
2920 }
2921
2922 type = complete_type (type);
2923
2924 if (TYPE_NON_AGGREGATE_CLASS (type))
2925 {
2926 /* Trying to deal with a CONSTRUCTOR instead of a TREE_LIST
2927 everywhere that deals with function arguments would be a pain, so
2928 just wrap it in a TREE_LIST. The parser set a flag so we know
2929 that it came from T{} rather than T({}). */
2930 CONSTRUCTOR_IS_DIRECT_INIT (compound_literal) = 1;
2931 compound_literal = build_tree_list (NULL_TREE, compound_literal);
2932 return build_functional_cast (type, compound_literal, complain);
2933 }
2934
2935 if (TREE_CODE (type) == ARRAY_TYPE
2936 && check_array_initializer (NULL_TREE, type, compound_literal))
2937 return error_mark_node;
2938 compound_literal = reshape_init (type, compound_literal, complain);
2939 if (SCALAR_TYPE_P (type)
2940 && !BRACE_ENCLOSED_INITIALIZER_P (compound_literal))
2941 {
2942 tree t = instantiate_non_dependent_expr_sfinae (compound_literal,
2943 complain);
2944 if (!check_narrowing (type, t, complain))
2945 return error_mark_node;
2946 }
2947 if (TREE_CODE (type) == ARRAY_TYPE
2948 && TYPE_DOMAIN (type) == NULL_TREE)
2949 {
2950 cp_complete_array_type_or_error (&type, compound_literal,
2951 false, complain);
2952 if (type == error_mark_node)
2953 return error_mark_node;
2954 }
2955 compound_literal = digest_init_flags (type, compound_literal,
2956 LOOKUP_NORMAL | LOOKUP_NO_NARROWING,
2957 complain);
2958 if (compound_literal == error_mark_node)
2959 return error_mark_node;
2960
2961 /* If we're in a template, return the original compound literal. */
2962 if (orig_cl)
2963 {
2964 if (!VECTOR_TYPE_P (type))
2965 return get_target_expr_sfinae (orig_cl, complain);
2966 else
2967 return orig_cl;
2968 }
2969
2970 if (TREE_CODE (compound_literal) == CONSTRUCTOR)
2971 {
2972 TREE_HAS_CONSTRUCTOR (compound_literal) = true;
2973 if (fcl_context == fcl_c99)
2974 CONSTRUCTOR_C99_COMPOUND_LITERAL (compound_literal) = 1;
2975 }
2976
2977 /* Put static/constant array temporaries in static variables. */
2978 /* FIXME all C99 compound literals should be variables rather than C++
2979 temporaries, unless they are used as an aggregate initializer. */
2980 if ((!at_function_scope_p () || CP_TYPE_CONST_P (type))
2981 && fcl_context == fcl_c99
2982 && TREE_CODE (type) == ARRAY_TYPE
2983 && !TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type)
2984 && initializer_constant_valid_p (compound_literal, type))
2985 {
2986 tree decl = create_temporary_var (type);
2987 DECL_INITIAL (decl) = compound_literal;
2988 TREE_STATIC (decl) = 1;
2989 if (literal_type_p (type) && CP_TYPE_CONST_NON_VOLATILE_P (type))
2990 {
2991 /* 5.19 says that a constant expression can include an
2992 lvalue-rvalue conversion applied to "a glvalue of literal type
2993 that refers to a non-volatile temporary object initialized
2994 with a constant expression". Rather than try to communicate
2995 that this VAR_DECL is a temporary, just mark it constexpr. */
2996 DECL_DECLARED_CONSTEXPR_P (decl) = true;
2997 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl) = true;
2998 TREE_CONSTANT (decl) = true;
2999 }
3000 cp_apply_type_quals_to_decl (cp_type_quals (type), decl);
3001 decl = pushdecl_top_level (decl);
3002 DECL_NAME (decl) = make_anon_name ();
3003 SET_DECL_ASSEMBLER_NAME (decl, DECL_NAME (decl));
3004 /* Make sure the destructor is callable. */
3005 tree clean = cxx_maybe_build_cleanup (decl, complain);
3006 if (clean == error_mark_node)
3007 return error_mark_node;
3008 return decl;
3009 }
3010
3011 /* Represent other compound literals with TARGET_EXPR so we produce
3012 an lvalue, but can elide copies. */
3013 if (!VECTOR_TYPE_P (type))
3014 compound_literal = get_target_expr_sfinae (compound_literal, complain);
3015
3016 return compound_literal;
3017 }
3018
3019 /* Return the declaration for the function-name variable indicated by
3020 ID. */
3021
3022 tree
3023 finish_fname (tree id)
3024 {
3025 tree decl;
3026
3027 decl = fname_decl (input_location, C_RID_CODE (id), id);
3028 if (processing_template_decl && current_function_decl
3029 && decl != error_mark_node)
3030 decl = DECL_NAME (decl);
3031 return decl;
3032 }
3033
3034 /* Finish a translation unit. */
3035
3036 void
3037 finish_translation_unit (void)
3038 {
3039 /* In case there were missing closing braces,
3040 get us back to the global binding level. */
3041 pop_everything ();
3042 while (current_namespace != global_namespace)
3043 pop_namespace ();
3044
3045 /* Do file scope __FUNCTION__ et al. */
3046 finish_fname_decls ();
3047
3048 if (scope_chain->omp_declare_target_attribute)
3049 {
3050 if (!errorcount)
3051 error ("%<#pragma omp declare target%> without corresponding "
3052 "%<#pragma omp end declare target%>");
3053 scope_chain->omp_declare_target_attribute = 0;
3054 }
3055 }
3056
3057 /* Finish a template type parameter, specified as AGGR IDENTIFIER.
3058 Returns the parameter. */
3059
3060 tree
3061 finish_template_type_parm (tree aggr, tree identifier)
3062 {
3063 if (aggr != class_type_node)
3064 {
3065 permerror (input_location, "template type parameters must use the keyword %<class%> or %<typename%>");
3066 aggr = class_type_node;
3067 }
3068
3069 return build_tree_list (aggr, identifier);
3070 }
3071
3072 /* Finish a template template parameter, specified as AGGR IDENTIFIER.
3073 Returns the parameter. */
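/* Illustrative sketch (hypothetical names): a template template
   parameter as handled here looks like

     template <template <typename> class Container>
     struct Wrapper { Container<int> c; };

   where AGGR corresponds to the `class' keyword and IDENTIFIER to
   `Container'.  */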
3074
3075 tree
3076 finish_template_template_parm (tree aggr, tree identifier)
3077 {
3078 tree decl = build_decl (input_location,
3079 TYPE_DECL, identifier, NULL_TREE);
3080
3081 tree tmpl = build_lang_decl (TEMPLATE_DECL, identifier, NULL_TREE);
3082 DECL_TEMPLATE_PARMS (tmpl) = current_template_parms;
3083 DECL_TEMPLATE_RESULT (tmpl) = decl;
3084 DECL_ARTIFICIAL (decl) = 1;
3085
3086 /* Associate the constraints with the underlying declaration,
3087 not the template. */
3088 tree reqs = TEMPLATE_PARMS_CONSTRAINTS (current_template_parms);
3089 tree constr = build_constraints (reqs, NULL_TREE);
3090 set_constraints (decl, constr);
3091
3092 end_template_decl ();
3093
3094 gcc_assert (DECL_TEMPLATE_PARMS (tmpl));
3095
3096 check_default_tmpl_args (decl, DECL_TEMPLATE_PARMS (tmpl),
3097 /*is_primary=*/true, /*is_partial=*/false,
3098 /*is_friend=*/0);
3099
3100 return finish_template_type_parm (aggr, tmpl);
3101 }
3102
3103 /* ARGUMENT is the default-argument value for a template template
3104 parameter. If ARGUMENT is invalid, issue error messages and return
3105 the ERROR_MARK_NODE. Otherwise, ARGUMENT itself is returned. */
3106
3107 tree
3108 check_template_template_default_arg (tree argument)
3109 {
3110 if (TREE_CODE (argument) != TEMPLATE_DECL
3111 && TREE_CODE (argument) != TEMPLATE_TEMPLATE_PARM
3112 && TREE_CODE (argument) != UNBOUND_CLASS_TEMPLATE)
3113 {
3114 if (TREE_CODE (argument) == TYPE_DECL)
3115 error ("invalid use of type %qT as a default value for a template "
3116 "template-parameter", TREE_TYPE (argument));
3117 else
3118 error ("invalid default argument for a template template parameter");
3119 return error_mark_node;
3120 }
3121
3122 return argument;
3123 }
3124
3125 /* Begin a class definition, as indicated by T. */
3126
3127 tree
3128 begin_class_definition (tree t)
3129 {
3130 if (error_operand_p (t) || error_operand_p (TYPE_MAIN_DECL (t)))
3131 return error_mark_node;
3132
3133 if (processing_template_parmlist && !LAMBDA_TYPE_P (t))
3134 {
3135 error ("definition of %q#T inside template parameter list", t);
3136 return error_mark_node;
3137 }
3138
3139 /* According to the C++ ABI, decimal classes defined in ISO/IEC TR 24733
3140 are passed the same as decimal scalar types. */
3141 if (TREE_CODE (t) == RECORD_TYPE
3142 && !processing_template_decl)
3143 {
3144 tree ns = TYPE_CONTEXT (t);
3145 if (ns && TREE_CODE (ns) == NAMESPACE_DECL
3146 && DECL_CONTEXT (ns) == std_node
3147 && DECL_NAME (ns)
3148 && id_equal (DECL_NAME (ns), "decimal"))
3149 {
3150 const char *n = TYPE_NAME_STRING (t);
3151 if ((strcmp (n, "decimal32") == 0)
3152 || (strcmp (n, "decimal64") == 0)
3153 || (strcmp (n, "decimal128") == 0))
3154 TYPE_TRANSPARENT_AGGR (t) = 1;
3155 }
3156 }
3157
3158 /* A non-implicit typename comes from code like:
3159
3160 template <typename T> struct A {
3161 template <typename U> struct A<T>::B ...
3162
3163 This is erroneous. */
3164 else if (TREE_CODE (t) == TYPENAME_TYPE)
3165 {
3166 error ("invalid definition of qualified type %qT", t);
3167 t = error_mark_node;
3168 }
3169
3170 if (t == error_mark_node || ! MAYBE_CLASS_TYPE_P (t))
3171 {
3172 t = make_class_type (RECORD_TYPE);
3173 pushtag (make_anon_name (), t, /*tag_scope=*/ts_current);
3174 }
3175
3176 if (TYPE_BEING_DEFINED (t))
3177 {
3178 t = make_class_type (TREE_CODE (t));
3179 pushtag (TYPE_IDENTIFIER (t), t, /*tag_scope=*/ts_current);
3180 }
3181 maybe_process_partial_specialization (t);
3182 pushclass (t);
3183 TYPE_BEING_DEFINED (t) = 1;
3184 class_binding_level->defining_class_p = 1;
3185
3186 if (flag_pack_struct)
3187 {
3188 tree v;
3189 TYPE_PACKED (t) = 1;
3190 /* Even though the type is being defined for the first time
3191 here, there might have been a forward declaration, so there
3192 might be cv-qualified variants of T. */
3193 for (v = TYPE_NEXT_VARIANT (t); v; v = TYPE_NEXT_VARIANT (v))
3194 TYPE_PACKED (v) = 1;
3195 }
3196 /* Reset the interface data at the earliest possible moment, as it
3197 might have been set earlier via a forward declaration such as
3198 `class foo;'. */
3199 if (! TYPE_UNNAMED_P (t))
3200 {
3201 struct c_fileinfo *finfo =
3202 get_fileinfo (LOCATION_FILE (input_location));
3203 CLASSTYPE_INTERFACE_ONLY (t) = finfo->interface_only;
3204 SET_CLASSTYPE_INTERFACE_UNKNOWN_X
3205 (t, finfo->interface_unknown);
3206 }
3207 reset_specialization ();
3208
3209 /* Make a declaration for this class in its own scope. */
3210 build_self_reference ();
3211
3212 return t;
3213 }
3214
3215 /* Finish the member declaration given by DECL. */
3216
3217 void
3218 finish_member_declaration (tree decl)
3219 {
3220 if (decl == error_mark_node || decl == NULL_TREE)
3221 return;
3222
3223 if (decl == void_type_node)
3224 /* The COMPONENT was a friend, not a member, and so there's
3225 nothing for us to do. */
3226 return;
3227
3228 /* We should see only one DECL at a time. */
3229 gcc_assert (DECL_CHAIN (decl) == NULL_TREE);
3230
3231 /* Don't add decls after definition. */
3232 gcc_assert (TYPE_BEING_DEFINED (current_class_type)
3233 /* We can add lambda types when late parsing default
3234 arguments. */
3235 || LAMBDA_TYPE_P (TREE_TYPE (decl)));
3236
3237 /* Set up access control for DECL. */
3238 TREE_PRIVATE (decl)
3239 = (current_access_specifier == access_private_node);
3240 TREE_PROTECTED (decl)
3241 = (current_access_specifier == access_protected_node);
3242 if (TREE_CODE (decl) == TEMPLATE_DECL)
3243 {
3244 TREE_PRIVATE (DECL_TEMPLATE_RESULT (decl)) = TREE_PRIVATE (decl);
3245 TREE_PROTECTED (DECL_TEMPLATE_RESULT (decl)) = TREE_PROTECTED (decl);
3246 }
3247
3248 /* Mark the DECL as a member of the current class, unless it's
3249 a member of an enumeration. */
3250 if (TREE_CODE (decl) != CONST_DECL)
3251 DECL_CONTEXT (decl) = current_class_type;
3252
3253 if (TREE_CODE (decl) == USING_DECL)
3254 /* For now, ignore class-scope USING_DECLS, so that debugging
3255 backends do not see them. */
3256 DECL_IGNORED_P (decl) = 1;
3257
3258 /* Check for bare parameter packs in the non-static data member
3259 declaration. */
3260 if (TREE_CODE (decl) == FIELD_DECL)
3261 {
3262 if (check_for_bare_parameter_packs (TREE_TYPE (decl)))
3263 TREE_TYPE (decl) = error_mark_node;
3264 if (check_for_bare_parameter_packs (DECL_ATTRIBUTES (decl)))
3265 DECL_ATTRIBUTES (decl) = NULL_TREE;
3266 }
3267
3268 /* [dcl.link]
3269
3270 A C language linkage is ignored for the names of class members
3271 and the member function type of class member functions. */
3272 if (DECL_LANG_SPECIFIC (decl))
3273 SET_DECL_LANGUAGE (decl, lang_cplusplus);
3274
3275 bool add = false;
3276
3277 /* Functions and non-functions are added differently. */
3278 if (DECL_DECLARES_FUNCTION_P (decl))
3279 add = add_method (current_class_type, decl, false);
3280 /* Enter the DECL into the scope of the class, if the class
3281 isn't a closure (whose fields are supposed to be unnamed). */
3282 else if (CLASSTYPE_LAMBDA_EXPR (current_class_type)
3283 || pushdecl_class_level (decl))
3284 add = true;
3285
3286 if (add)
3287 {
3288 /* All TYPE_DECLs go at the end of TYPE_FIELDS. Ordinary fields
3289 go at the beginning. The reason is that
3290 legacy_nonfn_member_lookup searches the list in order, and we
3291 want a field name to override a type name so that the "struct
3292 stat hack" will work. In particular:
3293
3294 struct S { enum E { }; static const int E = 5; int ary[S::E]; } s;
3295
3296 is valid. */
3297
3298 if (TREE_CODE (decl) == TYPE_DECL)
3299 TYPE_FIELDS (current_class_type)
3300 = chainon (TYPE_FIELDS (current_class_type), decl);
3301 else
3302 {
3303 DECL_CHAIN (decl) = TYPE_FIELDS (current_class_type);
3304 TYPE_FIELDS (current_class_type) = decl;
3305 }
3306
3307 maybe_add_class_template_decl_list (current_class_type, decl,
3308 /*friend_p=*/0);
3309 }
3310 }
3311
3312 /* Finish processing a complete template declaration. The PARMS are
3313 the template parameters. */
3314
3315 void
3316 finish_template_decl (tree parms)
3317 {
3318 if (parms)
3319 end_template_decl ();
3320 else
3321 end_specialization ();
3322 }
3323
3324 // Returns the template type of the class scope being entered. If we're
3325 // entering a constrained class scope, TYPE is the class template
3326 // scope being entered and we may need to match the intended type with
3327 // a constrained specialization. For example:
3328 //
3329 // template<Object T>
3330 // struct S { void f(); }; #1
3331 //
3332 // template<Object T>
3333 // void S<T>::f() { } #2
3334 //
3335 // We check, in #2, that S<T> refers precisely to the type declared by
3336 // #1 (i.e., that the constraints match). Note that the following should
3337 // be an error since there is no specialization of S<T> that is
3338 // unconstrained, but this is not diagnosed here.
3339 //
3340 // template<typename T>
3341 // void S<T>::f() { }
3342 //
3343 // We cannot diagnose this problem here since this function also matches
3344 // qualified template names that are not part of a definition. For example:
3345 //
3346 // template<Integral T, Floating_point U>
3347 // typename pair<T, U>::first_type void f(T, U);
3348 //
3349 // Here, it is unlikely that there is a partial specialization of
3350 // pair constrained for Integral and Floating_point arguments.
3351 //
3352 // The general rule is: if a constrained specialization with matching
3353 // constraints is found return that type. Also note that if TYPE is not a
3354 // class-type (e.g. a typename type), then no fixup is needed.
3355
3356 static tree
3357 fixup_template_type (tree type)
3358 {
3359 // Find the template parameter list at a depth appropriate to
3360 // the scope we're trying to enter.
3361 tree parms = current_template_parms;
3362 int depth = template_class_depth (type);
3363 for (int n = processing_template_decl; n > depth && parms; --n)
3364 parms = TREE_CHAIN (parms);
3365 if (!parms)
3366 return type;
3367 tree cur_reqs = TEMPLATE_PARMS_CONSTRAINTS (parms);
3368 tree cur_constr = build_constraints (cur_reqs, NULL_TREE);
3369
3370 // Search for a specialization whose type and constraints match.
3371 tree tmpl = CLASSTYPE_TI_TEMPLATE (type);
3372 tree specs = DECL_TEMPLATE_SPECIALIZATIONS (tmpl);
3373 while (specs)
3374 {
3375 tree spec_constr = get_constraints (TREE_VALUE (specs));
3376
3377 // If the type and constraints match a specialization, then we
3378 // are entering that type.
3379 if (same_type_p (type, TREE_TYPE (specs))
3380 && equivalent_constraints (cur_constr, spec_constr))
3381 return TREE_TYPE (specs);
3382 specs = TREE_CHAIN (specs);
3383 }
3384
3385 // If no specialization matches, then we must return the type
3386 // previously found.
3387 return type;
3388 }
3389
3390 /* Finish processing a template-id (which names a type) of the form
3391 NAME < ARGS >. Return the TYPE_DECL for the type named by the
3392 template-id. If ENTERING_SCOPE is nonzero, we are about to enter
3393 the scope of the template-id indicated. */
3394
3395 tree
3396 finish_template_type (tree name, tree args, int entering_scope)
3397 {
3398 tree type;
3399
3400 type = lookup_template_class (name, args,
3401 NULL_TREE, NULL_TREE, entering_scope,
3402 tf_warning_or_error | tf_user);
3403
3404 /* If we might be entering the scope of a partial specialization,
3405 find the one with the right constraints. */
3406 if (flag_concepts
3407 && entering_scope
3408 && CLASS_TYPE_P (type)
3409 && CLASSTYPE_TEMPLATE_INFO (type)
3410 && dependent_type_p (type)
3411 && PRIMARY_TEMPLATE_P (CLASSTYPE_TI_TEMPLATE (type)))
3412 type = fixup_template_type (type);
3413
3414 if (type == error_mark_node)
3415 return type;
3416 else if (CLASS_TYPE_P (type) && !alias_type_or_template_p (type))
3417 return TYPE_STUB_DECL (type);
3418 else
3419 return TYPE_NAME (type);
3420 }
3421
3422 /* Finish processing a BASE_CLASS with the indicated ACCESS_SPECIFIER.
3423 Return a TREE_LIST containing the ACCESS_SPECIFIER and the
3424 BASE_CLASS, or NULL_TREE if an error occurred. The
3425 ACCESS_SPECIFIER is one of
3426 access_{default,public,protected,private}_node. For a virtual base
3427 we set TREE_TYPE. */
3428
3429 tree
3430 finish_base_specifier (tree base, tree access, bool virtual_p)
3431 {
3432 tree result;
3433
3434 if (base == error_mark_node)
3435 {
3436 error ("invalid base-class specification");
3437 result = NULL_TREE;
3438 }
3439 else if (! MAYBE_CLASS_TYPE_P (base))
3440 {
3441 error ("%qT is not a class type", base);
3442 result = NULL_TREE;
3443 }
3444 else
3445 {
3446 if (cp_type_quals (base) != 0)
3447 {
3448 /* DR 484: Can a base-specifier name a cv-qualified
3449 class type? */
3450 base = TYPE_MAIN_VARIANT (base);
3451 }
3452 result = build_tree_list (access, base);
3453 if (virtual_p)
3454 TREE_TYPE (result) = integer_type_node;
3455 }
3456
3457 return result;
3458 }
3459
3460 /* If FNS is a member function, a set of member functions, or a
3461 template-id referring to one or more member functions, return a
3462 BASELINK for FNS, incorporating the current access context.
3463 Otherwise, return FNS unchanged. */
3464
3465 tree
3466 baselink_for_fns (tree fns)
3467 {
3468 tree scope;
3469 tree cl;
3470
3471 if (BASELINK_P (fns)
3472 || error_operand_p (fns))
3473 return fns;
3474
3475 scope = ovl_scope (fns);
3476 if (!CLASS_TYPE_P (scope))
3477 return fns;
3478
3479 cl = currently_open_derived_class (scope);
3480 if (!cl)
3481 cl = scope;
3482 cl = TYPE_BINFO (cl);
3483 return build_baselink (cl, cl, fns, /*optype=*/NULL_TREE);
3484 }
3485
3486 /* Returns true iff DECL is a variable from a function outside
3487 the current one. */
3488
3489 static bool
3490 outer_var_p (tree decl)
3491 {
3492 return ((VAR_P (decl) || TREE_CODE (decl) == PARM_DECL)
3493 && DECL_FUNCTION_SCOPE_P (decl)
3494 /* Don't get confused by temporaries. */
3495 && DECL_NAME (decl)
3496 && (DECL_CONTEXT (decl) != current_function_decl
3497 || parsing_nsdmi ()));
3498 }
3499
3500 /* As above, but also checks that DECL is automatic. */
3501
3502 bool
3503 outer_automatic_var_p (tree decl)
3504 {
3505 return (outer_var_p (decl)
3506 && !TREE_STATIC (decl));
3507 }
3508
3509 /* DECL satisfies outer_automatic_var_p. Possibly complain about it or
3510 rewrite it for lambda capture.
3511
3512 If ODR_USE is true, we're being called from mark_use, and we complain about
3513 use of constant variables. If ODR_USE is false, we're being called for the
3514 id-expression, and we do lambda capture. */
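/* Illustrative sketch (hypothetical names): a reference to an outer
   automatic variable from inside a lambda either triggers an implicit
   capture or is diagnosed here.

     void f ()
     {
       int i = 0;
       auto ok  = [=] { return i; };  // i is implicitly captured
       auto bad = [] { return i; };   // error: i is not captured
     }
*/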
3515
3516 tree
3517 process_outer_var_ref (tree decl, tsubst_flags_t complain, bool odr_use)
3518 {
3519 if (cp_unevaluated_operand)
3520 /* It's not a use (3.2) if we're in an unevaluated context. */
3521 return decl;
3522 if (decl == error_mark_node)
3523 return decl;
3524
3525 tree context = DECL_CONTEXT (decl);
3526 tree containing_function = current_function_decl;
3527 tree lambda_stack = NULL_TREE;
3528 tree lambda_expr = NULL_TREE;
3529 tree initializer = convert_from_reference (decl);
3530
3531 /* Mark it as used now even if the use is ill-formed. */
3532 if (!mark_used (decl, complain))
3533 return error_mark_node;
3534
3535 if (parsing_nsdmi ())
3536 containing_function = NULL_TREE;
3537
3538 if (containing_function && LAMBDA_FUNCTION_P (containing_function))
3539 {
3540 /* Check whether we've already built a proxy. */
3541 tree var = decl;
3542 while (is_normal_capture_proxy (var))
3543 var = DECL_CAPTURED_VARIABLE (var);
3544 tree d = retrieve_local_specialization (var);
3545
3546 if (d && d != decl && is_capture_proxy (d))
3547 {
3548 if (DECL_CONTEXT (d) == containing_function)
3549 /* We already have an inner proxy. */
3550 return d;
3551 else
3552 /* We need to capture an outer proxy. */
3553 return process_outer_var_ref (d, complain, odr_use);
3554 }
3555 }
3556
3557 /* If we are in a lambda function, we can move out until we hit
3558 1. the context,
3559 2. a non-lambda function, or
3560 3. a non-default capturing lambda function. */
3561 while (context != containing_function
3562 /* containing_function can be null with invalid generic lambdas. */
3563 && containing_function
3564 && LAMBDA_FUNCTION_P (containing_function))
3565 {
3566 tree closure = DECL_CONTEXT (containing_function);
3567 lambda_expr = CLASSTYPE_LAMBDA_EXPR (closure);
3568
3569 if (TYPE_CLASS_SCOPE_P (closure))
3570 /* A lambda in an NSDMI (c++/64496). */
3571 break;
3572
3573 if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr) == CPLD_NONE)
3574 break;
3575
3576 lambda_stack = tree_cons (NULL_TREE, lambda_expr, lambda_stack);
3577
3578 containing_function = decl_function_context (containing_function);
3579 }
3580
3581 /* In a lambda within a template, wait until instantiation time to implicitly
3582 capture a parameter pack. We want to wait because we don't know if we're
3583 capturing the whole pack or a single element, and it's OK to wait because
3584 find_parameter_packs_r walks into the lambda body. */
3585 if (context == containing_function
3586 && DECL_PACK_P (decl))
3587 return decl;
3588
3589 if (lambda_expr && VAR_P (decl) && DECL_ANON_UNION_VAR_P (decl))
3590 {
3591 if (complain & tf_error)
3592 error ("cannot capture member %qD of anonymous union", decl);
3593 return error_mark_node;
3594 }
3595 /* Do lambda capture when processing the id-expression, not when
3596 odr-using a variable. */
3597 if (!odr_use && context == containing_function)
3598 decl = add_default_capture (lambda_stack,
3599 /*id=*/DECL_NAME (decl), initializer);
3600 /* Only an odr-use of an outer automatic variable causes an
3601 error, and a constant variable can decay to a prvalue
3602 constant without odr-use. So don't complain yet. */
3603 else if (!odr_use && decl_constant_var_p (decl))
3604 return decl;
3605 else if (lambda_expr)
3606 {
3607 if (complain & tf_error)
3608 {
3609 error ("%qD is not captured", decl);
3610 tree closure = LAMBDA_EXPR_CLOSURE (lambda_expr);
3611 if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr) == CPLD_NONE)
3612 inform (location_of (closure),
3613 "the lambda has no capture-default");
3614 else if (TYPE_CLASS_SCOPE_P (closure))
3615 inform (UNKNOWN_LOCATION, "lambda in local class %q+T cannot "
3616 "capture variables from the enclosing context",
3617 TYPE_CONTEXT (closure));
3618 inform (DECL_SOURCE_LOCATION (decl), "%q#D declared here", decl);
3619 }
3620 return error_mark_node;
3621 }
3622 else
3623 {
3624 if (complain & tf_error)
3625 {
3626 error (VAR_P (decl)
3627 ? G_("use of local variable with automatic storage from "
3628 "containing function")
3629 : G_("use of parameter from containing function"));
3630 inform (DECL_SOURCE_LOCATION (decl), "%q#D declared here", decl);
3631 }
3632 return error_mark_node;
3633 }
3634 return decl;
3635 }
3636
3637 /* ID_EXPRESSION is a representation of parsed, but unprocessed,
3638 id-expression. (See cp_parser_id_expression for details.) SCOPE,
3639 if non-NULL, is the type or namespace used to explicitly qualify
3640 ID_EXPRESSION. DECL is the entity to which that name has been
3641 resolved.
3642
3643 *CONSTANT_EXPRESSION_P is true if we are presently parsing a
3644 constant-expression. In that case, *NON_CONSTANT_EXPRESSION_P will
3645 be set to true if this expression isn't permitted in a
3646 constant-expression, but it is otherwise not set by this function.
3647 *ALLOW_NON_CONSTANT_EXPRESSION_P is true if we are parsing a
3648 constant-expression, but a non-constant expression is also
3649 permissible.
3650
3651 DONE is true if this expression is a complete postfix-expression;
3652 it is false if this expression is followed by '->', '[', '(', etc.
3653 ADDRESS_P is true iff this expression is the operand of '&'.
3654 TEMPLATE_P is true iff the qualified-id was of the form
3655 "A::template B". TEMPLATE_ARG_P is true iff this qualified name
3656 appears as a template argument.
3657
3658 If an error occurs, and it is the kind of error that might cause
3659 the parser to abort a tentative parse, *ERROR_MSG is filled in. It
3660 is the caller's responsibility to issue the message. *ERROR_MSG
3661 will be a string with static storage duration, so the caller need
3662 not "free" it.
3663
3664 Return an expression for the entity, after issuing appropriate
3665 diagnostics. This function is also responsible for transforming a
3666 reference to a non-static member into a COMPONENT_REF that makes
3667 the use of "this" explicit.
3668
3669 Upon return, *IDK will be filled in appropriately. */
3670 static cp_expr
3671 finish_id_expression_1 (tree id_expression,
3672 tree decl,
3673 tree scope,
3674 cp_id_kind *idk,
3675 bool integral_constant_expression_p,
3676 bool allow_non_integral_constant_expression_p,
3677 bool *non_integral_constant_expression_p,
3678 bool template_p,
3679 bool done,
3680 bool address_p,
3681 bool template_arg_p,
3682 const char **error_msg,
3683 location_t location)
3684 {
3685 decl = strip_using_decl (decl);
3686
3687 /* Initialize the output parameters. */
3688 *idk = CP_ID_KIND_NONE;
3689 *error_msg = NULL;
3690
3691 if (id_expression == error_mark_node)
3692 return error_mark_node;
3693 /* If we have a template-id, then no further lookup is
3694 required. If the template-id was for a template-class, we
3695 will sometimes have a TYPE_DECL at this point. */
3696 else if (TREE_CODE (decl) == TEMPLATE_ID_EXPR
3697 || TREE_CODE (decl) == TYPE_DECL)
3698 ;
3699 /* Look up the name. */
3700 else
3701 {
3702 if (decl == error_mark_node)
3703 {
3704 /* Name lookup failed. */
3705 if (scope
3706 && (!TYPE_P (scope)
3707 || (!dependent_type_p (scope)
3708 && !(identifier_p (id_expression)
3709 && IDENTIFIER_CONV_OP_P (id_expression)
3710 && dependent_type_p (TREE_TYPE (id_expression))))))
3711 {
3712 /* If the qualifying type is non-dependent (and the name
3713 does not name a conversion operator to a dependent
3714 type), issue an error. */
3715 qualified_name_lookup_error (scope, id_expression, decl, location);
3716 return error_mark_node;
3717 }
3718 else if (!scope)
3719 {
3720 /* It may be resolved via Koenig lookup. */
3721 *idk = CP_ID_KIND_UNQUALIFIED;
3722 return id_expression;
3723 }
3724 else
3725 decl = id_expression;
3726 }
3727
3728 /* Remember that the name was used in the definition of
3729 the current class so that we can check later to see if
3730 the meaning would have been different after the class
3731 was entirely defined. */
3732 if (!scope && decl != error_mark_node && identifier_p (id_expression))
3733 maybe_note_name_used_in_class (id_expression, decl);
3734
3735 /* A use in an unevaluated operand might not be instantiated appropriately
3736 if tsubst_copy builds a dummy parm, or if we never instantiate a
3737 generic lambda, so mark it now. */
3738 if (processing_template_decl && cp_unevaluated_operand)
3739 mark_type_use (decl);
3740
3741 /* Disallow uses of local variables from containing functions, except
3742 within lambda-expressions. */
3743 if (outer_automatic_var_p (decl))
3744 {
3745 decl = process_outer_var_ref (decl, tf_warning_or_error);
3746 if (decl == error_mark_node)
3747 return error_mark_node;
3748 }
3749
3750 /* Also disallow uses of function parameters outside the function
3751 body, except inside an unevaluated context (i.e. decltype). */
3752 if (TREE_CODE (decl) == PARM_DECL
3753 && DECL_CONTEXT (decl) == NULL_TREE
3754 && !cp_unevaluated_operand)
3755 {
3756 *error_msg = G_("use of parameter outside function body");
3757 return error_mark_node;
3758 }
3759 }
3760
3761 /* If we didn't find anything, or what we found was a type,
3762 then this wasn't really an id-expression. */
3763 if (TREE_CODE (decl) == TEMPLATE_DECL
3764 && !DECL_FUNCTION_TEMPLATE_P (decl))
3765 {
3766 *error_msg = G_("missing template arguments");
3767 return error_mark_node;
3768 }
3769 else if (TREE_CODE (decl) == TYPE_DECL
3770 || TREE_CODE (decl) == NAMESPACE_DECL)
3771 {
3772 *error_msg = G_("expected primary-expression");
3773 return error_mark_node;
3774 }
3775
3776 /* If the name resolved to a template parameter, there is no
3777 need to look it up again later. */
3778 if ((TREE_CODE (decl) == CONST_DECL && DECL_TEMPLATE_PARM_P (decl))
3779 || TREE_CODE (decl) == TEMPLATE_PARM_INDEX)
3780 {
3781 tree r;
3782
3783 *idk = CP_ID_KIND_NONE;
3784 if (TREE_CODE (decl) == TEMPLATE_PARM_INDEX)
3785 decl = TEMPLATE_PARM_DECL (decl);
3786 r = DECL_INITIAL (decl);
3787 if (CLASS_TYPE_P (TREE_TYPE (r)) && !CP_TYPE_CONST_P (TREE_TYPE (r)))
3788 {
3789 /* If the entity is a template parameter object for a template
3790 parameter of type T, the type of the expression is const T. */
3791 tree ctype = TREE_TYPE (r);
3792 ctype = cp_build_qualified_type (ctype, (cp_type_quals (ctype)
3793 | TYPE_QUAL_CONST));
3794 r = build1 (VIEW_CONVERT_EXPR, ctype, r);
3795 }
3796 r = convert_from_reference (r);
3797 if (integral_constant_expression_p
3798 && !dependent_type_p (TREE_TYPE (decl))
3799 && !(INTEGRAL_OR_ENUMERATION_TYPE_P (TREE_TYPE (r))))
3800 {
3801 if (!allow_non_integral_constant_expression_p)
3802 error ("template parameter %qD of type %qT is not allowed in "
3803 "an integral constant expression because it is not of "
3804 "integral or enumeration type", decl, TREE_TYPE (decl));
3805 *non_integral_constant_expression_p = true;
3806 }
3807 return r;
3808 }
3809 else
3810 {
3811 bool dependent_p = type_dependent_expression_p (decl);
3812
3813 /* If the declaration was explicitly qualified, indicate
3814 that. The semantics of `A::f(3)' are different from
3815 `f(3)' if `f' is virtual. */
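/* E.g. (illustrative only):

     struct A { virtual void f (int); };
     struct B : A
     {
       void f (int) override;
       void g () { A::f (3); }   // qualified: calls A::f directly,
     };                          // bypassing virtual dispatch
 */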
3816 *idk = (scope
3817 ? CP_ID_KIND_QUALIFIED
3818 : (TREE_CODE (decl) == TEMPLATE_ID_EXPR
3819 ? CP_ID_KIND_TEMPLATE_ID
3820 : (dependent_p
3821 ? CP_ID_KIND_UNQUALIFIED_DEPENDENT
3822 : CP_ID_KIND_UNQUALIFIED)));
3823
3824 if (dependent_p
3825 && DECL_P (decl)
3826 && any_dependent_type_attributes_p (DECL_ATTRIBUTES (decl)))
3827 /* Dependent type attributes on the decl mean that the TREE_TYPE is
3828 wrong, so just return the identifier. */
3829 return id_expression;
3830
3831 if (DECL_CLASS_TEMPLATE_P (decl))
3832 {
3833 error ("use of class template %qT as expression", decl);
3834 return error_mark_node;
3835 }
3836
3837 if (TREE_CODE (decl) == TREE_LIST)
3838 {
3839 /* Ambiguous reference to base members. */
3840 error ("request for member %qD is ambiguous in "
3841 "multiple inheritance lattice", id_expression);
3842 print_candidates (decl);
3843 return error_mark_node;
3844 }
3845
3846 /* Mark variable-like entities as used. Functions are similarly
3847 marked either below or after overload resolution. */
3848 if ((VAR_P (decl)
3849 || TREE_CODE (decl) == PARM_DECL
3850 || TREE_CODE (decl) == CONST_DECL
3851 || TREE_CODE (decl) == RESULT_DECL)
3852 && !mark_used (decl))
3853 return error_mark_node;
3854
3855 /* Only certain kinds of names are allowed in a constant
3856 expression. Template parameters have already
3857 been handled above. */
3858 if (! error_operand_p (decl)
3859 && !dependent_p
3860 && integral_constant_expression_p
3861 && !decl_constant_var_p (decl)
3862 && TREE_CODE (decl) != CONST_DECL
3863 && !builtin_valid_in_constant_expr_p (decl)
3864 && !concept_check_p (decl))
3865 {
3866 if (!allow_non_integral_constant_expression_p)
3867 {
3868 error ("%qD cannot appear in a constant-expression", decl);
3869 return error_mark_node;
3870 }
3871 *non_integral_constant_expression_p = true;
3872 }
3873
3874 if (tree wrap = maybe_get_tls_wrapper_call (decl))
3875 /* Replace an evaluated use of the thread_local variable with
3876 a call to its wrapper. */
3877 decl = wrap;
3878 else if (TREE_CODE (decl) == TEMPLATE_ID_EXPR
3879 && !dependent_p
3880 && variable_template_p (TREE_OPERAND (decl, 0))
3881 && !concept_check_p (decl))
3882 {
3883 decl = finish_template_variable (decl);
3884 mark_used (decl);
3885 decl = convert_from_reference (decl);
3886 }
3887 else if (concept_check_p (decl))
3888 {
3889 /* Nothing more to do. All of the analysis for concept checks
3890 is done by build_concept_id, called from the parser. */
3891 }
3892 else if (scope)
3893 {
3894 if (TREE_CODE (decl) == SCOPE_REF)
3895 {
3896 gcc_assert (same_type_p (scope, TREE_OPERAND (decl, 0)));
3897 decl = TREE_OPERAND (decl, 1);
3898 }
3899
3900 decl = (adjust_result_of_qualified_name_lookup
3901 (decl, scope, current_nonlambda_class_type()));
3902
3903 if (TREE_CODE (decl) == FUNCTION_DECL)
3904 mark_used (decl);
3905
3906 cp_warn_deprecated_use_scopes (scope);
3907
3908 if (TYPE_P (scope))
3909 decl = finish_qualified_id_expr (scope,
3910 decl,
3911 done,
3912 address_p,
3913 template_p,
3914 template_arg_p,
3915 tf_warning_or_error);
3916 else
3917 decl = convert_from_reference (decl);
3918 }
3919 else if (TREE_CODE (decl) == FIELD_DECL)
3920 {
3921 /* Since SCOPE is NULL here, this is an unqualified name.
3922 Access checking has been performed during name lookup
3923 already. Turn off checking to avoid duplicate errors. */
3924 push_deferring_access_checks (dk_no_check);
3925 decl = finish_non_static_data_member (decl, NULL_TREE,
3926 /*qualifying_scope=*/NULL_TREE);
3927 pop_deferring_access_checks ();
3928 }
3929 else if (is_overloaded_fn (decl))
3930 {
3931 /* We only need to look at the first function,
3932 because all the fns share the attribute we're
3933 concerned with (all member fns or all non-members). */
3934 tree first_fn = get_first_fn (decl);
3935 first_fn = STRIP_TEMPLATE (first_fn);
3936
3937 /* [basic.def.odr]: "A function whose name appears as a
3938 potentially-evaluated expression is odr-used if it is the unique
3939 lookup result".
3940
3941 But only mark it if it's a complete postfix-expression; in a call,
3942 ADL might select a different function, and we'll call mark_used in
3943 build_over_call. */
3944 if (done
3945 && !really_overloaded_fn (decl)
3946 && !mark_used (first_fn))
3947 return error_mark_node;
3948
3949 if (!template_arg_p
3950 && (TREE_CODE (first_fn) == USING_DECL
3951 || (TREE_CODE (first_fn) == FUNCTION_DECL
3952 && DECL_FUNCTION_MEMBER_P (first_fn)
3953 && !shared_member_p (decl))))
3954 {
3955 /* A set of member functions. */
3956 decl = maybe_dummy_object (DECL_CONTEXT (first_fn), 0);
3957 return finish_class_member_access_expr (decl, id_expression,
3958 /*template_p=*/false,
3959 tf_warning_or_error);
3960 }
3961
3962 decl = baselink_for_fns (decl);
3963 }
3964 else
3965 {
3966 if (DECL_P (decl) && DECL_NONLOCAL (decl)
3967 && DECL_CLASS_SCOPE_P (decl))
3968 {
3969 tree context = context_for_name_lookup (decl);
3970 if (context != current_class_type)
3971 {
3972 tree path = currently_open_derived_class (context);
3973 perform_or_defer_access_check (TYPE_BINFO (path),
3974 decl, decl,
3975 tf_warning_or_error);
3976 }
3977 }
3978
3979 decl = convert_from_reference (decl);
3980 }
3981 }
3982
3983 return cp_expr (decl, location);
3984 }
3985
3986 /* As per finish_id_expression_1, but adding a wrapper node
3987 around the result if needed to express LOCATION. */
3988
3989 cp_expr
3990 finish_id_expression (tree id_expression,
3991 tree decl,
3992 tree scope,
3993 cp_id_kind *idk,
3994 bool integral_constant_expression_p,
3995 bool allow_non_integral_constant_expression_p,
3996 bool *non_integral_constant_expression_p,
3997 bool template_p,
3998 bool done,
3999 bool address_p,
4000 bool template_arg_p,
4001 const char **error_msg,
4002 location_t location)
4003 {
4004 cp_expr result
4005 = finish_id_expression_1 (id_expression, decl, scope, idk,
4006 integral_constant_expression_p,
4007 allow_non_integral_constant_expression_p,
4008 non_integral_constant_expression_p,
4009 template_p, done, address_p, template_arg_p,
4010 error_msg, location);
4011 return result.maybe_add_location_wrapper ();
4012 }
4013
4014 /* Implement the __typeof keyword: Return the type of EXPR, suitable for
4015 use as a type-specifier. */
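/* For example (illustrative only):

     int i;
     __typeof (i + 1) j = 0;   // j has type int
 */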
4016
4017 tree
4018 finish_typeof (tree expr)
4019 {
4020 tree type;
4021
4022 if (type_dependent_expression_p (expr))
4023 {
4024 type = cxx_make_type (TYPEOF_TYPE);
4025 TYPEOF_TYPE_EXPR (type) = expr;
4026 SET_TYPE_STRUCTURAL_EQUALITY (type);
4027
4028 return type;
4029 }
4030
4031 expr = mark_type_use (expr);
4032
4033 type = unlowered_expr_type (expr);
4034
4035 if (!type || type == unknown_type_node)
4036 {
4037 error ("type of %qE is unknown", expr);
4038 return error_mark_node;
4039 }
4040
4041 return type;
4042 }
4043
4044 /* Implement the __underlying_type keyword: Return the underlying
4045 type of TYPE, suitable for use as a type-specifier. */
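/* For example (illustrative only):

     enum class E : short { A };
     __underlying_type (E) x = 0;   // x has type short
 */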
4046
4047 tree
4048 finish_underlying_type (tree type)
4049 {
4050 tree underlying_type;
4051
4052 if (processing_template_decl)
4053 {
4054 underlying_type = cxx_make_type (UNDERLYING_TYPE);
4055 UNDERLYING_TYPE_TYPE (underlying_type) = type;
4056 SET_TYPE_STRUCTURAL_EQUALITY (underlying_type);
4057
4058 return underlying_type;
4059 }
4060
4061 if (!complete_type_or_else (type, NULL_TREE))
4062 return error_mark_node;
4063
4064 if (TREE_CODE (type) != ENUMERAL_TYPE)
4065 {
4066 error ("%qT is not an enumeration type", type);
4067 return error_mark_node;
4068 }
4069
4070 underlying_type = ENUM_UNDERLYING_TYPE (type);
4071
4072 /* Fixup necessary in this case because ENUM_UNDERLYING_TYPE
4073 includes TYPE_MIN_VALUE and TYPE_MAX_VALUE information.
4074 See finish_enum_value_list for details. */
4075 if (!ENUM_FIXED_UNDERLYING_TYPE_P (type))
4076 underlying_type
4077 = c_common_type_for_mode (TYPE_MODE (underlying_type),
4078 TYPE_UNSIGNED (underlying_type));
4079
4080 return underlying_type;
4081 }
4082
4083 /* Implement the __direct_bases keyword: Return the direct base classes
4084 of TYPE. */
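/* For example (an illustrative sketch), given

     struct A {};  struct V {};  struct B {};
     struct D : A, virtual V, B {};

   the resulting vector is (V, A, B): virtual bases first, then the
   non-virtual bases in declaration order, as the loops below show.  */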
4085
4086 tree
4087 calculate_direct_bases (tree type, tsubst_flags_t complain)
4088 {
4089 if (!complete_type_or_maybe_complain (type, NULL_TREE, complain)
4090 || !NON_UNION_CLASS_TYPE_P (type))
4091 return make_tree_vec (0);
4092
4093 releasing_vec vector;
4094 vec<tree, va_gc> *base_binfos = BINFO_BASE_BINFOS (TYPE_BINFO (type));
4095 tree binfo;
4096 unsigned i;
4097
4098 /* Virtual bases are initialized first */
4099 for (i = 0; base_binfos->iterate (i, &binfo); i++)
4100 if (BINFO_VIRTUAL_P (binfo))
4101 vec_safe_push (vector, binfo);
4102
4103 /* Now non-virtuals */
4104 for (i = 0; base_binfos->iterate (i, &binfo); i++)
4105 if (!BINFO_VIRTUAL_P (binfo))
4106 vec_safe_push (vector, binfo);
4107
4108 tree bases_vec = make_tree_vec (vector->length ());
4109
4110 for (i = 0; i < vector->length (); ++i)
4111 TREE_VEC_ELT (bases_vec, i) = BINFO_TYPE ((*vector)[i]);
4112
4113 return bases_vec;
4114 }
4115
4116 /* Implement the __bases keyword: Return the base classes
4117 of TYPE. */
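/* __bases and __direct_bases expand to packs and are therefore only
   usable inside a template; e.g. (a rough sketch):

     #include <tuple>

     template <typename T>
     struct bases_of
     {
       typedef std::tuple<__bases (T)...> type;
     };
 */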
4118
4119 /* Find morally non-virtual base classes by walking the binfo hierarchy.
4120 Virtual base classes are handled separately in finish_bases. */
4121
4122 static tree
4123 dfs_calculate_bases_pre (tree binfo, void * /*data_*/)
4124 {
4125 /* Don't walk bases of virtual bases */
4126 return BINFO_VIRTUAL_P (binfo) ? dfs_skip_bases : NULL_TREE;
4127 }
4128
4129 static tree
4130 dfs_calculate_bases_post (tree binfo, void *data_)
4131 {
4132 vec<tree, va_gc> **data = ((vec<tree, va_gc> **) data_);
4133 if (!BINFO_VIRTUAL_P (binfo))
4134 vec_safe_push (*data, BINFO_TYPE (binfo));
4135 return NULL_TREE;
4136 }
4137
4138 /* Calculates the morally non-virtual base classes of a class */
4139 static vec<tree, va_gc> *
4140 calculate_bases_helper (tree type)
4141 {
4142 vec<tree, va_gc> *vector = make_tree_vector ();
4143
4144 /* Now add non-virtual base classes in order of construction */
4145 if (TYPE_BINFO (type))
4146 dfs_walk_all (TYPE_BINFO (type),
4147 dfs_calculate_bases_pre, dfs_calculate_bases_post, &vector);
4148 return vector;
4149 }
4150
4151 tree
4152 calculate_bases (tree type, tsubst_flags_t complain)
4153 {
4154 if (!complete_type_or_maybe_complain (type, NULL_TREE, complain)
4155 || !NON_UNION_CLASS_TYPE_P (type))
4156 return make_tree_vec (0);
4157
4158 releasing_vec vector;
4159 tree bases_vec = NULL_TREE;
4160 unsigned i;
4161 vec<tree, va_gc> *vbases;
4162 tree binfo;
4163
4164 /* First go through virtual base classes */
4165 for (vbases = CLASSTYPE_VBASECLASSES (type), i = 0;
4166 vec_safe_iterate (vbases, i, &binfo); i++)
4167 {
4168 releasing_vec vbase_bases
4169 = calculate_bases_helper (BINFO_TYPE (binfo));
4170 vec_safe_splice (vector, vbase_bases);
4171 }
4172
4173 /* Now for the non-virtual bases */
4174 releasing_vec nonvbases = calculate_bases_helper (type);
4175 vec_safe_splice (vector, nonvbases);
4176
4177 /* Note that during error recovery vector->length can even be zero. */
4178 if (vector->length () > 1)
4179 {
4180 /* The last element is the entire class, so don't copy it. */
4181 bases_vec = make_tree_vec (vector->length () - 1);
4182
4183 for (i = 0; i < vector->length () - 1; ++i)
4184 TREE_VEC_ELT (bases_vec, i) = (*vector)[i];
4185 }
4186 else
4187 bases_vec = make_tree_vec (0);
4188
4189 return bases_vec;
4190 }
4191
4192 tree
4193 finish_bases (tree type, bool direct)
4194 {
4195 tree bases = NULL_TREE;
4196
4197 if (!processing_template_decl)
4198 {
4199 /* Parameter packs can only be used in templates */
4200 error ("parameter pack %<__bases%> only valid in template declaration");
4201 return error_mark_node;
4202 }
4203
4204 bases = cxx_make_type (BASES);
4205 BASES_TYPE (bases) = type;
4206 BASES_DIRECT (bases) = direct;
4207 SET_TYPE_STRUCTURAL_EQUALITY (bases);
4208
4209 return bases;
4210 }
4211
4212 /* Perform C++-specific checks for __builtin_offsetof before calling
4213 fold_offsetof. */
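/* For example (illustrative only):

     struct S { int i; void f (); };

     constexpr auto ok  = __builtin_offsetof (S, i);  // folded to a constant
     constexpr auto bad = __builtin_offsetof (S, f);  // error: applied to a
                                                      // member function
 */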
4214
4215 tree
4216 finish_offsetof (tree object_ptr, tree expr, location_t loc)
4217 {
4218 /* If we're processing a template, we can't finish the semantics yet.
4219 Otherwise we can fold the entire expression now. */
4220 if (processing_template_decl)
4221 {
4222 expr = build2 (OFFSETOF_EXPR, size_type_node, expr, object_ptr);
4223 SET_EXPR_LOCATION (expr, loc);
4224 return expr;
4225 }
4226
4227 if (expr == error_mark_node)
4228 return error_mark_node;
4229
4230 if (TREE_CODE (expr) == PSEUDO_DTOR_EXPR)
4231 {
4232 error ("cannot apply %<offsetof%> to destructor %<~%T%>",
4233 TREE_OPERAND (expr, 2));
4234 return error_mark_node;
4235 }
4236 if (FUNC_OR_METHOD_TYPE_P (TREE_TYPE (expr))
4237 || TREE_TYPE (expr) == unknown_type_node)
4238 {
4239 while (TREE_CODE (expr) == COMPONENT_REF
4240 || TREE_CODE (expr) == COMPOUND_EXPR)
4241 expr = TREE_OPERAND (expr, 1);
4242
4243 if (DECL_P (expr))
4244 {
4245 error ("cannot apply %<offsetof%> to member function %qD", expr);
4246 inform (DECL_SOURCE_LOCATION (expr), "declared here");
4247 }
4248 else
4249 error ("cannot apply %<offsetof%> to member function");
4250 return error_mark_node;
4251 }
4252 if (TREE_CODE (expr) == CONST_DECL)
4253 {
4254 error ("cannot apply %<offsetof%> to an enumerator %qD", expr);
4255 return error_mark_node;
4256 }
4257 if (REFERENCE_REF_P (expr))
4258 expr = TREE_OPERAND (expr, 0);
4259 if (!complete_type_or_else (TREE_TYPE (TREE_TYPE (object_ptr)), object_ptr))
4260 return error_mark_node;
4261 if (warn_invalid_offsetof
4262 && CLASS_TYPE_P (TREE_TYPE (TREE_TYPE (object_ptr)))
4263 && CLASSTYPE_NON_STD_LAYOUT (TREE_TYPE (TREE_TYPE (object_ptr)))
4264 && cp_unevaluated_operand == 0)
4265 warning_at (loc, OPT_Winvalid_offsetof, "%<offsetof%> within "
4266 "non-standard-layout type %qT is conditionally-supported",
4267 TREE_TYPE (TREE_TYPE (object_ptr)));
4268 return fold_offsetof (expr);
4269 }
4270
4271 /* Replace the AGGR_INIT_EXPR at *TP with an equivalent CALL_EXPR. This
4272 function is broken out from the above for the benefit of the tree-ssa
4273 project. */
4274
4275 void
4276 simplify_aggr_init_expr (tree *tp)
4277 {
4278 tree aggr_init_expr = *tp;
4279
4280 /* Form an appropriate CALL_EXPR. */
4281 tree fn = AGGR_INIT_EXPR_FN (aggr_init_expr);
4282 tree slot = AGGR_INIT_EXPR_SLOT (aggr_init_expr);
4283 tree type = TREE_TYPE (slot);
4284
4285 tree call_expr;
4286 enum style_t { ctor, arg, pcc } style;
4287
4288 if (AGGR_INIT_VIA_CTOR_P (aggr_init_expr))
4289 style = ctor;
4290 #ifdef PCC_STATIC_STRUCT_RETURN
4291 else if (1)
4292 style = pcc;
4293 #endif
4294 else
4295 {
4296 gcc_assert (TREE_ADDRESSABLE (type));
4297 style = arg;
4298 }
4299
4300 call_expr = build_call_array_loc (input_location,
4301 TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))),
4302 fn,
4303 aggr_init_expr_nargs (aggr_init_expr),
4304 AGGR_INIT_EXPR_ARGP (aggr_init_expr));
4305 TREE_NOTHROW (call_expr) = TREE_NOTHROW (aggr_init_expr);
4306 CALL_FROM_THUNK_P (call_expr) = AGGR_INIT_FROM_THUNK_P (aggr_init_expr);
4307 CALL_EXPR_OPERATOR_SYNTAX (call_expr)
4308 = CALL_EXPR_OPERATOR_SYNTAX (aggr_init_expr);
4309 CALL_EXPR_ORDERED_ARGS (call_expr) = CALL_EXPR_ORDERED_ARGS (aggr_init_expr);
4310 CALL_EXPR_REVERSE_ARGS (call_expr) = CALL_EXPR_REVERSE_ARGS (aggr_init_expr);
4311
4312 if (style == ctor)
4313 {
4314 /* Replace the first argument to the ctor with the address of the
4315 slot. */
4316 cxx_mark_addressable (slot);
4317 CALL_EXPR_ARG (call_expr, 0) =
4318 build1 (ADDR_EXPR, build_pointer_type (type), slot);
4319 }
4320 else if (style == arg)
4321 {
4322 /* Just mark it addressable here, and leave the rest to
4323 expand_call{,_inline}. */
4324 cxx_mark_addressable (slot);
4325 CALL_EXPR_RETURN_SLOT_OPT (call_expr) = true;
4326 call_expr = build2 (INIT_EXPR, TREE_TYPE (call_expr), slot, call_expr);
4327 }
4328 else if (style == pcc)
4329 {
4330 /* If we're using the non-reentrant PCC calling convention, then we
4331 need to copy the returned value out of the static buffer into the
4332 SLOT. */
4333 push_deferring_access_checks (dk_no_check);
4334 call_expr = build_aggr_init (slot, call_expr,
4335 DIRECT_BIND | LOOKUP_ONLYCONVERTING,
4336 tf_warning_or_error);
4337 pop_deferring_access_checks ();
4338 call_expr = build2 (COMPOUND_EXPR, TREE_TYPE (slot), call_expr, slot);
4339 }
4340
4341 if (AGGR_INIT_ZERO_FIRST (aggr_init_expr))
4342 {
4343 tree init = build_zero_init (type, NULL_TREE,
4344 /*static_storage_p=*/false);
4345 init = build2 (INIT_EXPR, void_type_node, slot, init);
4346 call_expr = build2 (COMPOUND_EXPR, TREE_TYPE (call_expr),
4347 init, call_expr);
4348 }
4349
4350 *tp = call_expr;
4351 }
4352
4353 /* Emit all thunks to FN that should be emitted when FN is emitted. */
4354
4355 void
4356 emit_associated_thunks (tree fn)
4357 {
4358 /* When we use vcall offsets, we emit thunks with the virtual
4359 functions to which they thunk. The whole point of vcall offsets
4360 is so that you can know statically the entire set of thunks that
4361 will ever be needed for a given virtual function, thereby
4362 enabling you to output all the thunks with the function itself. */
4363 if (DECL_VIRTUAL_P (fn)
4364 /* Do not emit thunks for extern template instantiations. */
4365 && ! DECL_REALLY_EXTERN (fn))
4366 {
4367 tree thunk;
4368
4369 for (thunk = DECL_THUNKS (fn); thunk; thunk = DECL_CHAIN (thunk))
4370 {
4371 if (!THUNK_ALIAS (thunk))
4372 {
4373 use_thunk (thunk, /*emit_p=*/1);
4374 if (DECL_RESULT_THUNK_P (thunk))
4375 {
4376 tree probe;
4377
4378 for (probe = DECL_THUNKS (thunk);
4379 probe; probe = DECL_CHAIN (probe))
4380 use_thunk (probe, /*emit_p=*/1);
4381 }
4382 }
4383 else
4384 gcc_assert (!DECL_THUNKS (thunk));
4385 }
4386 }
4387 }
4388
4389 /* Generate RTL for FN. */
4390
4391 bool
4392 expand_or_defer_fn_1 (tree fn)
4393 {
4394 /* When the parser calls us after finishing the body of a template
4395 function, we don't really want to expand the body. */
4396 if (processing_template_decl)
4397 {
4398 /* Normally, collection only occurs in rest_of_compilation. So,
4399 if we don't collect here, we never collect junk generated
4400 during the processing of templates until we hit a
4401 non-template function. It's not safe to do this inside a
4402 nested class, though, as the parser may have local state that
4403 is not a GC root. */
4404 if (!function_depth)
4405 ggc_collect ();
4406 return false;
4407 }
4408
4409 gcc_assert (DECL_SAVED_TREE (fn));
4410
4411 /* We make a decision about linkage for these functions at the end
4412 of the compilation. Until that point, we do not want the back
4413 end to output them -- but we do want it to see the bodies of
4414 these functions so that it can inline them as appropriate. */
4415 if (DECL_DECLARED_INLINE_P (fn) || DECL_IMPLICIT_INSTANTIATION (fn))
4416 {
4417 if (DECL_INTERFACE_KNOWN (fn))
4418 /* We've already made a decision as to how this function will
4419 be handled. */;
4420 else if (!at_eof
4421 || DECL_IMMEDIATE_FUNCTION_P (fn)
4422 || DECL_OMP_DECLARE_REDUCTION_P (fn))
4423 tentative_decl_linkage (fn);
4424 else
4425 import_export_decl (fn);
4426
4427 /* If the user wants us to keep all inline functions, then mark
4428 this function as needed so that finish_file will make sure to
4429 output it later. Similarly, all dllexport'd functions must
4430 be emitted; there may be callers in other DLLs. */
4431 if (DECL_DECLARED_INLINE_P (fn)
4432 && !DECL_REALLY_EXTERN (fn)
4433 && !DECL_IMMEDIATE_FUNCTION_P (fn)
4434 && !DECL_OMP_DECLARE_REDUCTION_P (fn)
4435 && (flag_keep_inline_functions
4436 || (flag_keep_inline_dllexport
4437 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (fn)))))
4438 {
4439 mark_needed (fn);
4440 DECL_EXTERNAL (fn) = 0;
4441 }
4442 }
4443
4444 /* If this is a constructor or destructor body, we have to clone
4445 it. */
4446 if (maybe_clone_body (fn))
4447 {
4448 /* We don't want to process FN again, so pretend we've written
4449 it out, even though we haven't. */
4450 TREE_ASM_WRITTEN (fn) = 1;
4451 /* If this is a constexpr function, keep DECL_SAVED_TREE. */
4452 if (!DECL_DECLARED_CONSTEXPR_P (fn))
4453 DECL_SAVED_TREE (fn) = NULL_TREE;
4454 return false;
4455 }
4456
4457 /* There's no reason to do any of the work here if we're only doing
4458 semantic analysis; this code just generates RTL. */
4459 if (flag_syntax_only)
4460 {
4461 /* Pretend that this function has been written out so that we don't try
4462 to expand it again. */
4463 TREE_ASM_WRITTEN (fn) = 1;
4464 return false;
4465 }
4466
4467 if (DECL_OMP_DECLARE_REDUCTION_P (fn))
4468 return false;
4469
4470 return true;
4471 }
4472
4473 void
4474 expand_or_defer_fn (tree fn)
4475 {
4476 if (expand_or_defer_fn_1 (fn))
4477 {
4478 function_depth++;
4479
4480 /* Expand or defer, at the whim of the compilation unit manager. */
4481 cgraph_node::finalize_function (fn, function_depth > 1);
4482 emit_associated_thunks (fn);
4483
4484 function_depth--;
4485 }
4486 }
4487
4488 class nrv_data
4489 {
4490 public:
4491 nrv_data () : visited (37) {}
4492
4493 tree var;
4494 tree result;
4495 hash_table<nofree_ptr_hash <tree_node> > visited;
4496 };
4497
4498 /* Helper function for walk_tree, used by finalize_nrv below. */
4499
4500 static tree
4501 finalize_nrv_r (tree* tp, int* walk_subtrees, void* data)
4502 {
4503 class nrv_data *dp = (class nrv_data *)data;
4504 tree_node **slot;
4505
4506 /* No need to walk into types. There wouldn't be any need to walk into
4507 non-statements, except that we have to consider STMT_EXPRs. */
4508 if (TYPE_P (*tp))
4509 *walk_subtrees = 0;
4510 /* Change all returns to just refer to the RESULT_DECL; this is a nop,
4511 but differs from using NULL_TREE in that it indicates that we care
4512 about the value of the RESULT_DECL. */
4513 else if (TREE_CODE (*tp) == RETURN_EXPR)
4514 TREE_OPERAND (*tp, 0) = dp->result;
4515 /* Change all cleanups for the NRV to only run when an exception is
4516 thrown. */
4517 else if (TREE_CODE (*tp) == CLEANUP_STMT
4518 && CLEANUP_DECL (*tp) == dp->var)
4519 CLEANUP_EH_ONLY (*tp) = 1;
4520 /* Replace the DECL_EXPR for the NRV with an initialization of the
4521 RESULT_DECL, if needed. */
4522 else if (TREE_CODE (*tp) == DECL_EXPR
4523 && DECL_EXPR_DECL (*tp) == dp->var)
4524 {
4525 tree init;
4526 if (DECL_INITIAL (dp->var)
4527 && DECL_INITIAL (dp->var) != error_mark_node)
4528 init = build2 (INIT_EXPR, void_type_node, dp->result,
4529 DECL_INITIAL (dp->var));
4530 else
4531 init = build_empty_stmt (EXPR_LOCATION (*tp));
4532 DECL_INITIAL (dp->var) = NULL_TREE;
4533 SET_EXPR_LOCATION (init, EXPR_LOCATION (*tp));
4534 *tp = init;
4535 }
4536 /* And replace all uses of the NRV with the RESULT_DECL. */
4537 else if (*tp == dp->var)
4538 *tp = dp->result;
4539
4540 /* Avoid walking into the same tree more than once. Unfortunately, we
4541 can't just use walk_tree_without_duplicates because it would only call
4542 us for the first occurrence of dp->var in the function body. */
4543 slot = dp->visited.find_slot (*tp, INSERT);
4544 if (*slot)
4545 *walk_subtrees = 0;
4546 else
4547 *slot = *tp;
4548
4549 /* Keep iterating. */
4550 return NULL_TREE;
4551 }
4552
4553 /* Called from finish_function to implement the named return value
4554 optimization by overriding all the RETURN_EXPRs and pertinent
4555 CLEANUP_STMTs and replacing all occurrences of VAR with RESULT, the
4556 RESULT_DECL for the function. */
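/* For example (illustrative only):

     struct X { X (); X (const X&); };

     X f ()
     {
       X x;
       return x;   // after NRV, x is replaced by the RESULT_DECL,
     }             // so no copy or move is needed on return
 */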
4557
4558 void
4559 finalize_nrv (tree *tp, tree var, tree result)
4560 {
4561 class nrv_data data;
4562
4563 /* Copy name from VAR to RESULT. */
4564 DECL_NAME (result) = DECL_NAME (var);
4565 /* Don't forget that we take its address. */
4566 TREE_ADDRESSABLE (result) = TREE_ADDRESSABLE (var);
4567 /* Finally set DECL_VALUE_EXPR to avoid assigning
4568 a stack slot at -O0 for the original var and debug info
4569 uses RESULT location for VAR. */
4570 SET_DECL_VALUE_EXPR (var, result);
4571 DECL_HAS_VALUE_EXPR_P (var) = 1;
4572
4573 data.var = var;
4574 data.result = result;
4575 cp_walk_tree (tp, finalize_nrv_r, &data, 0);
4576 }
4577 \f
4578 /* Create CP_OMP_CLAUSE_INFO for clause C. Returns true if it is invalid. */
4579
4580 bool
4581 cxx_omp_create_clause_info (tree c, tree type, bool need_default_ctor,
4582 bool need_copy_ctor, bool need_copy_assignment,
4583 bool need_dtor)
4584 {
4585 int save_errorcount = errorcount;
4586 tree info, t;
4587
4588 /* Always allocate 3 elements for simplicity. These are the
4589 function decls for the ctor, dtor, and assignment op.
4590 This layout is known to the three lang hooks,
4591 cxx_omp_clause_default_init, cxx_omp_clause_copy_init,
4592 and cxx_omp_clause_assign_op. */
4593 info = make_tree_vec (3);
4594 CP_OMP_CLAUSE_INFO (c) = info;
4595
4596 if (need_default_ctor || need_copy_ctor)
4597 {
4598 if (need_default_ctor)
4599 t = get_default_ctor (type);
4600 else
4601 t = get_copy_ctor (type, tf_warning_or_error);
4602
4603 if (t && !trivial_fn_p (t))
4604 TREE_VEC_ELT (info, 0) = t;
4605 }
4606
4607 if (need_dtor && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type))
4608 TREE_VEC_ELT (info, 1) = get_dtor (type, tf_warning_or_error);
4609
4610 if (need_copy_assignment)
4611 {
4612 t = get_copy_assign (type);
4613
4614 if (t && !trivial_fn_p (t))
4615 TREE_VEC_ELT (info, 2) = t;
4616 }
4617
4618 return errorcount != save_errorcount;
4619 }
4620
4621 /* If DECL is DECL_OMP_PRIVATIZED_MEMBER, return the corresponding
4622 FIELD_DECL, otherwise return NULL_TREE. */
4623
4624 static tree
4625 omp_clause_decl_field (tree decl)
4626 {
4627 if (VAR_P (decl)
4628 && DECL_HAS_VALUE_EXPR_P (decl)
4629 && DECL_ARTIFICIAL (decl)
4630 && DECL_LANG_SPECIFIC (decl)
4631 && DECL_OMP_PRIVATIZED_MEMBER (decl))
4632 {
4633 tree f = DECL_VALUE_EXPR (decl);
4634 if (INDIRECT_REF_P (f))
4635 f = TREE_OPERAND (f, 0);
4636 if (TREE_CODE (f) == COMPONENT_REF)
4637 {
4638 f = TREE_OPERAND (f, 1);
4639 gcc_assert (TREE_CODE (f) == FIELD_DECL);
4640 return f;
4641 }
4642 }
4643 return NULL_TREE;
4644 }
4645
4646 /* Adjust DECL if needed for printing using %qE. */
4647
4648 static tree
4649 omp_clause_printable_decl (tree decl)
4650 {
4651 tree t = omp_clause_decl_field (decl);
4652 if (t)
4653 return t;
4654 return decl;
4655 }
4656
4657 /* For a FIELD_DECL F and corresponding DECL_OMP_PRIVATIZED_MEMBER
4658 VAR_DECL T that doesn't need a DECL_EXPR added, record it for
4659 privatization. */
4660
4661 static void
4662 omp_note_field_privatization (tree f, tree t)
4663 {
4664 if (!omp_private_member_map)
4665 omp_private_member_map = new hash_map<tree, tree>;
4666 tree &v = omp_private_member_map->get_or_insert (f);
4667 if (v == NULL_TREE)
4668 {
4669 v = t;
4670 omp_private_member_vec.safe_push (f);
4671 /* Signal that we don't want to create DECL_EXPR for this dummy var. */
4672 omp_private_member_vec.safe_push (integer_zero_node);
4673 }
4674 }
4675
4676 /* Privatize FIELD_DECL T, return corresponding DECL_OMP_PRIVATIZED_MEMBER
4677 dummy VAR_DECL. */
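/* For example (an illustrative sketch), privatizing a non-static data
   member inside a member function:

     struct S
     {
       int x;
       void f ()
       {
     #pragma omp parallel for private (x)
         for (int i = 0; i < 8; i++)
           x = i;   // refers to the dummy VAR_DECL standing in for this->x
       }
     };
 */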
4678
4679 tree
4680 omp_privatize_field (tree t, bool shared)
4681 {
4682 tree m = finish_non_static_data_member (t, NULL_TREE, NULL_TREE);
4683 if (m == error_mark_node)
4684 return error_mark_node;
4685 if (!omp_private_member_map && !shared)
4686 omp_private_member_map = new hash_map<tree, tree>;
4687 if (TYPE_REF_P (TREE_TYPE (t)))
4688 {
4689 gcc_assert (INDIRECT_REF_P (m));
4690 m = TREE_OPERAND (m, 0);
4691 }
4692 tree vb = NULL_TREE;
4693 tree &v = shared ? vb : omp_private_member_map->get_or_insert (t);
4694 if (v == NULL_TREE)
4695 {
4696 v = create_temporary_var (TREE_TYPE (m));
4697 retrofit_lang_decl (v);
4698 DECL_OMP_PRIVATIZED_MEMBER (v) = 1;
4699 SET_DECL_VALUE_EXPR (v, m);
4700 DECL_HAS_VALUE_EXPR_P (v) = 1;
4701 if (!shared)
4702 omp_private_member_vec.safe_push (t);
4703 }
4704 return v;
4705 }
4706
4707 /* Helper function for handle_omp_array_sections. Called recursively
4708 to handle multiple array-section-subscripts. C is the clause,
4709 T current expression (initially OMP_CLAUSE_DECL), which is either
4710 a TREE_LIST for array-section-subscript (TREE_PURPOSE is low-bound
4711 expression if specified, TREE_VALUE is the length expression if specified,
4712 and TREE_CHAIN is whatever the array section was specified after), or some decl.
4713 TYPES vector is populated with array section types, MAYBE_ZERO_LEN
4714 set to true if any of the array-section-subscript could have length
4715 of zero (explicit or implicit), FIRST_NON_ONE is the index of the
4716 first array-section-subscript which is known not to have length
4717 of one. Given say:
4718 map(a[:b][2:1][:c][:2][:d][e:f][2:5])
4719 FIRST_NON_ONE will be 3, array-section-subscript [:b], [2:1] and [:c]
4720 all are or may have length of 1, array-section-subscript [:2] is the
4721 first one known not to have length 1. For array-section-subscript
4722 <= FIRST_NON_ONE we diagnose non-contiguous arrays if the low bound isn't
4723 0 or the length isn't the array domain max + 1; for > FIRST_NON_ONE we
4724 can only do so if MAYBE_ZERO_LEN is false. MAYBE_ZERO_LEN will be true in
4725 the above case though, as some lengths could be zero. */
4726
4727 static tree
4728 handle_omp_array_sections_1 (tree c, tree t, vec<tree> &types,
4729 bool &maybe_zero_len, unsigned int &first_non_one,
4730 enum c_omp_region_type ort)
4731 {
4732 tree ret, low_bound, length, type;
4733 if (TREE_CODE (t) != TREE_LIST)
4734 {
4735 if (error_operand_p (t))
4736 return error_mark_node;
4737 if (REFERENCE_REF_P (t)
4738 && TREE_CODE (TREE_OPERAND (t, 0)) == COMPONENT_REF)
4739 t = TREE_OPERAND (t, 0);
4740 ret = t;
4741 if (TREE_CODE (t) == COMPONENT_REF
4742 && ort == C_ORT_OMP
4743 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
4744 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO
4745 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM)
4746 && !type_dependent_expression_p (t))
4747 {
4748 if (TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL
4749 && DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
4750 {
4751 error_at (OMP_CLAUSE_LOCATION (c),
4752 "bit-field %qE in %qs clause",
4753 t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
4754 return error_mark_node;
4755 }
4756 while (TREE_CODE (t) == COMPONENT_REF)
4757 {
4758 if (TREE_TYPE (TREE_OPERAND (t, 0))
4759 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == UNION_TYPE)
4760 {
4761 error_at (OMP_CLAUSE_LOCATION (c),
4762 "%qE is a member of a union", t);
4763 return error_mark_node;
4764 }
4765 t = TREE_OPERAND (t, 0);
4766 }
4767 if (REFERENCE_REF_P (t))
4768 t = TREE_OPERAND (t, 0);
4769 }
4770 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL)
4771 {
4772 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
4773 return NULL_TREE;
4774 if (DECL_P (t))
4775 error_at (OMP_CLAUSE_LOCATION (c),
4776 "%qD is not a variable in %qs clause", t,
4777 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
4778 else
4779 error_at (OMP_CLAUSE_LOCATION (c),
4780 "%qE is not a variable in %qs clause", t,
4781 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
4782 return error_mark_node;
4783 }
4784 else if (ort == C_ORT_OMP
4785 && TREE_CODE (t) == PARM_DECL
4786 && DECL_ARTIFICIAL (t)
4787 && DECL_NAME (t) == this_identifier)
4788 {
4789 error_at (OMP_CLAUSE_LOCATION (c),
4790 "%<this%> allowed in OpenMP only in %<declare simd%>"
4791 " clauses");
4792 return error_mark_node;
4793 }
4794 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
4795 && VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t))
4796 {
4797 error_at (OMP_CLAUSE_LOCATION (c),
4798 "%qD is threadprivate variable in %qs clause", t,
4799 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
4800 return error_mark_node;
4801 }
4802 if (type_dependent_expression_p (ret))
4803 return NULL_TREE;
4804 ret = convert_from_reference (ret);
4805 return ret;
4806 }
4807
4808 if (ort == C_ORT_OMP
4809 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
4810 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
4811 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
4812 && TREE_CODE (TREE_CHAIN (t)) == FIELD_DECL)
4813 TREE_CHAIN (t) = omp_privatize_field (TREE_CHAIN (t), false);
4814 ret = handle_omp_array_sections_1 (c, TREE_CHAIN (t), types,
4815 maybe_zero_len, first_non_one, ort);
4816 if (ret == error_mark_node || ret == NULL_TREE)
4817 return ret;
4818
4819 type = TREE_TYPE (ret);
4820 low_bound = TREE_PURPOSE (t);
4821 length = TREE_VALUE (t);
4822 if ((low_bound && type_dependent_expression_p (low_bound))
4823 || (length && type_dependent_expression_p (length)))
4824 return NULL_TREE;
4825
4826 if (low_bound == error_mark_node || length == error_mark_node)
4827 return error_mark_node;
4828
4829 if (low_bound && !INTEGRAL_TYPE_P (TREE_TYPE (low_bound)))
4830 {
4831 error_at (OMP_CLAUSE_LOCATION (c),
4832 "low bound %qE of array section does not have integral type",
4833 low_bound);
4834 return error_mark_node;
4835 }
4836 if (length && !INTEGRAL_TYPE_P (TREE_TYPE (length)))
4837 {
4838 error_at (OMP_CLAUSE_LOCATION (c),
4839 "length %qE of array section does not have integral type",
4840 length);
4841 return error_mark_node;
4842 }
4843 if (low_bound)
4844 low_bound = mark_rvalue_use (low_bound);
4845 if (length)
4846 length = mark_rvalue_use (length);
4847 /* We need to reduce to real constant-values for checks below. */
4848 if (length)
4849 length = fold_simple (length);
4850 if (low_bound)
4851 low_bound = fold_simple (low_bound);
4852 if (low_bound
4853 && TREE_CODE (low_bound) == INTEGER_CST
4854 && TYPE_PRECISION (TREE_TYPE (low_bound))
4855 > TYPE_PRECISION (sizetype))
4856 low_bound = fold_convert (sizetype, low_bound);
4857 if (length
4858 && TREE_CODE (length) == INTEGER_CST
4859 && TYPE_PRECISION (TREE_TYPE (length))
4860 > TYPE_PRECISION (sizetype))
4861 length = fold_convert (sizetype, length);
4862 if (low_bound == NULL_TREE)
4863 low_bound = integer_zero_node;
4864
4865 if (length != NULL_TREE)
4866 {
4867 if (!integer_nonzerop (length))
4868 {
4869 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
4870 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
4871 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
4872 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
4873 {
4874 if (integer_zerop (length))
4875 {
4876 error_at (OMP_CLAUSE_LOCATION (c),
4877 "zero length array section in %qs clause",
4878 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
4879 return error_mark_node;
4880 }
4881 }
4882 else
4883 maybe_zero_len = true;
4884 }
4885 if (first_non_one == types.length ()
4886 && (TREE_CODE (length) != INTEGER_CST || integer_onep (length)))
4887 first_non_one++;
4888 }
4889 if (TREE_CODE (type) == ARRAY_TYPE)
4890 {
4891 if (length == NULL_TREE
4892 && (TYPE_DOMAIN (type) == NULL_TREE
4893 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE))
4894 {
4895 error_at (OMP_CLAUSE_LOCATION (c),
4896 "for unknown bound array type length expression must "
4897 "be specified");
4898 return error_mark_node;
4899 }
4900 if (TREE_CODE (low_bound) == INTEGER_CST
4901 && tree_int_cst_sgn (low_bound) == -1)
4902 {
4903 error_at (OMP_CLAUSE_LOCATION (c),
4904 "negative low bound in array section in %qs clause",
4905 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
4906 return error_mark_node;
4907 }
4908 if (length != NULL_TREE
4909 && TREE_CODE (length) == INTEGER_CST
4910 && tree_int_cst_sgn (length) == -1)
4911 {
4912 error_at (OMP_CLAUSE_LOCATION (c),
4913 "negative length in array section in %qs clause",
4914 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
4915 return error_mark_node;
4916 }
4917 if (TYPE_DOMAIN (type)
4918 && TYPE_MAX_VALUE (TYPE_DOMAIN (type))
4919 && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
4920 == INTEGER_CST)
4921 {
4922 tree size
4923 = fold_convert (sizetype, TYPE_MAX_VALUE (TYPE_DOMAIN (type)));
4924 size = size_binop (PLUS_EXPR, size, size_one_node);
4925 if (TREE_CODE (low_bound) == INTEGER_CST)
4926 {
4927 if (tree_int_cst_lt (size, low_bound))
4928 {
4929 error_at (OMP_CLAUSE_LOCATION (c),
4930 "low bound %qE above array section size "
4931 "in %qs clause", low_bound,
4932 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
4933 return error_mark_node;
4934 }
4935 if (tree_int_cst_equal (size, low_bound))
4936 {
4937 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
4938 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
4939 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
4940 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
4941 {
4942 error_at (OMP_CLAUSE_LOCATION (c),
4943 "zero length array section in %qs clause",
4944 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
4945 return error_mark_node;
4946 }
4947 maybe_zero_len = true;
4948 }
4949 else if (length == NULL_TREE
4950 && first_non_one == types.length ()
4951 && tree_int_cst_equal
4952 (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4953 low_bound))
4954 first_non_one++;
4955 }
4956 else if (length == NULL_TREE)
4957 {
4958 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
4959 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
4960 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION
4961 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TASK_REDUCTION)
4962 maybe_zero_len = true;
4963 if (first_non_one == types.length ())
4964 first_non_one++;
4965 }
4966 if (length && TREE_CODE (length) == INTEGER_CST)
4967 {
4968 if (tree_int_cst_lt (size, length))
4969 {
4970 error_at (OMP_CLAUSE_LOCATION (c),
4971 "length %qE above array section size "
4972 "in %qs clause", length,
4973 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
4974 return error_mark_node;
4975 }
4976 if (TREE_CODE (low_bound) == INTEGER_CST)
4977 {
4978 tree lbpluslen
4979 = size_binop (PLUS_EXPR,
4980 fold_convert (sizetype, low_bound),
4981 fold_convert (sizetype, length));
4982 if (TREE_CODE (lbpluslen) == INTEGER_CST
4983 && tree_int_cst_lt (size, lbpluslen))
4984 {
4985 error_at (OMP_CLAUSE_LOCATION (c),
4986 "high bound %qE above array section size "
4987 "in %qs clause", lbpluslen,
4988 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
4989 return error_mark_node;
4990 }
4991 }
4992 }
4993 }
4994 else if (length == NULL_TREE)
4995 {
4996 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
4997 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
4998 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION
4999 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TASK_REDUCTION)
5000 maybe_zero_len = true;
5001 if (first_non_one == types.length ())
5002 first_non_one++;
5003 }
5004
5005 /* For [lb:] we will need to evaluate lb more than once. */
5006 if (length == NULL_TREE && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
5007 {
5008 tree lb = cp_save_expr (low_bound);
5009 if (lb != low_bound)
5010 {
5011 TREE_PURPOSE (t) = lb;
5012 low_bound = lb;
5013 }
5014 }
5015 }
5016 else if (TYPE_PTR_P (type))
5017 {
5018 if (length == NULL_TREE)
5019 {
5020 error_at (OMP_CLAUSE_LOCATION (c),
5021 "for pointer type length expression must be specified");
5022 return error_mark_node;
5023 }
5024 if (length != NULL_TREE
5025 && TREE_CODE (length) == INTEGER_CST
5026 && tree_int_cst_sgn (length) == -1)
5027 {
5028 error_at (OMP_CLAUSE_LOCATION (c),
5029 "negative length in array section in %qs clause",
5030 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5031 return error_mark_node;
5032 }
5033 /* If there is a pointer type anywhere but in the very first
5034 array-section-subscript, the array section can't be contiguous. */
5035 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
5036 && TREE_CODE (TREE_CHAIN (t)) == TREE_LIST)
5037 {
5038 error_at (OMP_CLAUSE_LOCATION (c),
5039 "array section is not contiguous in %qs clause",
5040 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5041 return error_mark_node;
5042 }
5043 }
5044 else
5045 {
5046 error_at (OMP_CLAUSE_LOCATION (c),
5047 "%qE does not have pointer or array type", ret);
5048 return error_mark_node;
5049 }
5050 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
5051 types.safe_push (TREE_TYPE (ret));
5052 /* We will need to evaluate lb more than once. */
5053 tree lb = cp_save_expr (low_bound);
5054 if (lb != low_bound)
5055 {
5056 TREE_PURPOSE (t) = lb;
5057 low_bound = lb;
5058 }
5059 /* Temporarily disable -fstrong-eval-order for array reductions.
5060 The SAVE_EXPR and COMPOUND_EXPR added when low_bound has side-effects
5061 are something the middle-end can't cope with, and more importantly,
5062 it is the actual base variable that needs to be privatized, not some
5063 temporary holding a previous value of it. That, together with OpenMP
5064 leaving unspecified how many times the side-effects are evaluated,
5065 makes int *a, *b; ... reduction(+:a[a = b, 3:10]) really unspecified. */
5066 warning_sentinel s (flag_strong_eval_order,
5067 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5068 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5069 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION);
5070 ret = grok_array_decl (OMP_CLAUSE_LOCATION (c), ret, low_bound, false);
5071 return ret;
5072 }
5073
5074 /* Handle array sections for clause C. */
5075
5076 static bool
5077 handle_omp_array_sections (tree c, enum c_omp_region_type ort)
5078 {
5079 bool maybe_zero_len = false;
5080 unsigned int first_non_one = 0;
5081 auto_vec<tree, 10> types;
5082 tree *tp = &OMP_CLAUSE_DECL (c);
5083 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
5084 && TREE_CODE (*tp) == TREE_LIST
5085 && TREE_PURPOSE (*tp)
5086 && TREE_CODE (TREE_PURPOSE (*tp)) == TREE_VEC)
5087 tp = &TREE_VALUE (*tp);
5088 tree first = handle_omp_array_sections_1 (c, *tp, types,
5089 maybe_zero_len, first_non_one,
5090 ort);
5091 if (first == error_mark_node)
5092 return true;
5093 if (first == NULL_TREE)
5094 return false;
5095 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
5096 {
5097 tree t = *tp;
5098 tree tem = NULL_TREE;
5099 if (processing_template_decl)
5100 return false;
5101 /* Need to evaluate side effects in the length expressions
5102 if any. */
5103 while (TREE_CODE (t) == TREE_LIST)
5104 {
5105 if (TREE_VALUE (t) && TREE_SIDE_EFFECTS (TREE_VALUE (t)))
5106 {
5107 if (tem == NULL_TREE)
5108 tem = TREE_VALUE (t);
5109 else
5110 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem),
5111 TREE_VALUE (t), tem);
5112 }
5113 t = TREE_CHAIN (t);
5114 }
5115 if (tem)
5116 first = build2 (COMPOUND_EXPR, TREE_TYPE (first), tem, first);
5117 *tp = first;
5118 }
5119 else
5120 {
5121 unsigned int num = types.length (), i;
5122 tree t, side_effects = NULL_TREE, size = NULL_TREE;
5123 tree condition = NULL_TREE;
5124
5125 if (int_size_in_bytes (TREE_TYPE (first)) <= 0)
5126 maybe_zero_len = true;
5127 if (processing_template_decl && maybe_zero_len)
5128 return false;
5129
5130 for (i = num, t = OMP_CLAUSE_DECL (c); i > 0;
5131 t = TREE_CHAIN (t))
5132 {
5133 tree low_bound = TREE_PURPOSE (t);
5134 tree length = TREE_VALUE (t);
5135
5136 i--;
5137 if (low_bound
5138 && TREE_CODE (low_bound) == INTEGER_CST
5139 && TYPE_PRECISION (TREE_TYPE (low_bound))
5140 > TYPE_PRECISION (sizetype))
5141 low_bound = fold_convert (sizetype, low_bound);
5142 if (length
5143 && TREE_CODE (length) == INTEGER_CST
5144 && TYPE_PRECISION (TREE_TYPE (length))
5145 > TYPE_PRECISION (sizetype))
5146 length = fold_convert (sizetype, length);
5147 if (low_bound == NULL_TREE)
5148 low_bound = integer_zero_node;
5149 if (!maybe_zero_len && i > first_non_one)
5150 {
5151 if (integer_nonzerop (low_bound))
5152 goto do_warn_noncontiguous;
5153 if (length != NULL_TREE
5154 && TREE_CODE (length) == INTEGER_CST
5155 && TYPE_DOMAIN (types[i])
5156 && TYPE_MAX_VALUE (TYPE_DOMAIN (types[i]))
5157 && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])))
5158 == INTEGER_CST)
5159 {
5160 tree size;
5161 size = size_binop (PLUS_EXPR,
5162 TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])),
5163 size_one_node);
5164 if (!tree_int_cst_equal (length, size))
5165 {
5166 do_warn_noncontiguous:
5167 error_at (OMP_CLAUSE_LOCATION (c),
5168 "array section is not contiguous in %qs "
5169 "clause",
5170 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5171 return true;
5172 }
5173 }
5174 if (!processing_template_decl
5175 && length != NULL_TREE
5176 && TREE_SIDE_EFFECTS (length))
5177 {
5178 if (side_effects == NULL_TREE)
5179 side_effects = length;
5180 else
5181 side_effects = build2 (COMPOUND_EXPR,
5182 TREE_TYPE (side_effects),
5183 length, side_effects);
5184 }
5185 }
5186 else if (processing_template_decl)
5187 continue;
5188 else
5189 {
5190 tree l;
5191
5192 if (i > first_non_one
5193 && ((length && integer_nonzerop (length))
5194 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5195 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5196 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION))
5197 continue;
5198 if (length)
5199 l = fold_convert (sizetype, length);
5200 else
5201 {
5202 l = size_binop (PLUS_EXPR,
5203 TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])),
5204 size_one_node);
5205 l = size_binop (MINUS_EXPR, l,
5206 fold_convert (sizetype, low_bound));
5207 }
5208 if (i > first_non_one)
5209 {
5210 l = fold_build2 (NE_EXPR, boolean_type_node, l,
5211 size_zero_node);
5212 if (condition == NULL_TREE)
5213 condition = l;
5214 else
5215 condition = fold_build2 (BIT_AND_EXPR, boolean_type_node,
5216 l, condition);
5217 }
5218 else if (size == NULL_TREE)
5219 {
5220 size = size_in_bytes (TREE_TYPE (types[i]));
5221 tree eltype = TREE_TYPE (types[num - 1]);
5222 while (TREE_CODE (eltype) == ARRAY_TYPE)
5223 eltype = TREE_TYPE (eltype);
5224 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5225 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5226 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
5227 size = size_binop (EXACT_DIV_EXPR, size,
5228 size_in_bytes (eltype));
5229 size = size_binop (MULT_EXPR, size, l);
5230 if (condition)
5231 size = fold_build3 (COND_EXPR, sizetype, condition,
5232 size, size_zero_node);
5233 }
5234 else
5235 size = size_binop (MULT_EXPR, size, l);
5236 }
5237 }
5238 if (!processing_template_decl)
5239 {
5240 if (side_effects)
5241 size = build2 (COMPOUND_EXPR, sizetype, side_effects, size);
5242 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5243 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5244 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
5245 {
5246 size = size_binop (MINUS_EXPR, size, size_one_node);
5247 size = save_expr (size);
5248 tree index_type = build_index_type (size);
5249 tree eltype = TREE_TYPE (first);
5250 while (TREE_CODE (eltype) == ARRAY_TYPE)
5251 eltype = TREE_TYPE (eltype);
5252 tree type = build_array_type (eltype, index_type);
5253 tree ptype = build_pointer_type (eltype);
5254 if (TYPE_REF_P (TREE_TYPE (t))
5255 && INDIRECT_TYPE_P (TREE_TYPE (TREE_TYPE (t))))
5256 t = convert_from_reference (t);
5257 else if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
5258 t = build_fold_addr_expr (t);
5259 tree t2 = build_fold_addr_expr (first);
5260 t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c),
5261 ptrdiff_type_node, t2);
5262 t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR,
5263 ptrdiff_type_node, t2,
5264 fold_convert_loc (OMP_CLAUSE_LOCATION (c),
5265 ptrdiff_type_node, t));
5266 if (tree_fits_shwi_p (t2))
5267 t = build2 (MEM_REF, type, t,
5268 build_int_cst (ptype, tree_to_shwi (t2)));
5269 else
5270 {
5271 t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c),
5272 sizetype, t2);
5273 t = build2_loc (OMP_CLAUSE_LOCATION (c), POINTER_PLUS_EXPR,
5274 TREE_TYPE (t), t, t2);
5275 t = build2 (MEM_REF, type, t, build_int_cst (ptype, 0));
5276 }
5277 OMP_CLAUSE_DECL (c) = t;
5278 return false;
5279 }
5280 OMP_CLAUSE_DECL (c) = first;
5281 OMP_CLAUSE_SIZE (c) = size;
5282 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
5283 || (TREE_CODE (t) == COMPONENT_REF
5284 && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE))
5285 return false;
5286 if (ort == C_ORT_OMP || ort == C_ORT_ACC)
5287 switch (OMP_CLAUSE_MAP_KIND (c))
5288 {
5289 case GOMP_MAP_ALLOC:
5290 case GOMP_MAP_TO:
5291 case GOMP_MAP_FROM:
5292 case GOMP_MAP_TOFROM:
5293 case GOMP_MAP_ALWAYS_TO:
5294 case GOMP_MAP_ALWAYS_FROM:
5295 case GOMP_MAP_ALWAYS_TOFROM:
5296 case GOMP_MAP_RELEASE:
5297 case GOMP_MAP_DELETE:
5298 case GOMP_MAP_FORCE_TO:
5299 case GOMP_MAP_FORCE_FROM:
5300 case GOMP_MAP_FORCE_TOFROM:
5301 case GOMP_MAP_FORCE_PRESENT:
5302 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1;
5303 break;
5304 default:
5305 break;
5306 }
5307 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
5308 OMP_CLAUSE_MAP);
5309 if ((ort & C_ORT_OMP_DECLARE_SIMD) != C_ORT_OMP && ort != C_ORT_ACC)
5310 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_POINTER);
5311 else if (TREE_CODE (t) == COMPONENT_REF)
5312 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALWAYS_POINTER);
5313 else if (REFERENCE_REF_P (t)
5314 && TREE_CODE (TREE_OPERAND (t, 0)) == COMPONENT_REF)
5315 {
5316 t = TREE_OPERAND (t, 0);
5317 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALWAYS_POINTER);
5318 }
5319 else
5320 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_POINTER);
5321 if (OMP_CLAUSE_MAP_KIND (c2) != GOMP_MAP_FIRSTPRIVATE_POINTER
5322 && !cxx_mark_addressable (t))
5323 return false;
5324 OMP_CLAUSE_DECL (c2) = t;
5325 t = build_fold_addr_expr (first);
5326 t = fold_convert_loc (OMP_CLAUSE_LOCATION (c),
5327 ptrdiff_type_node, t);
5328 tree ptr = OMP_CLAUSE_DECL (c2);
5329 ptr = convert_from_reference (ptr);
5330 if (!INDIRECT_TYPE_P (TREE_TYPE (ptr)))
5331 ptr = build_fold_addr_expr (ptr);
5332 t = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR,
5333 ptrdiff_type_node, t,
5334 fold_convert_loc (OMP_CLAUSE_LOCATION (c),
5335 ptrdiff_type_node, ptr));
5336 OMP_CLAUSE_SIZE (c2) = t;
5337 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
5338 OMP_CLAUSE_CHAIN (c) = c2;
5339 ptr = OMP_CLAUSE_DECL (c2);
5340 if (OMP_CLAUSE_MAP_KIND (c2) != GOMP_MAP_FIRSTPRIVATE_POINTER
5341 && TYPE_REF_P (TREE_TYPE (ptr))
5342 && INDIRECT_TYPE_P (TREE_TYPE (TREE_TYPE (ptr))))
5343 {
5344 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
5345 OMP_CLAUSE_MAP);
5346 OMP_CLAUSE_SET_MAP_KIND (c3, OMP_CLAUSE_MAP_KIND (c2));
5347 OMP_CLAUSE_DECL (c3) = ptr;
5348 if (OMP_CLAUSE_MAP_KIND (c2) == GOMP_MAP_ALWAYS_POINTER)
5349 OMP_CLAUSE_DECL (c2) = build_simple_mem_ref (ptr);
5350 else
5351 OMP_CLAUSE_DECL (c2) = convert_from_reference (ptr);
5352 OMP_CLAUSE_SIZE (c3) = size_zero_node;
5353 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
5354 OMP_CLAUSE_CHAIN (c2) = c3;
5355 }
5356 }
5357 }
5358 return false;
5359 }
5360
5361 /* Return identifier to look up for omp declare reduction. */
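/* For example (an illustrative sketch): for PLUS_EXPR and type int the
   identifier built below is roughly

     "omp declare reduction operator+~i"

   i.e. the prefix, the operator or user-supplied name and, separated by
   '~', the mangled type string.  */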
5362
5363 tree
5364 omp_reduction_id (enum tree_code reduction_code, tree reduction_id, tree type)
5365 {
5366 const char *p = NULL;
5367 const char *m = NULL;
5368 switch (reduction_code)
5369 {
5370 case PLUS_EXPR:
5371 case MULT_EXPR:
5372 case MINUS_EXPR:
5373 case BIT_AND_EXPR:
5374 case BIT_XOR_EXPR:
5375 case BIT_IOR_EXPR:
5376 case TRUTH_ANDIF_EXPR:
5377 case TRUTH_ORIF_EXPR:
5378 reduction_id = ovl_op_identifier (false, reduction_code);
5379 break;
5380 case MIN_EXPR:
5381 p = "min";
5382 break;
5383 case MAX_EXPR:
5384 p = "max";
5385 break;
5386 default:
5387 break;
5388 }
5389
5390 if (p == NULL)
5391 {
5392 if (TREE_CODE (reduction_id) != IDENTIFIER_NODE)
5393 return error_mark_node;
5394 p = IDENTIFIER_POINTER (reduction_id);
5395 }
5396
5397 if (type != NULL_TREE)
5398 m = mangle_type_string (TYPE_MAIN_VARIANT (type));
5399
5400 const char prefix[] = "omp declare reduction ";
5401 size_t lenp = sizeof (prefix);
5402 if (strncmp (p, prefix, lenp - 1) == 0)
5403 lenp = 1;
5404 size_t len = strlen (p);
5405 size_t lenm = m ? strlen (m) + 1 : 0;
5406 char *name = XALLOCAVEC (char, lenp + len + lenm);
5407 if (lenp > 1)
5408 memcpy (name, prefix, lenp - 1);
5409 memcpy (name + lenp - 1, p, len + 1);
5410 if (m)
5411 {
5412 name[lenp + len - 1] = '~';
5413 memcpy (name + lenp + len, m, lenm);
5414 }
5415 return get_identifier (name);
5416 }
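
/* For instance, a user declaration such as

     #pragma omp declare reduction (foo : T : omp_out += omp_in)

   is registered under an identifier of roughly the form
   "omp declare reduction foo~<mangled T>", a predefined minimum
   reduction over T would use "omp declare reduction min~<mangled T>",
   and when TYPE is NULL_TREE the "~<mangled type>" suffix is simply
   omitted.  The exact suffix spelling comes from mangle_type_string
   and is only illustrative here.  */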
5417
5418 /* Look up the OpenMP user defined reduction ID for TYPE, returning the
5419 corresponding artificial FUNCTION_DECL, or NULL_TREE if not found. */
5420
5421 static tree
5422 omp_reduction_lookup (location_t loc, tree id, tree type, tree *baselinkp,
5423 vec<tree> *ambiguousp)
5424 {
5425 tree orig_id = id;
5426 tree baselink = NULL_TREE;
5427 if (identifier_p (id))
5428 {
5429 cp_id_kind idk;
5430 bool nonint_cst_expression_p;
5431 const char *error_msg;
5432 id = omp_reduction_id (ERROR_MARK, id, type);
5433 tree decl = lookup_name (id);
5434 if (decl == NULL_TREE)
5435 decl = error_mark_node;
5436 id = finish_id_expression (id, decl, NULL_TREE, &idk, false, true,
5437 &nonint_cst_expression_p, false, true, false,
5438 false, &error_msg, loc);
5439 if (idk == CP_ID_KIND_UNQUALIFIED
5440 && identifier_p (id))
5441 {
5442 vec<tree, va_gc> *args = NULL;
5443 vec_safe_push (args, build_reference_type (type));
5444 id = perform_koenig_lookup (id, args, tf_none);
5445 }
5446 }
5447 else if (TREE_CODE (id) == SCOPE_REF)
5448 id = lookup_qualified_name (TREE_OPERAND (id, 0),
5449 omp_reduction_id (ERROR_MARK,
5450 TREE_OPERAND (id, 1),
5451 type),
5452 false, false);
5453 tree fns = id;
5454 id = NULL_TREE;
5455 if (fns && is_overloaded_fn (fns))
5456 {
5457 for (lkp_iterator iter (get_fns (fns)); iter; ++iter)
5458 {
5459 tree fndecl = *iter;
5460 if (TREE_CODE (fndecl) == FUNCTION_DECL)
5461 {
5462 tree argtype = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
5463 if (same_type_p (TREE_TYPE (argtype), type))
5464 {
5465 id = fndecl;
5466 break;
5467 }
5468 }
5469 }
5470
5471 if (id && BASELINK_P (fns))
5472 {
5473 if (baselinkp)
5474 *baselinkp = fns;
5475 else
5476 baselink = fns;
5477 }
5478 }
5479
5480 if (!id && CLASS_TYPE_P (type) && TYPE_BINFO (type))
5481 {
5482 vec<tree> ambiguous = vNULL;
5483 tree binfo = TYPE_BINFO (type), base_binfo, ret = NULL_TREE;
5484 unsigned int ix;
5485 if (ambiguousp == NULL)
5486 ambiguousp = &ambiguous;
5487 for (ix = 0; BINFO_BASE_ITERATE (binfo, ix, base_binfo); ix++)
5488 {
5489 id = omp_reduction_lookup (loc, orig_id, BINFO_TYPE (base_binfo),
5490 baselinkp ? baselinkp : &baselink,
5491 ambiguousp);
5492 if (id == NULL_TREE)
5493 continue;
5494 if (!ambiguousp->is_empty ())
5495 ambiguousp->safe_push (id);
5496 else if (ret != NULL_TREE)
5497 {
5498 ambiguousp->safe_push (ret);
5499 ambiguousp->safe_push (id);
5500 ret = NULL_TREE;
5501 }
5502 else
5503 ret = id;
5504 }
5505 if (ambiguousp != &ambiguous)
5506 return ret;
5507 if (!ambiguous.is_empty ())
5508 {
5509 const char *str = _("candidates are:");
5510 unsigned int idx;
5511 tree udr;
5512 error_at (loc, "user defined reduction lookup is ambiguous");
5513 FOR_EACH_VEC_ELT (ambiguous, idx, udr)
5514 {
5515 inform (DECL_SOURCE_LOCATION (udr), "%s %#qD", str, udr);
5516 if (idx == 0)
5517 str = get_spaces (str);
5518 }
5519 ambiguous.release ();
5520 ret = error_mark_node;
5521 baselink = NULL_TREE;
5522 }
5523 id = ret;
5524 }
5525 if (id && baselink)
5526 perform_or_defer_access_check (BASELINK_BINFO (baselink),
5527 id, id, tf_warning_or_error);
5528 return id;
5529 }
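
/* A sketch of the base-class and ambiguity handling above: with

     struct A { int a; };
     struct B { int b; };
     #pragma omp declare reduction (+ : A : omp_out.a += omp_in.a)
     #pragma omp declare reduction (+ : B : omp_out.b += omp_in.b)
     struct C : A, B { };

   a reduction(+:c) over a C object finds no UDR for C itself, recurses
   into both bases, collects the two candidates in *AMBIGUOUSP and
   diagnoses the lookup as ambiguous instead of silently picking one.  */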
5530
5531 /* Helper function for cp_parser_omp_declare_reduction_exprs
5532 and tsubst_omp_udr.
5533 Remove the CLEANUP_STMT for DATA (the omp_priv variable).
5534 Also append INIT_EXPR for DECL_INITIAL of omp_priv after its
5535 DECL_EXPR. */
5536
5537 tree
5538 cp_remove_omp_priv_cleanup_stmt (tree *tp, int *walk_subtrees, void *data)
5539 {
5540 if (TYPE_P (*tp))
5541 *walk_subtrees = 0;
5542 else if (TREE_CODE (*tp) == CLEANUP_STMT && CLEANUP_DECL (*tp) == (tree) data)
5543 *tp = CLEANUP_BODY (*tp);
5544 else if (TREE_CODE (*tp) == DECL_EXPR)
5545 {
5546 tree decl = DECL_EXPR_DECL (*tp);
5547 if (!processing_template_decl
5548 && decl == (tree) data
5549 && DECL_INITIAL (decl)
5550 && DECL_INITIAL (decl) != error_mark_node)
5551 {
5552 tree list = NULL_TREE;
5553 append_to_statement_list_force (*tp, &list);
5554 tree init_expr = build2 (INIT_EXPR, void_type_node,
5555 decl, DECL_INITIAL (decl));
5556 DECL_INITIAL (decl) = NULL_TREE;
5557 append_to_statement_list_force (init_expr, &list);
5558 *tp = list;
5559 }
5560 }
5561 return NULL_TREE;
5562 }
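
/* Roughly, for an initializer clause such as

     initializer (omp_priv = omp_orig)

   the parsed body contains a DECL_EXPR for omp_priv whose DECL_INITIAL
   performs the copy, possibly guarded by a CLEANUP_STMT.  The walk
   above drops the cleanup and rewrites the DECL_EXPR into a statement
   list of the form

     DECL_EXPR <omp_priv>;
     omp_priv = <former DECL_INITIAL>;

   clearing DECL_INITIAL so the initialization is represented as an
   explicit INIT_EXPR.  */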
5563
5564 /* Data passed from cp_check_omp_declare_reduction to
5565 cp_check_omp_declare_reduction_r. */
5566
5567 struct cp_check_omp_declare_reduction_data
5568 {
5569 location_t loc;
5570 tree stmts[7];
5571 bool combiner_p;
5572 };
5573
5574 /* Helper function for cp_check_omp_declare_reduction, called via
5575 cp_walk_tree. */
5576
5577 static tree
5578 cp_check_omp_declare_reduction_r (tree *tp, int *, void *data)
5579 {
5580 struct cp_check_omp_declare_reduction_data *udr_data
5581 = (struct cp_check_omp_declare_reduction_data *) data;
5582 if (SSA_VAR_P (*tp)
5583 && !DECL_ARTIFICIAL (*tp)
5584 && *tp != DECL_EXPR_DECL (udr_data->stmts[udr_data->combiner_p ? 0 : 3])
5585 && *tp != DECL_EXPR_DECL (udr_data->stmts[udr_data->combiner_p ? 1 : 4]))
5586 {
5587 location_t loc = udr_data->loc;
5588 if (udr_data->combiner_p)
5589 error_at (loc, "%<#pragma omp declare reduction%> combiner refers to "
5590 "variable %qD which is not %<omp_out%> nor %<omp_in%>",
5591 *tp);
5592 else
5593 error_at (loc, "%<#pragma omp declare reduction%> initializer refers "
5594 "to variable %qD which is not %<omp_priv%> nor "
5595 "%<omp_orig%>",
5596 *tp);
5597 return *tp;
5598 }
5599 return NULL_TREE;
5600 }
5601
5602 /* Diagnose violation of OpenMP #pragma omp declare reduction restrictions. */
5603
5604 void
5605 cp_check_omp_declare_reduction (tree udr)
5606 {
5607 tree type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (udr)));
5608 gcc_assert (TYPE_REF_P (type));
5609 type = TREE_TYPE (type);
5610 int i;
5611 location_t loc = DECL_SOURCE_LOCATION (udr);
5612
5613 if (type == error_mark_node)
5614 return;
5615 if (ARITHMETIC_TYPE_P (type))
5616 {
5617 static enum tree_code predef_codes[]
5618 = { PLUS_EXPR, MULT_EXPR, MINUS_EXPR, BIT_AND_EXPR, BIT_XOR_EXPR,
5619 BIT_IOR_EXPR, TRUTH_ANDIF_EXPR, TRUTH_ORIF_EXPR };
5620 for (i = 0; i < 8; i++)
5621 {
5622 tree id = omp_reduction_id (predef_codes[i], NULL_TREE, NULL_TREE);
5623 const char *n1 = IDENTIFIER_POINTER (DECL_NAME (udr));
5624 const char *n2 = IDENTIFIER_POINTER (id);
5625 if (strncmp (n1, n2, IDENTIFIER_LENGTH (id)) == 0
5626 && (n1[IDENTIFIER_LENGTH (id)] == '~'
5627 || n1[IDENTIFIER_LENGTH (id)] == '\0'))
5628 break;
5629 }
5630
5631 if (i == 8
5632 && TREE_CODE (type) != COMPLEX_EXPR)
5633 {
5634 const char prefix_minmax[] = "omp declare reduction m";
5635 size_t prefix_size = sizeof (prefix_minmax) - 1;
5636 const char *n = IDENTIFIER_POINTER (DECL_NAME (udr));
5637 if (strncmp (IDENTIFIER_POINTER (DECL_NAME (udr)),
5638 prefix_minmax, prefix_size) == 0
5639 && ((n[prefix_size] == 'i' && n[prefix_size + 1] == 'n')
5640 || (n[prefix_size] == 'a' && n[prefix_size + 1] == 'x'))
5641 && (n[prefix_size + 2] == '~' || n[prefix_size + 2] == '\0'))
5642 i = 0;
5643 }
5644 if (i < 8)
5645 {
5646 error_at (loc, "predeclared arithmetic type %qT in "
5647 "%<#pragma omp declare reduction%>", type);
5648 return;
5649 }
5650 }
5651 else if (FUNC_OR_METHOD_TYPE_P (type)
5652 || TREE_CODE (type) == ARRAY_TYPE)
5653 {
5654 error_at (loc, "function or array type %qT in "
5655 "%<#pragma omp declare reduction%>", type);
5656 return;
5657 }
5658 else if (TYPE_REF_P (type))
5659 {
5660 error_at (loc, "reference type %qT in %<#pragma omp declare reduction%>",
5661 type);
5662 return;
5663 }
5664 else if (TYPE_QUALS_NO_ADDR_SPACE (type))
5665 {
5666 error_at (loc, "%<const%>, %<volatile%> or %<__restrict%>-qualified "
5667 "type %qT in %<#pragma omp declare reduction%>", type);
5668 return;
5669 }
5670
5671 tree body = DECL_SAVED_TREE (udr);
5672 if (body == NULL_TREE || TREE_CODE (body) != STATEMENT_LIST)
5673 return;
5674
5675 tree_stmt_iterator tsi;
5676 struct cp_check_omp_declare_reduction_data data;
5677 memset (data.stmts, 0, sizeof data.stmts);
5678 for (i = 0, tsi = tsi_start (body);
5679 i < 7 && !tsi_end_p (tsi);
5680 i++, tsi_next (&tsi))
5681 data.stmts[i] = tsi_stmt (tsi);
5682 data.loc = loc;
5683 gcc_assert (tsi_end_p (tsi));
5684 if (i >= 3)
5685 {
5686 gcc_assert (TREE_CODE (data.stmts[0]) == DECL_EXPR
5687 && TREE_CODE (data.stmts[1]) == DECL_EXPR);
5688 if (TREE_NO_WARNING (DECL_EXPR_DECL (data.stmts[0])))
5689 return;
5690 data.combiner_p = true;
5691 if (cp_walk_tree (&data.stmts[2], cp_check_omp_declare_reduction_r,
5692 &data, NULL))
5693 TREE_NO_WARNING (DECL_EXPR_DECL (data.stmts[0])) = 1;
5694 }
5695 if (i >= 6)
5696 {
5697 gcc_assert (TREE_CODE (data.stmts[3]) == DECL_EXPR
5698 && TREE_CODE (data.stmts[4]) == DECL_EXPR);
5699 data.combiner_p = false;
5700 if (cp_walk_tree (&data.stmts[5], cp_check_omp_declare_reduction_r,
5701 &data, NULL)
5702 || cp_walk_tree (&DECL_INITIAL (DECL_EXPR_DECL (data.stmts[3])),
5703 cp_check_omp_declare_reduction_r, &data, NULL))
5704 TREE_NO_WARNING (DECL_EXPR_DECL (data.stmts[0])) = 1;
5705 if (i == 7)
5706 gcc_assert (TREE_CODE (data.stmts[6]) == DECL_EXPR);
5707 }
5708 }
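
/* Illustrative declarations rejected by the checks above:

     #pragma omp declare reduction (+ : int : omp_out += omp_in)
	// predeclared arithmetic type
     #pragma omp declare reduction (foo : int & : omp_out += omp_in)
	// reference type
     #pragma omp declare reduction (foo : const int : omp_out += omp_in)
	// const-qualified type

   whereas a UDR for a class type, e.g.
   "#pragma omp declare reduction (foo : S : ...)", passes the type
   checks and only has its combiner and initializer walked for stray
   variable references (anything other than omp_out/omp_in resp.
   omp_priv/omp_orig).  */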
5709
5710 /* Helper function of finish_omp_clauses. Clone STMT as if we were making
5711 an inline call, but remap
5712 the OMP_DECL1 VAR_DECL (omp_out resp. omp_orig) to PLACEHOLDER
5713 and OMP_DECL2 VAR_DECL (omp_in resp. omp_priv) to DECL. */
5714
5715 static tree
5716 clone_omp_udr (tree stmt, tree omp_decl1, tree omp_decl2,
5717 tree decl, tree placeholder)
5718 {
5719 copy_body_data id;
5720 hash_map<tree, tree> decl_map;
5721
5722 decl_map.put (omp_decl1, placeholder);
5723 decl_map.put (omp_decl2, decl);
5724 memset (&id, 0, sizeof (id));
5725 id.src_fn = DECL_CONTEXT (omp_decl1);
5726 id.dst_fn = current_function_decl;
5727 id.src_cfun = DECL_STRUCT_FUNCTION (id.src_fn);
5728 id.decl_map = &decl_map;
5729
5730 id.copy_decl = copy_decl_no_change;
5731 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5732 id.transform_new_cfg = true;
5733 id.transform_return_to_modify = false;
5734 id.transform_lang_insert_block = NULL;
5735 id.eh_lp_nr = 0;
5736 walk_tree (&stmt, copy_tree_body_r, &id, NULL);
5737 return stmt;
5738 }
5739
5740 /* Helper function of finish_omp_clauses, called via cp_walk_tree.
5741 Find OMP_CLAUSE_PLACEHOLDER (passed in DATA) in *TP. */
5742
5743 static tree
5744 find_omp_placeholder_r (tree *tp, int *, void *data)
5745 {
5746 if (*tp == (tree) data)
5747 return *tp;
5748 return NULL_TREE;
5749 }
5750
5751 /* Helper function of finish_omp_clauses. Handle OMP_CLAUSE_REDUCTION C.
5752 Return true if there is some error and the clause should be removed. */
5753
5754 static bool
5755 finish_omp_reduction_clause (tree c, bool *need_default_ctor, bool *need_dtor)
5756 {
5757 tree t = OMP_CLAUSE_DECL (c);
5758 bool predefined = false;
5759 if (TREE_CODE (t) == TREE_LIST)
5760 {
5761 gcc_assert (processing_template_decl);
5762 return false;
5763 }
5764 tree type = TREE_TYPE (t);
5765 if (TREE_CODE (t) == MEM_REF)
5766 type = TREE_TYPE (type);
5767 if (TYPE_REF_P (type))
5768 type = TREE_TYPE (type);
5769 if (TREE_CODE (type) == ARRAY_TYPE)
5770 {
5771 tree oatype = type;
5772 gcc_assert (TREE_CODE (t) != MEM_REF);
5773 while (TREE_CODE (type) == ARRAY_TYPE)
5774 type = TREE_TYPE (type);
5775 if (!processing_template_decl)
5776 {
5777 t = require_complete_type (t);
5778 if (t == error_mark_node)
5779 return true;
5780 tree size = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (oatype),
5781 TYPE_SIZE_UNIT (type));
5782 if (integer_zerop (size))
5783 {
5784 error_at (OMP_CLAUSE_LOCATION (c),
5785 "%qE in %<reduction%> clause is a zero size array",
5786 omp_clause_printable_decl (t));
5787 return true;
5788 }
5789 size = size_binop (MINUS_EXPR, size, size_one_node);
5790 size = save_expr (size);
5791 tree index_type = build_index_type (size);
5792 tree atype = build_array_type (type, index_type);
5793 tree ptype = build_pointer_type (type);
5794 if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
5795 t = build_fold_addr_expr (t);
5796 t = build2 (MEM_REF, atype, t, build_int_cst (ptype, 0));
5797 OMP_CLAUSE_DECL (c) = t;
5798 }
5799 }
5800 if (type == error_mark_node)
5801 return true;
5802 else if (ARITHMETIC_TYPE_P (type))
5803 switch (OMP_CLAUSE_REDUCTION_CODE (c))
5804 {
5805 case PLUS_EXPR:
5806 case MULT_EXPR:
5807 case MINUS_EXPR:
5808 predefined = true;
5809 break;
5810 case MIN_EXPR:
5811 case MAX_EXPR:
5812 if (TREE_CODE (type) == COMPLEX_TYPE)
5813 break;
5814 predefined = true;
5815 break;
5816 case BIT_AND_EXPR:
5817 case BIT_IOR_EXPR:
5818 case BIT_XOR_EXPR:
5819 if (FLOAT_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE)
5820 break;
5821 predefined = true;
5822 break;
5823 case TRUTH_ANDIF_EXPR:
5824 case TRUTH_ORIF_EXPR:
5825 if (FLOAT_TYPE_P (type))
5826 break;
5827 predefined = true;
5828 break;
5829 default:
5830 break;
5831 }
5832 else if (TYPE_READONLY (type))
5833 {
5834 error_at (OMP_CLAUSE_LOCATION (c),
5835 "%qE has const type for %<reduction%>",
5836 omp_clause_printable_decl (t));
5837 return true;
5838 }
5839 else if (!processing_template_decl)
5840 {
5841 t = require_complete_type (t);
5842 if (t == error_mark_node)
5843 return true;
5844 OMP_CLAUSE_DECL (c) = t;
5845 }
5846
5847 if (predefined)
5848 {
5849 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL_TREE;
5850 return false;
5851 }
5852 else if (processing_template_decl)
5853 {
5854 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == error_mark_node)
5855 return true;
5856 return false;
5857 }
5858
5859 tree id = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5860
5861 type = TYPE_MAIN_VARIANT (type);
5862 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL_TREE;
5863 if (id == NULL_TREE)
5864 id = omp_reduction_id (OMP_CLAUSE_REDUCTION_CODE (c),
5865 NULL_TREE, NULL_TREE);
5866 id = omp_reduction_lookup (OMP_CLAUSE_LOCATION (c), id, type, NULL, NULL);
5867 if (id)
5868 {
5869 if (id == error_mark_node)
5870 return true;
5871 mark_used (id);
5872 tree body = DECL_SAVED_TREE (id);
5873 if (!body)
5874 return true;
5875 if (TREE_CODE (body) == STATEMENT_LIST)
5876 {
5877 tree_stmt_iterator tsi;
5878 tree placeholder = NULL_TREE, decl_placeholder = NULL_TREE;
5879 int i;
5880 tree stmts[7];
5881 tree atype = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (id)));
5882 atype = TREE_TYPE (atype);
5883 bool need_static_cast = !same_type_p (type, atype);
5884 memset (stmts, 0, sizeof stmts);
5885 for (i = 0, tsi = tsi_start (body);
5886 i < 7 && !tsi_end_p (tsi);
5887 i++, tsi_next (&tsi))
5888 stmts[i] = tsi_stmt (tsi);
5889 gcc_assert (tsi_end_p (tsi));
5890
5891 if (i >= 3)
5892 {
5893 gcc_assert (TREE_CODE (stmts[0]) == DECL_EXPR
5894 && TREE_CODE (stmts[1]) == DECL_EXPR);
5895 placeholder = build_lang_decl (VAR_DECL, NULL_TREE, type);
5896 DECL_ARTIFICIAL (placeholder) = 1;
5897 DECL_IGNORED_P (placeholder) = 1;
5898 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = placeholder;
5899 if (TREE_CODE (t) == MEM_REF)
5900 {
5901 decl_placeholder = build_lang_decl (VAR_DECL, NULL_TREE,
5902 type);
5903 DECL_ARTIFICIAL (decl_placeholder) = 1;
5904 DECL_IGNORED_P (decl_placeholder) = 1;
5905 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = decl_placeholder;
5906 }
5907 if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[0])))
5908 cxx_mark_addressable (placeholder);
5909 if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[1]))
5910 && (decl_placeholder
5911 || !TYPE_REF_P (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
5912 cxx_mark_addressable (decl_placeholder ? decl_placeholder
5913 : OMP_CLAUSE_DECL (c));
5914 tree omp_out = placeholder;
5915 tree omp_in = decl_placeholder ? decl_placeholder
5916 : convert_from_reference (OMP_CLAUSE_DECL (c));
5917 if (need_static_cast)
5918 {
5919 tree rtype = build_reference_type (atype);
5920 omp_out = build_static_cast (rtype, omp_out,
5921 tf_warning_or_error);
5922 omp_in = build_static_cast (rtype, omp_in,
5923 tf_warning_or_error);
5924 if (omp_out == error_mark_node || omp_in == error_mark_node)
5925 return true;
5926 omp_out = convert_from_reference (omp_out);
5927 omp_in = convert_from_reference (omp_in);
5928 }
5929 OMP_CLAUSE_REDUCTION_MERGE (c)
5930 = clone_omp_udr (stmts[2], DECL_EXPR_DECL (stmts[0]),
5931 DECL_EXPR_DECL (stmts[1]), omp_in, omp_out);
5932 }
5933 if (i >= 6)
5934 {
5935 gcc_assert (TREE_CODE (stmts[3]) == DECL_EXPR
5936 && TREE_CODE (stmts[4]) == DECL_EXPR);
5937 if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[3]))
5938 && (decl_placeholder
5939 || !TYPE_REF_P (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
5940 cxx_mark_addressable (decl_placeholder ? decl_placeholder
5941 : OMP_CLAUSE_DECL (c));
5942 if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[4])))
5943 cxx_mark_addressable (placeholder);
5944 tree omp_priv = decl_placeholder ? decl_placeholder
5945 : convert_from_reference (OMP_CLAUSE_DECL (c));
5946 tree omp_orig = placeholder;
5947 if (need_static_cast)
5948 {
5949 if (i == 7)
5950 {
5951 error_at (OMP_CLAUSE_LOCATION (c),
5952 "user defined reduction with constructor "
5953 "initializer for base class %qT", atype);
5954 return true;
5955 }
5956 tree rtype = build_reference_type (atype);
5957 omp_priv = build_static_cast (rtype, omp_priv,
5958 tf_warning_or_error);
5959 omp_orig = build_static_cast (rtype, omp_orig,
5960 tf_warning_or_error);
5961 if (omp_priv == error_mark_node
5962 || omp_orig == error_mark_node)
5963 return true;
5964 omp_priv = convert_from_reference (omp_priv);
5965 omp_orig = convert_from_reference (omp_orig);
5966 }
5967 if (i == 6)
5968 *need_default_ctor = true;
5969 OMP_CLAUSE_REDUCTION_INIT (c)
5970 = clone_omp_udr (stmts[5], DECL_EXPR_DECL (stmts[4]),
5971 DECL_EXPR_DECL (stmts[3]),
5972 omp_priv, omp_orig);
5973 if (cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
5974 find_omp_placeholder_r, placeholder, NULL))
5975 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1;
5976 }
5977 else if (i >= 3)
5978 {
5979 if (CLASS_TYPE_P (type) && !pod_type_p (type))
5980 *need_default_ctor = true;
5981 else
5982 {
5983 tree init;
5984 tree v = decl_placeholder ? decl_placeholder
5985 : convert_from_reference (t);
5986 if (AGGREGATE_TYPE_P (TREE_TYPE (v)))
5987 init = build_constructor (TREE_TYPE (v), NULL);
5988 else
5989 init = fold_convert (TREE_TYPE (v), integer_zero_node);
5990 OMP_CLAUSE_REDUCTION_INIT (c)
5991 = build2 (INIT_EXPR, TREE_TYPE (v), v, init);
5992 }
5993 }
5994 }
5995 }
5996 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5997 *need_dtor = true;
5998 else
5999 {
6000 error_at (OMP_CLAUSE_LOCATION (c),
6001 "user defined reduction not found for %qE",
6002 omp_clause_printable_decl (t));
6003 return true;
6004 }
6005 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6006 gcc_assert (TYPE_SIZE_UNIT (type)
6007 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
6008 return false;
6009 }
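
/* Putting the pieces together, a rough sketch of the common case:

     struct S { int v; };
     #pragma omp declare reduction (merge : S : omp_out.v += omp_in.v)
     S s;
     #pragma omp parallel for reduction (merge : s)
     for (...) ...

   leaves the clause with OMP_CLAUSE_REDUCTION_PLACEHOLDER set to an
   artificial VAR_DECL standing for omp_out/omp_orig and
   OMP_CLAUSE_REDUCTION_MERGE holding a clone of the combiner in which
   omp_in has been replaced by the private copy of s.  With no
   initializer clause, a non-POD class type sets *NEED_DEFAULT_CTOR,
   while a plain aggregate like S above simply gets a zero-initializing
   INIT_EXPR as OMP_CLAUSE_REDUCTION_INIT.  */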
6010
6011 /* Called from finish_struct_1. linear(this) or linear(this:step)
6012 clauses might not be finalized yet because the class was still incomplete
6013 when parsing #pragma omp declare simd methods. Fix those up now. */
6014
6015 void
6016 finish_omp_declare_simd_methods (tree t)
6017 {
6018 if (processing_template_decl)
6019 return;
6020
6021 for (tree x = TYPE_FIELDS (t); x; x = DECL_CHAIN (x))
6022 {
6023 if (TREE_CODE (x) == USING_DECL
6024 || !DECL_NONSTATIC_MEMBER_FUNCTION_P (x))
6025 continue;
6026 tree ods = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (x));
6027 if (!ods || !TREE_VALUE (ods))
6028 continue;
6029 for (tree c = TREE_VALUE (TREE_VALUE (ods)); c; c = OMP_CLAUSE_CHAIN (c))
6030 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6031 && integer_zerop (OMP_CLAUSE_DECL (c))
6032 && OMP_CLAUSE_LINEAR_STEP (c)
6033 && TYPE_PTR_P (TREE_TYPE (OMP_CLAUSE_LINEAR_STEP (c))))
6034 {
6035 tree s = OMP_CLAUSE_LINEAR_STEP (c);
6036 s = fold_convert_loc (OMP_CLAUSE_LOCATION (c), sizetype, s);
6037 s = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MULT_EXPR,
6038 sizetype, s, TYPE_SIZE_UNIT (t));
6039 OMP_CLAUSE_LINEAR_STEP (c) = s;
6040 }
6041 }
6042 }
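
/* For example (sketch):

     struct T {
       #pragma omp declare simd linear (this)
       void f ();
       int x[4];
     };

   While T is still being defined its size is unknown, so the step
   recorded for linear(this) cannot be scaled during parsing.  Once the
   class is complete, the loop above multiplies the stored step by
   TYPE_SIZE_UNIT (T), turning it into the byte offset applied to the
   implicit this pointer.  */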
6043
6044 /* Adjust a depend(sink) clause to take pointer offsets into account.
6045
6046 Return TRUE if there was a problem processing the offset, and the
6047 whole clause should be removed. */
6048
6049 static bool
6050 cp_finish_omp_clause_depend_sink (tree sink_clause)
6051 {
6052 tree t = OMP_CLAUSE_DECL (sink_clause);
6053 gcc_assert (TREE_CODE (t) == TREE_LIST);
6054
6055 /* Make sure we don't adjust things twice for templates. */
6056 if (processing_template_decl)
6057 return false;
6058
6059 for (; t; t = TREE_CHAIN (t))
6060 {
6061 tree decl = TREE_VALUE (t);
6062 if (TYPE_PTR_P (TREE_TYPE (decl)))
6063 {
6064 tree offset = TREE_PURPOSE (t);
6065 bool neg = wi::neg_p (wi::to_wide (offset));
6066 offset = fold_unary (ABS_EXPR, TREE_TYPE (offset), offset);
6067 decl = mark_rvalue_use (decl);
6068 decl = convert_from_reference (decl);
6069 tree t2 = pointer_int_sum (OMP_CLAUSE_LOCATION (sink_clause),
6070 neg ? MINUS_EXPR : PLUS_EXPR,
6071 decl, offset);
6072 t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (sink_clause),
6073 MINUS_EXPR, sizetype,
6074 fold_convert (sizetype, t2),
6075 fold_convert (sizetype, decl));
6076 if (t2 == error_mark_node)
6077 return true;
6078 TREE_PURPOSE (t) = t2;
6079 }
6080 }
6081 return false;
6082 }
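
/* For instance, with a pointer iteration variable

     int *p;
     #pragma omp for ordered (1)
     for (p = a; p < a + n; p++)
       {
	 #pragma omp ordered depend (sink: p - 1)
	 ...
       }

   the recorded offset -1 is rescaled above into a byte offset (roughly
   the offset multiplied by sizeof (*p)), computed via pointer_int_sum
   so that the value stored in TREE_PURPOSE is in the units later
   expansion expects.  Sketch only.  */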
6083
6084 /* Finish OpenMP iterators ITER. Return true if they are erroneous
6085 and clauses containing them should be removed. */
6086
6087 static bool
6088 cp_omp_finish_iterators (tree iter)
6089 {
6090 bool ret = false;
6091 for (tree it = iter; it; it = TREE_CHAIN (it))
6092 {
6093 tree var = TREE_VEC_ELT (it, 0);
6094 tree begin = TREE_VEC_ELT (it, 1);
6095 tree end = TREE_VEC_ELT (it, 2);
6096 tree step = TREE_VEC_ELT (it, 3);
6097 tree orig_step;
6098 tree type = TREE_TYPE (var);
6099 location_t loc = DECL_SOURCE_LOCATION (var);
6100 if (type == error_mark_node)
6101 {
6102 ret = true;
6103 continue;
6104 }
6105 if (type_dependent_expression_p (var))
6106 continue;
6107 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
6108 {
6109 error_at (loc, "iterator %qD has neither integral nor pointer type",
6110 var);
6111 ret = true;
6112 continue;
6113 }
6114 else if (TYPE_READONLY (type))
6115 {
6116 error_at (loc, "iterator %qD has const qualified type", var);
6117 ret = true;
6118 continue;
6119 }
6120 if (type_dependent_expression_p (begin)
6121 || type_dependent_expression_p (end)
6122 || type_dependent_expression_p (step))
6123 continue;
6124 else if (error_operand_p (step))
6125 {
6126 ret = true;
6127 continue;
6128 }
6129 else if (!INTEGRAL_TYPE_P (TREE_TYPE (step)))
6130 {
6131 error_at (EXPR_LOC_OR_LOC (step, loc),
6132 "iterator step with non-integral type");
6133 ret = true;
6134 continue;
6135 }
6136
6137 begin = mark_rvalue_use (begin);
6138 end = mark_rvalue_use (end);
6139 step = mark_rvalue_use (step);
6140 begin = cp_build_c_cast (type, begin, tf_warning_or_error);
6141 end = cp_build_c_cast (type, end, tf_warning_or_error);
6142 orig_step = step;
6143 if (!processing_template_decl)
6144 step = orig_step = save_expr (step);
6145 tree stype = POINTER_TYPE_P (type) ? sizetype : type;
6146 step = cp_build_c_cast (stype, step, tf_warning_or_error);
6147 if (POINTER_TYPE_P (type) && !processing_template_decl)
6148 {
6149 begin = save_expr (begin);
6150 step = pointer_int_sum (loc, PLUS_EXPR, begin, step);
6151 step = fold_build2_loc (loc, MINUS_EXPR, sizetype,
6152 fold_convert (sizetype, step),
6153 fold_convert (sizetype, begin));
6154 step = fold_convert (ssizetype, step);
6155 }
6156 if (!processing_template_decl)
6157 {
6158 begin = maybe_constant_value (begin);
6159 end = maybe_constant_value (end);
6160 step = maybe_constant_value (step);
6161 orig_step = maybe_constant_value (orig_step);
6162 }
6163 if (integer_zerop (step))
6164 {
6165 error_at (loc, "iterator %qD has zero step", var);
6166 ret = true;
6167 continue;
6168 }
6169
6170 if (begin == error_mark_node
6171 || end == error_mark_node
6172 || step == error_mark_node
6173 || orig_step == error_mark_node)
6174 {
6175 ret = true;
6176 continue;
6177 }
6178
6179 if (!processing_template_decl)
6180 {
6181 begin = fold_build_cleanup_point_expr (TREE_TYPE (begin), begin);
6182 end = fold_build_cleanup_point_expr (TREE_TYPE (end), end);
6183 step = fold_build_cleanup_point_expr (TREE_TYPE (step), step);
6184 orig_step = fold_build_cleanup_point_expr (TREE_TYPE (orig_step),
6185 orig_step);
6186 }
6187 hash_set<tree> pset;
6188 tree it2;
6189 for (it2 = TREE_CHAIN (it); it2; it2 = TREE_CHAIN (it2))
6190 {
6191 tree var2 = TREE_VEC_ELT (it2, 0);
6192 tree begin2 = TREE_VEC_ELT (it2, 1);
6193 tree end2 = TREE_VEC_ELT (it2, 2);
6194 tree step2 = TREE_VEC_ELT (it2, 3);
6195 location_t loc2 = DECL_SOURCE_LOCATION (var2);
6196 if (cp_walk_tree (&begin2, find_omp_placeholder_r, var, &pset))
6197 {
6198 error_at (EXPR_LOC_OR_LOC (begin2, loc2),
6199 "begin expression refers to outer iterator %qD", var);
6200 break;
6201 }
6202 else if (cp_walk_tree (&end2, find_omp_placeholder_r, var, &pset))
6203 {
6204 error_at (EXPR_LOC_OR_LOC (end2, loc2),
6205 "end expression refers to outer iterator %qD", var);
6206 break;
6207 }
6208 else if (cp_walk_tree (&step2, find_omp_placeholder_r, var, &pset))
6209 {
6210 error_at (EXPR_LOC_OR_LOC (step2, loc2),
6211 "step expression refers to outer iterator %qD", var);
6212 break;
6213 }
6214 }
6215 if (it2)
6216 {
6217 ret = true;
6218 continue;
6219 }
6220 TREE_VEC_ELT (it, 1) = begin;
6221 TREE_VEC_ELT (it, 2) = end;
6222 if (processing_template_decl)
6223 TREE_VEC_ELT (it, 3) = orig_step;
6224 else
6225 {
6226 TREE_VEC_ELT (it, 3) = step;
6227 TREE_VEC_ELT (it, 4) = orig_step;
6228 }
6229 }
6230 return ret;
6231 }
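
/* For example, in

     #pragma omp task depend (iterator (i = 0 : n : 2), in : a[i])

   the iterator is represented as a TREE_VEC holding roughly
   { i, 0, n, 2 }.  The begin/end expressions are cast to the iterator
   type, the step is required to be a nonzero integral expression, and
   outside templates the adjusted begin, end, step and orig_step values
   are stored back into the TREE_VEC for use when the depend clause is
   expanded.  An iterator's begin, end or step may not refer to an
   earlier (outer) iterator of the same list, as diagnosed above.  */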
6232
6233 /* For all elements of CLAUSES, validate them vs OpenMP constraints.
6234 Remove any elements from the list that are invalid. */
6235
6236 tree
6237 finish_omp_clauses (tree clauses, enum c_omp_region_type ort)
6238 {
6239 bitmap_head generic_head, firstprivate_head, lastprivate_head;
6240 bitmap_head aligned_head, map_head, map_field_head, oacc_reduction_head;
6241 tree c, t, *pc;
6242 tree safelen = NULL_TREE;
6243 bool branch_seen = false;
6244 bool copyprivate_seen = false;
6245 bool ordered_seen = false;
6246 bool order_seen = false;
6247 bool schedule_seen = false;
6248 bool oacc_async = false;
6249 tree last_iterators = NULL_TREE;
6250 bool last_iterators_remove = false;
6251 /* 1 if normal/task reduction has been seen, -1 if inscan reduction
6252 has been seen, -2 if mixed inscan/normal reduction diagnosed. */
6253 int reduction_seen = 0;
6254
6255 bitmap_obstack_initialize (NULL);
6256 bitmap_initialize (&generic_head, &bitmap_default_obstack);
6257 bitmap_initialize (&firstprivate_head, &bitmap_default_obstack);
6258 bitmap_initialize (&lastprivate_head, &bitmap_default_obstack);
6259 bitmap_initialize (&aligned_head, &bitmap_default_obstack);
6260 /* If ort == C_ORT_OMP_DECLARE_SIMD, this is used as uniform_head instead. */
6261 bitmap_initialize (&map_head, &bitmap_default_obstack);
6262 bitmap_initialize (&map_field_head, &bitmap_default_obstack);
6263 /* If ort == C_ORT_OMP, this is used as nontemporal_head or
6264 use_device_xxx_head instead. */
6265 bitmap_initialize (&oacc_reduction_head, &bitmap_default_obstack);
6266
6267 if (ort & C_ORT_ACC)
6268 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6269 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ASYNC)
6270 {
6271 oacc_async = true;
6272 break;
6273 }
6274
6275 for (pc = &clauses, c = clauses; c ; c = *pc)
6276 {
6277 bool remove = false;
6278 bool field_ok = false;
6279
6280 switch (OMP_CLAUSE_CODE (c))
6281 {
6282 case OMP_CLAUSE_SHARED:
6283 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP);
6284 goto check_dup_generic;
6285 case OMP_CLAUSE_PRIVATE:
6286 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP);
6287 goto check_dup_generic;
6288 case OMP_CLAUSE_REDUCTION:
6289 if (reduction_seen == 0)
6290 reduction_seen = OMP_CLAUSE_REDUCTION_INSCAN (c) ? -1 : 1;
6291 else if (reduction_seen != -2
6292 && reduction_seen != (OMP_CLAUSE_REDUCTION_INSCAN (c)
6293 ? -1 : 1))
6294 {
6295 error_at (OMP_CLAUSE_LOCATION (c),
6296 "%<inscan%> and non-%<inscan%> %<reduction%> clauses "
6297 "on the same construct");
6298 reduction_seen = -2;
6299 }
6300 /* FALLTHRU */
6301 case OMP_CLAUSE_IN_REDUCTION:
6302 case OMP_CLAUSE_TASK_REDUCTION:
6303 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP);
6304 t = OMP_CLAUSE_DECL (c);
6305 if (TREE_CODE (t) == TREE_LIST)
6306 {
6307 if (handle_omp_array_sections (c, ort))
6308 {
6309 remove = true;
6310 break;
6311 }
6312 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6313 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6314 {
6315 error_at (OMP_CLAUSE_LOCATION (c),
6316 "%<inscan%> %<reduction%> clause with array "
6317 "section");
6318 remove = true;
6319 break;
6320 }
6321 if (TREE_CODE (t) == TREE_LIST)
6322 {
6323 while (TREE_CODE (t) == TREE_LIST)
6324 t = TREE_CHAIN (t);
6325 }
6326 else
6327 {
6328 gcc_assert (TREE_CODE (t) == MEM_REF);
6329 t = TREE_OPERAND (t, 0);
6330 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
6331 t = TREE_OPERAND (t, 0);
6332 if (TREE_CODE (t) == ADDR_EXPR
6333 || INDIRECT_REF_P (t))
6334 t = TREE_OPERAND (t, 0);
6335 }
6336 tree n = omp_clause_decl_field (t);
6337 if (n)
6338 t = n;
6339 goto check_dup_generic_t;
6340 }
6341 if (oacc_async)
6342 cxx_mark_addressable (t);
6343 goto check_dup_generic;
6344 case OMP_CLAUSE_COPYPRIVATE:
6345 copyprivate_seen = true;
6346 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP);
6347 goto check_dup_generic;
6348 case OMP_CLAUSE_COPYIN:
6349 goto check_dup_generic;
6350 case OMP_CLAUSE_LINEAR:
6351 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP);
6352 t = OMP_CLAUSE_DECL (c);
6353 if (ort != C_ORT_OMP_DECLARE_SIMD
6354 && OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_DEFAULT)
6355 {
6356 error_at (OMP_CLAUSE_LOCATION (c),
6357 "modifier should not be specified in %<linear%> "
6358 "clause on %<simd%> or %<for%> constructs");
6359 OMP_CLAUSE_LINEAR_KIND (c) = OMP_CLAUSE_LINEAR_DEFAULT;
6360 }
6361 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
6362 && !type_dependent_expression_p (t))
6363 {
6364 tree type = TREE_TYPE (t);
6365 if ((OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF
6366 || OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_UVAL)
6367 && !TYPE_REF_P (type))
6368 {
6369 error_at (OMP_CLAUSE_LOCATION (c),
6370 "linear clause with %qs modifier applied to "
6371 "non-reference variable with %qT type",
6372 OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF
6373 ? "ref" : "uval", TREE_TYPE (t));
6374 remove = true;
6375 break;
6376 }
6377 if (TYPE_REF_P (type))
6378 type = TREE_TYPE (type);
6379 if (OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_REF)
6380 {
6381 if (!INTEGRAL_TYPE_P (type)
6382 && !TYPE_PTR_P (type))
6383 {
6384 error_at (OMP_CLAUSE_LOCATION (c),
6385 "linear clause applied to non-integral "
6386 "non-pointer variable with %qT type",
6387 TREE_TYPE (t));
6388 remove = true;
6389 break;
6390 }
6391 }
6392 }
6393 t = OMP_CLAUSE_LINEAR_STEP (c);
6394 if (t == NULL_TREE)
6395 t = integer_one_node;
6396 if (t == error_mark_node)
6397 {
6398 remove = true;
6399 break;
6400 }
6401 else if (!type_dependent_expression_p (t)
6402 && !INTEGRAL_TYPE_P (TREE_TYPE (t))
6403 && (ort != C_ORT_OMP_DECLARE_SIMD
6404 || TREE_CODE (t) != PARM_DECL
6405 || !TYPE_REF_P (TREE_TYPE (t))
6406 || !INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (t)))))
6407 {
6408 error_at (OMP_CLAUSE_LOCATION (c),
6409 "linear step expression must be integral");
6410 remove = true;
6411 break;
6412 }
6413 else
6414 {
6415 t = mark_rvalue_use (t);
6416 if (ort == C_ORT_OMP_DECLARE_SIMD && TREE_CODE (t) == PARM_DECL)
6417 {
6418 OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c) = 1;
6419 goto check_dup_generic;
6420 }
6421 if (!processing_template_decl
6422 && (VAR_P (OMP_CLAUSE_DECL (c))
6423 || TREE_CODE (OMP_CLAUSE_DECL (c)) == PARM_DECL))
6424 {
6425 if (ort == C_ORT_OMP_DECLARE_SIMD)
6426 {
6427 t = maybe_constant_value (t);
6428 if (TREE_CODE (t) != INTEGER_CST)
6429 {
6430 error_at (OMP_CLAUSE_LOCATION (c),
6431 "%<linear%> clause step %qE is neither "
6432 "constant nor a parameter", t);
6433 remove = true;
6434 break;
6435 }
6436 }
6437 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
6438 tree type = TREE_TYPE (OMP_CLAUSE_DECL (c));
6439 if (TYPE_REF_P (type))
6440 type = TREE_TYPE (type);
6441 if (OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF)
6442 {
6443 type = build_pointer_type (type);
6444 tree d = fold_convert (type, OMP_CLAUSE_DECL (c));
6445 t = pointer_int_sum (OMP_CLAUSE_LOCATION (c), PLUS_EXPR,
6446 d, t);
6447 t = fold_build2_loc (OMP_CLAUSE_LOCATION (c),
6448 MINUS_EXPR, sizetype,
6449 fold_convert (sizetype, t),
6450 fold_convert (sizetype, d));
6451 if (t == error_mark_node)
6452 {
6453 remove = true;
6454 break;
6455 }
6456 }
6457 else if (TYPE_PTR_P (type)
6458 /* Can't multiply the step yet if *this
6459 is still an incomplete type. */
6460 && (ort != C_ORT_OMP_DECLARE_SIMD
6461 || TREE_CODE (OMP_CLAUSE_DECL (c)) != PARM_DECL
6462 || !DECL_ARTIFICIAL (OMP_CLAUSE_DECL (c))
6463 || DECL_NAME (OMP_CLAUSE_DECL (c))
6464 != this_identifier
6465 || !TYPE_BEING_DEFINED (TREE_TYPE (type))))
6466 {
6467 tree d = convert_from_reference (OMP_CLAUSE_DECL (c));
6468 t = pointer_int_sum (OMP_CLAUSE_LOCATION (c), PLUS_EXPR,
6469 d, t);
6470 t = fold_build2_loc (OMP_CLAUSE_LOCATION (c),
6471 MINUS_EXPR, sizetype,
6472 fold_convert (sizetype, t),
6473 fold_convert (sizetype, d));
6474 if (t == error_mark_node)
6475 {
6476 remove = true;
6477 break;
6478 }
6479 }
6480 else
6481 t = fold_convert (type, t);
6482 }
6483 OMP_CLAUSE_LINEAR_STEP (c) = t;
6484 }
6485 goto check_dup_generic;
6486 check_dup_generic:
6487 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c));
6488 if (t)
6489 {
6490 if (!remove && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED)
6491 omp_note_field_privatization (t, OMP_CLAUSE_DECL (c));
6492 }
6493 else
6494 t = OMP_CLAUSE_DECL (c);
6495 check_dup_generic_t:
6496 if (t == current_class_ptr
6497 && (ort != C_ORT_OMP_DECLARE_SIMD
6498 || (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6499 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_UNIFORM)))
6500 {
6501 error_at (OMP_CLAUSE_LOCATION (c),
6502 "%<this%> allowed in OpenMP only in %<declare simd%>"
6503 " clauses");
6504 remove = true;
6505 break;
6506 }
6507 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL
6508 && (!field_ok || TREE_CODE (t) != FIELD_DECL))
6509 {
6510 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
6511 break;
6512 if (DECL_P (t))
6513 error_at (OMP_CLAUSE_LOCATION (c),
6514 "%qD is not a variable in clause %qs", t,
6515 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
6516 else
6517 error_at (OMP_CLAUSE_LOCATION (c),
6518 "%qE is not a variable in clause %qs", t,
6519 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
6520 remove = true;
6521 }
6522 else if ((ort == C_ORT_ACC
6523 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6524 || (ort == C_ORT_OMP
6525 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
6526 || (OMP_CLAUSE_CODE (c)
6527 == OMP_CLAUSE_USE_DEVICE_ADDR))))
6528 {
6529 if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t)))
6530 {
6531 error_at (OMP_CLAUSE_LOCATION (c),
6532 ort == C_ORT_ACC
6533 ? "%qD appears more than once in reduction clauses"
6534 : "%qD appears more than once in data clauses",
6535 t);
6536 remove = true;
6537 }
6538 else
6539 bitmap_set_bit (&oacc_reduction_head, DECL_UID (t));
6540 }
6541 else if (bitmap_bit_p (&generic_head, DECL_UID (t))
6542 || bitmap_bit_p (&firstprivate_head, DECL_UID (t))
6543 || bitmap_bit_p (&lastprivate_head, DECL_UID (t)))
6544 {
6545 error_at (OMP_CLAUSE_LOCATION (c),
6546 "%qD appears more than once in data clauses", t);
6547 remove = true;
6548 }
6549 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
6550 && bitmap_bit_p (&map_head, DECL_UID (t)))
6551 {
6552 if (ort == C_ORT_ACC)
6553 error_at (OMP_CLAUSE_LOCATION (c),
6554 "%qD appears more than once in data clauses", t);
6555 else
6556 error_at (OMP_CLAUSE_LOCATION (c),
6557 "%qD appears both in data and map clauses", t);
6558 remove = true;
6559 }
6560 else
6561 bitmap_set_bit (&generic_head, DECL_UID (t));
6562 if (!field_ok)
6563 break;
6564 handle_field_decl:
6565 if (!remove
6566 && TREE_CODE (t) == FIELD_DECL
6567 && t == OMP_CLAUSE_DECL (c)
6568 && ort != C_ORT_ACC)
6569 {
6570 OMP_CLAUSE_DECL (c)
6571 = omp_privatize_field (t, (OMP_CLAUSE_CODE (c)
6572 == OMP_CLAUSE_SHARED));
6573 if (OMP_CLAUSE_DECL (c) == error_mark_node)
6574 remove = true;
6575 }
6576 break;
6577
6578 case OMP_CLAUSE_FIRSTPRIVATE:
6579 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c));
6580 if (t)
6581 omp_note_field_privatization (t, OMP_CLAUSE_DECL (c));
6582 else
6583 t = OMP_CLAUSE_DECL (c);
6584 if (ort != C_ORT_ACC && t == current_class_ptr)
6585 {
6586 error_at (OMP_CLAUSE_LOCATION (c),
6587 "%<this%> allowed in OpenMP only in %<declare simd%>"
6588 " clauses");
6589 remove = true;
6590 break;
6591 }
6592 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL
6593 && ((ort & C_ORT_OMP_DECLARE_SIMD) != C_ORT_OMP
6594 || TREE_CODE (t) != FIELD_DECL))
6595 {
6596 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
6597 break;
6598 if (DECL_P (t))
6599 error_at (OMP_CLAUSE_LOCATION (c),
6600 "%qD is not a variable in clause %<firstprivate%>",
6601 t);
6602 else
6603 error_at (OMP_CLAUSE_LOCATION (c),
6604 "%qE is not a variable in clause %<firstprivate%>",
6605 t);
6606 remove = true;
6607 }
6608 else if (bitmap_bit_p (&generic_head, DECL_UID (t))
6609 || bitmap_bit_p (&firstprivate_head, DECL_UID (t)))
6610 {
6611 error_at (OMP_CLAUSE_LOCATION (c),
6612 "%qD appears more than once in data clauses", t);
6613 remove = true;
6614 }
6615 else if (bitmap_bit_p (&map_head, DECL_UID (t)))
6616 {
6617 if (ort == C_ORT_ACC)
6618 error_at (OMP_CLAUSE_LOCATION (c),
6619 "%qD appears more than once in data clauses", t);
6620 else
6621 error_at (OMP_CLAUSE_LOCATION (c),
6622 "%qD appears both in data and map clauses", t);
6623 remove = true;
6624 }
6625 else
6626 bitmap_set_bit (&firstprivate_head, DECL_UID (t));
6627 goto handle_field_decl;
6628
6629 case OMP_CLAUSE_LASTPRIVATE:
6630 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c));
6631 if (t)
6632 omp_note_field_privatization (t, OMP_CLAUSE_DECL (c));
6633 else
6634 t = OMP_CLAUSE_DECL (c);
6635 if (t == current_class_ptr)
6636 {
6637 error_at (OMP_CLAUSE_LOCATION (c),
6638 "%<this%> allowed in OpenMP only in %<declare simd%>"
6639 " clauses");
6640 remove = true;
6641 break;
6642 }
6643 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL
6644 && ((ort & C_ORT_OMP_DECLARE_SIMD) != C_ORT_OMP
6645 || TREE_CODE (t) != FIELD_DECL))
6646 {
6647 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
6648 break;
6649 if (DECL_P (t))
6650 error_at (OMP_CLAUSE_LOCATION (c),
6651 "%qD is not a variable in clause %<lastprivate%>",
6652 t);
6653 else
6654 error_at (OMP_CLAUSE_LOCATION (c),
6655 "%qE is not a variable in clause %<lastprivate%>",
6656 t);
6657 remove = true;
6658 }
6659 else if (bitmap_bit_p (&generic_head, DECL_UID (t))
6660 || bitmap_bit_p (&lastprivate_head, DECL_UID (t)))
6661 {
6662 error_at (OMP_CLAUSE_LOCATION (c),
6663 "%qD appears more than once in data clauses", t);
6664 remove = true;
6665 }
6666 else
6667 bitmap_set_bit (&lastprivate_head, DECL_UID (t));
6668 goto handle_field_decl;
6669
6670 case OMP_CLAUSE_IF:
6671 t = OMP_CLAUSE_IF_EXPR (c);
6672 t = maybe_convert_cond (t);
6673 if (t == error_mark_node)
6674 remove = true;
6675 else if (!processing_template_decl)
6676 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
6677 OMP_CLAUSE_IF_EXPR (c) = t;
6678 break;
6679
6680 case OMP_CLAUSE_FINAL:
6681 t = OMP_CLAUSE_FINAL_EXPR (c);
6682 t = maybe_convert_cond (t);
6683 if (t == error_mark_node)
6684 remove = true;
6685 else if (!processing_template_decl)
6686 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
6687 OMP_CLAUSE_FINAL_EXPR (c) = t;
6688 break;
6689
6690 case OMP_CLAUSE_GANG:
6691 /* Operand 1 is the gang static: argument. */
6692 t = OMP_CLAUSE_OPERAND (c, 1);
6693 if (t != NULL_TREE)
6694 {
6695 if (t == error_mark_node)
6696 remove = true;
6697 else if (!type_dependent_expression_p (t)
6698 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
6699 {
6700 error_at (OMP_CLAUSE_LOCATION (c),
6701 "%<gang%> static expression must be integral");
6702 remove = true;
6703 }
6704 else
6705 {
6706 t = mark_rvalue_use (t);
6707 if (!processing_template_decl)
6708 {
6709 t = maybe_constant_value (t);
6710 if (TREE_CODE (t) == INTEGER_CST
6711 && tree_int_cst_sgn (t) != 1
6712 && t != integer_minus_one_node)
6713 {
6714 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6715 "%<gang%> static value must be "
6716 "positive");
6717 t = integer_one_node;
6718 }
6719 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
6720 }
6721 }
6722 OMP_CLAUSE_OPERAND (c, 1) = t;
6723 }
6724 /* Check operand 0, the num argument. */
6725 /* FALLTHRU */
6726
6727 case OMP_CLAUSE_WORKER:
6728 case OMP_CLAUSE_VECTOR:
6729 if (OMP_CLAUSE_OPERAND (c, 0) == NULL_TREE)
6730 break;
6731 /* FALLTHRU */
6732
6733 case OMP_CLAUSE_NUM_TASKS:
6734 case OMP_CLAUSE_NUM_TEAMS:
6735 case OMP_CLAUSE_NUM_THREADS:
6736 case OMP_CLAUSE_NUM_GANGS:
6737 case OMP_CLAUSE_NUM_WORKERS:
6738 case OMP_CLAUSE_VECTOR_LENGTH:
6739 t = OMP_CLAUSE_OPERAND (c, 0);
6740 if (t == error_mark_node)
6741 remove = true;
6742 else if (!type_dependent_expression_p (t)
6743 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
6744 {
6745 switch (OMP_CLAUSE_CODE (c))
6746 {
6747 case OMP_CLAUSE_GANG:
6748 error_at (OMP_CLAUSE_LOCATION (c),
6749 "%<gang%> num expression must be integral"); break;
6750 case OMP_CLAUSE_VECTOR:
6751 error_at (OMP_CLAUSE_LOCATION (c),
6752 "%<vector%> length expression must be integral");
6753 break;
6754 case OMP_CLAUSE_WORKER:
6755 error_at (OMP_CLAUSE_LOCATION (c),
6756 "%<worker%> num expression must be integral");
6757 break;
6758 default:
6759 error_at (OMP_CLAUSE_LOCATION (c),
6760 "%qs expression must be integral",
6761 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
6762 }
6763 remove = true;
6764 }
6765 else
6766 {
6767 t = mark_rvalue_use (t);
6768 if (!processing_template_decl)
6769 {
6770 t = maybe_constant_value (t);
6771 if (TREE_CODE (t) == INTEGER_CST
6772 && tree_int_cst_sgn (t) != 1)
6773 {
6774 switch (OMP_CLAUSE_CODE (c))
6775 {
6776 case OMP_CLAUSE_GANG:
6777 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6778 "%<gang%> num value must be positive");
6779 break;
6780 case OMP_CLAUSE_VECTOR:
6781 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6782 "%<vector%> length value must be "
6783 "positive");
6784 break;
6785 case OMP_CLAUSE_WORKER:
6786 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6787 "%<worker%> num value must be "
6788 "positive");
6789 break;
6790 default:
6791 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6792 "%qs value must be positive",
6793 omp_clause_code_name
6794 [OMP_CLAUSE_CODE (c)]);
6795 }
6796 t = integer_one_node;
6797 }
6798 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
6799 }
6800 OMP_CLAUSE_OPERAND (c, 0) = t;
6801 }
6802 break;
6803
6804 case OMP_CLAUSE_SCHEDULE:
6805 t = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c);
6806 if (t == NULL)
6807 ;
6808 else if (t == error_mark_node)
6809 remove = true;
6810 else if (!type_dependent_expression_p (t)
6811 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
6812 {
6813 error_at (OMP_CLAUSE_LOCATION (c),
6814 "schedule chunk size expression must be integral");
6815 remove = true;
6816 }
6817 else
6818 {
6819 t = mark_rvalue_use (t);
6820 if (!processing_template_decl)
6821 {
6822 t = maybe_constant_value (t);
6823 if (TREE_CODE (t) == INTEGER_CST
6824 && tree_int_cst_sgn (t) != 1)
6825 {
6826 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6827 "chunk size value must be positive");
6828 t = integer_one_node;
6829 }
6830 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
6831 }
6832 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = t;
6833 }
6834 if (!remove)
6835 schedule_seen = true;
6836 break;
6837
6838 case OMP_CLAUSE_SIMDLEN:
6839 case OMP_CLAUSE_SAFELEN:
6840 t = OMP_CLAUSE_OPERAND (c, 0);
6841 if (t == error_mark_node)
6842 remove = true;
6843 else if (!type_dependent_expression_p (t)
6844 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
6845 {
6846 error_at (OMP_CLAUSE_LOCATION (c),
6847 "%qs length expression must be integral",
6848 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
6849 remove = true;
6850 }
6851 else
6852 {
6853 t = mark_rvalue_use (t);
6854 if (!processing_template_decl)
6855 {
6856 t = maybe_constant_value (t);
6857 if (TREE_CODE (t) != INTEGER_CST
6858 || tree_int_cst_sgn (t) != 1)
6859 {
6860 error_at (OMP_CLAUSE_LOCATION (c),
6861 "%qs length expression must be positive "
6862 "constant integer expression",
6863 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
6864 remove = true;
6865 }
6866 }
6867 OMP_CLAUSE_OPERAND (c, 0) = t;
6868 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SAFELEN)
6869 safelen = c;
6870 }
6871 break;
6872
6873 case OMP_CLAUSE_ASYNC:
6874 t = OMP_CLAUSE_ASYNC_EXPR (c);
6875 if (t == error_mark_node)
6876 remove = true;
6877 else if (!type_dependent_expression_p (t)
6878 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
6879 {
6880 error_at (OMP_CLAUSE_LOCATION (c),
6881 "%<async%> expression must be integral");
6882 remove = true;
6883 }
6884 else
6885 {
6886 t = mark_rvalue_use (t);
6887 if (!processing_template_decl)
6888 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
6889 OMP_CLAUSE_ASYNC_EXPR (c) = t;
6890 }
6891 break;
6892
6893 case OMP_CLAUSE_WAIT:
6894 t = OMP_CLAUSE_WAIT_EXPR (c);
6895 if (t == error_mark_node)
6896 remove = true;
6897 else if (!processing_template_decl)
6898 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
6899 OMP_CLAUSE_WAIT_EXPR (c) = t;
6900 break;
6901
6902 case OMP_CLAUSE_THREAD_LIMIT:
6903 t = OMP_CLAUSE_THREAD_LIMIT_EXPR (c);
6904 if (t == error_mark_node)
6905 remove = true;
6906 else if (!type_dependent_expression_p (t)
6907 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
6908 {
6909 error_at (OMP_CLAUSE_LOCATION (c),
6910 "%<thread_limit%> expression must be integral");
6911 remove = true;
6912 }
6913 else
6914 {
6915 t = mark_rvalue_use (t);
6916 if (!processing_template_decl)
6917 {
6918 t = maybe_constant_value (t);
6919 if (TREE_CODE (t) == INTEGER_CST
6920 && tree_int_cst_sgn (t) != 1)
6921 {
6922 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6923 "%<thread_limit%> value must be positive");
6924 t = integer_one_node;
6925 }
6926 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
6927 }
6928 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = t;
6929 }
6930 break;
6931
6932 case OMP_CLAUSE_DEVICE:
6933 t = OMP_CLAUSE_DEVICE_ID (c);
6934 if (t == error_mark_node)
6935 remove = true;
6936 else if (!type_dependent_expression_p (t)
6937 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
6938 {
6939 error_at (OMP_CLAUSE_LOCATION (c),
6940 "%<device%> id must be integral");
6941 remove = true;
6942 }
6943 else
6944 {
6945 t = mark_rvalue_use (t);
6946 if (!processing_template_decl)
6947 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
6948 OMP_CLAUSE_DEVICE_ID (c) = t;
6949 }
6950 break;
6951
6952 case OMP_CLAUSE_DIST_SCHEDULE:
6953 t = OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c);
6954 if (t == NULL)
6955 ;
6956 else if (t == error_mark_node)
6957 remove = true;
6958 else if (!type_dependent_expression_p (t)
6959 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
6960 {
6961 error_at (OMP_CLAUSE_LOCATION (c),
6962 "%<dist_schedule%> chunk size expression must be "
6963 "integral");
6964 remove = true;
6965 }
6966 else
6967 {
6968 t = mark_rvalue_use (t);
6969 if (!processing_template_decl)
6970 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
6971 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c) = t;
6972 }
6973 break;
6974
6975 case OMP_CLAUSE_ALIGNED:
6976 t = OMP_CLAUSE_DECL (c);
6977 if (t == current_class_ptr && ort != C_ORT_OMP_DECLARE_SIMD)
6978 {
6979 error_at (OMP_CLAUSE_LOCATION (c),
6980 "%<this%> allowed in OpenMP only in %<declare simd%>"
6981 " clauses");
6982 remove = true;
6983 break;
6984 }
6985 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL)
6986 {
6987 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
6988 break;
6989 if (DECL_P (t))
6990 error_at (OMP_CLAUSE_LOCATION (c),
6991 "%qD is not a variable in %<aligned%> clause", t);
6992 else
6993 error_at (OMP_CLAUSE_LOCATION (c),
6994 "%qE is not a variable in %<aligned%> clause", t);
6995 remove = true;
6996 }
6997 else if (!type_dependent_expression_p (t)
6998 && !TYPE_PTR_P (TREE_TYPE (t))
6999 && TREE_CODE (TREE_TYPE (t)) != ARRAY_TYPE
7000 && (!TYPE_REF_P (TREE_TYPE (t))
7001 || (!INDIRECT_TYPE_P (TREE_TYPE (TREE_TYPE (t)))
7002 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
7003 != ARRAY_TYPE))))
7004 {
7005 error_at (OMP_CLAUSE_LOCATION (c),
7006 "%qE in %<aligned%> clause is neither a pointer nor "
7007 "an array nor a reference to pointer or array", t);
7008 remove = true;
7009 }
7010 else if (bitmap_bit_p (&aligned_head, DECL_UID (t)))
7011 {
7012 error_at (OMP_CLAUSE_LOCATION (c),
7013 "%qD appears more than once in %<aligned%> clauses",
7014 t);
7015 remove = true;
7016 }
7017 else
7018 bitmap_set_bit (&aligned_head, DECL_UID (t));
7019 t = OMP_CLAUSE_ALIGNED_ALIGNMENT (c);
7020 if (t == error_mark_node)
7021 remove = true;
7022 else if (t == NULL_TREE)
7023 break;
7024 else if (!type_dependent_expression_p (t)
7025 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7026 {
7027 error_at (OMP_CLAUSE_LOCATION (c),
7028 "%<aligned%> clause alignment expression must "
7029 "be integral");
7030 remove = true;
7031 }
7032 else
7033 {
7034 t = mark_rvalue_use (t);
7035 if (!processing_template_decl)
7036 {
7037 t = maybe_constant_value (t);
7038 if (TREE_CODE (t) != INTEGER_CST
7039 || tree_int_cst_sgn (t) != 1)
7040 {
7041 error_at (OMP_CLAUSE_LOCATION (c),
7042 "%<aligned%> clause alignment expression must "
7043 "be positive constant integer expression");
7044 remove = true;
7045 }
7046 else
7047 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7048 }
7049 OMP_CLAUSE_ALIGNED_ALIGNMENT (c) = t;
7050 }
7051 break;
7052
7053 case OMP_CLAUSE_NONTEMPORAL:
7054 t = OMP_CLAUSE_DECL (c);
7055 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL)
7056 {
7057 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
7058 break;
7059 if (DECL_P (t))
7060 error_at (OMP_CLAUSE_LOCATION (c),
7061 "%qD is not a variable in %<nontemporal%> clause",
7062 t);
7063 else
7064 error_at (OMP_CLAUSE_LOCATION (c),
7065 "%qE is not a variable in %<nontemporal%> clause",
7066 t);
7067 remove = true;
7068 }
7069 else if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t)))
7070 {
7071 error_at (OMP_CLAUSE_LOCATION (c),
7072 "%qD appears more than once in %<nontemporal%> "
7073 "clauses", t);
7074 remove = true;
7075 }
7076 else
7077 bitmap_set_bit (&oacc_reduction_head, DECL_UID (t));
7078 break;
7079
7080 case OMP_CLAUSE_DEPEND:
7081 t = OMP_CLAUSE_DECL (c);
7082 if (t == NULL_TREE)
7083 {
7084 gcc_assert (OMP_CLAUSE_DEPEND_KIND (c)
7085 == OMP_CLAUSE_DEPEND_SOURCE);
7086 break;
7087 }
7088 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
7089 {
7090 if (cp_finish_omp_clause_depend_sink (c))
7091 remove = true;
7092 break;
7093 }
7094 if (TREE_CODE (t) == TREE_LIST
7095 && TREE_PURPOSE (t)
7096 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7097 {
7098 if (TREE_PURPOSE (t) != last_iterators)
7099 last_iterators_remove
7100 = cp_omp_finish_iterators (TREE_PURPOSE (t));
7101 last_iterators = TREE_PURPOSE (t);
7102 t = TREE_VALUE (t);
7103 if (last_iterators_remove)
7104 t = error_mark_node;
7105 }
7106 else
7107 last_iterators = NULL_TREE;
7108
7109 if (TREE_CODE (t) == TREE_LIST)
7110 {
7111 if (handle_omp_array_sections (c, ort))
7112 remove = true;
7113 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_DEPOBJ)
7114 {
7115 error_at (OMP_CLAUSE_LOCATION (c),
7116 "%<depend%> clause with %<depobj%> dependence "
7117 "type on array section");
7118 remove = true;
7119 }
7120 break;
7121 }
7122 if (t == error_mark_node)
7123 remove = true;
7124 else if (t == current_class_ptr)
7125 {
7126 error_at (OMP_CLAUSE_LOCATION (c),
7127 "%<this%> allowed in OpenMP only in %<declare simd%>"
7128 " clauses");
7129 remove = true;
7130 }
7131 else if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
7132 break;
7133 else if (!lvalue_p (t))
7134 {
7135 if (DECL_P (t))
7136 error_at (OMP_CLAUSE_LOCATION (c),
7137 "%qD is not lvalue expression nor array section "
7138 "in %<depend%> clause", t);
7139 else
7140 error_at (OMP_CLAUSE_LOCATION (c),
7141 "%qE is not lvalue expression nor array section "
7142 "in %<depend%> clause", t);
7143 remove = true;
7144 }
7145 else if (TREE_CODE (t) == COMPONENT_REF
7146 && TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL
7147 && DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
7148 {
7149 error_at (OMP_CLAUSE_LOCATION (c),
7150 "bit-field %qE in %qs clause", t, "depend");
7151 remove = true;
7152 }
7153 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_DEPOBJ)
7154 {
7155 if (!c_omp_depend_t_p (TYPE_REF_P (TREE_TYPE (t))
7156 ? TREE_TYPE (TREE_TYPE (t))
7157 : TREE_TYPE (t)))
7158 {
7159 error_at (OMP_CLAUSE_LOCATION (c),
7160 "%qE does not have %<omp_depend_t%> type in "
7161 "%<depend%> clause with %<depobj%> dependence "
7162 "type", t);
7163 remove = true;
7164 }
7165 }
7166 else if (c_omp_depend_t_p (TYPE_REF_P (TREE_TYPE (t))
7167 ? TREE_TYPE (TREE_TYPE (t))
7168 : TREE_TYPE (t)))
7169 {
7170 error_at (OMP_CLAUSE_LOCATION (c),
7171 "%qE should not have %<omp_depend_t%> type in "
7172 "%<depend%> clause with dependence type other than "
7173 "%<depobj%>", t);
7174 remove = true;
7175 }
7176 if (!remove)
7177 {
7178 tree addr = cp_build_addr_expr (t, tf_warning_or_error);
7179 if (addr == error_mark_node)
7180 remove = true;
7181 else
7182 {
7183 t = cp_build_indirect_ref (OMP_CLAUSE_LOCATION (c),
7184 addr, RO_UNARY_STAR,
7185 tf_warning_or_error);
7186 if (t == error_mark_node)
7187 remove = true;
7188 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == TREE_LIST
7189 && TREE_PURPOSE (OMP_CLAUSE_DECL (c))
7190 && (TREE_CODE (TREE_PURPOSE (OMP_CLAUSE_DECL (c)))
7191 == TREE_VEC))
7192 TREE_VALUE (OMP_CLAUSE_DECL (c)) = t;
7193 else
7194 OMP_CLAUSE_DECL (c) = t;
7195 }
7196 }
7197 break;
7198
7199 case OMP_CLAUSE_MAP:
7200 case OMP_CLAUSE_TO:
7201 case OMP_CLAUSE_FROM:
7202 case OMP_CLAUSE__CACHE_:
7203 t = OMP_CLAUSE_DECL (c);
7204 if (TREE_CODE (t) == TREE_LIST)
7205 {
7206 if (handle_omp_array_sections (c, ort))
7207 remove = true;
7208 else
7209 {
7210 t = OMP_CLAUSE_DECL (c);
7211 if (TREE_CODE (t) != TREE_LIST
7212 && !type_dependent_expression_p (t)
7213 && !cp_omp_mappable_type (TREE_TYPE (t)))
7214 {
7215 error_at (OMP_CLAUSE_LOCATION (c),
7216 "array section does not have mappable type "
7217 "in %qs clause",
7218 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7219 cp_omp_emit_unmappable_type_notes (TREE_TYPE (t));
7220 remove = true;
7221 }
7222 while (TREE_CODE (t) == ARRAY_REF)
7223 t = TREE_OPERAND (t, 0);
7224 if (TREE_CODE (t) == COMPONENT_REF
7225 && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
7226 {
7227 while (TREE_CODE (t) == COMPONENT_REF)
7228 t = TREE_OPERAND (t, 0);
7229 if (REFERENCE_REF_P (t))
7230 t = TREE_OPERAND (t, 0);
7231 if (bitmap_bit_p (&map_field_head, DECL_UID (t)))
7232 break;
7233 if (bitmap_bit_p (&map_head, DECL_UID (t)))
7234 {
7235 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
7236 error_at (OMP_CLAUSE_LOCATION (c),
7237 "%qD appears more than once in motion"
7238 " clauses", t);
7239 else if (ort == C_ORT_ACC)
7240 error_at (OMP_CLAUSE_LOCATION (c),
7241 "%qD appears more than once in data"
7242 " clauses", t);
7243 else
7244 error_at (OMP_CLAUSE_LOCATION (c),
7245 "%qD appears more than once in map"
7246 " clauses", t);
7247 remove = true;
7248 }
7249 else
7250 {
7251 bitmap_set_bit (&map_head, DECL_UID (t));
7252 bitmap_set_bit (&map_field_head, DECL_UID (t));
7253 }
7254 }
7255 }
7256 break;
7257 }
7258 if (t == error_mark_node)
7259 {
7260 remove = true;
7261 break;
7262 }
7263 if (REFERENCE_REF_P (t)
7264 && TREE_CODE (TREE_OPERAND (t, 0)) == COMPONENT_REF)
7265 {
7266 t = TREE_OPERAND (t, 0);
7267 OMP_CLAUSE_DECL (c) = t;
7268 }
7269 if (TREE_CODE (t) == COMPONENT_REF
7270 && (ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP
7271 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE__CACHE_)
7272 {
7273 if (type_dependent_expression_p (t))
7274 break;
7275 if (TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL
7276 && DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
7277 {
7278 error_at (OMP_CLAUSE_LOCATION (c),
7279 "bit-field %qE in %qs clause",
7280 t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7281 remove = true;
7282 }
7283 else if (!cp_omp_mappable_type (TREE_TYPE (t)))
7284 {
7285 error_at (OMP_CLAUSE_LOCATION (c),
7286 "%qE does not have a mappable type in %qs clause",
7287 t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7288 cp_omp_emit_unmappable_type_notes (TREE_TYPE (t));
7289 remove = true;
7290 }
7291 while (TREE_CODE (t) == COMPONENT_REF)
7292 {
7293 if (TREE_TYPE (TREE_OPERAND (t, 0))
7294 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
7295 == UNION_TYPE))
7296 {
7297 error_at (OMP_CLAUSE_LOCATION (c),
7298 "%qE is a member of a union", t);
7299 remove = true;
7300 break;
7301 }
7302 t = TREE_OPERAND (t, 0);
7303 }
7304 if (remove)
7305 break;
7306 if (REFERENCE_REF_P (t))
7307 t = TREE_OPERAND (t, 0);
7308 if (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
7309 {
7310 if (bitmap_bit_p (&map_field_head, DECL_UID (t)))
7311 goto handle_map_references;
7312 }
7313 }
7314 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL)
7315 {
7316 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
7317 break;
7318 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7319 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7320 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER))
7321 break;
7322 if (DECL_P (t))
7323 error_at (OMP_CLAUSE_LOCATION (c),
7324 "%qD is not a variable in %qs clause", t,
7325 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7326 else
7327 error_at (OMP_CLAUSE_LOCATION (c),
7328 "%qE is not a variable in %qs clause", t,
7329 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7330 remove = true;
7331 }
7332 else if (VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t))
7333 {
7334 error_at (OMP_CLAUSE_LOCATION (c),
7335 "%qD is a threadprivate variable in %qs clause", t,
7336 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7337 remove = true;
7338 }
7339 else if (ort != C_ORT_ACC && t == current_class_ptr)
7340 {
7341 error_at (OMP_CLAUSE_LOCATION (c),
7342 "%<this%> allowed in OpenMP only in %<declare simd%>"
7343 " clauses");
7344 remove = true;
7345 break;
7346 }
7347 else if (!processing_template_decl
7348 && !TYPE_REF_P (TREE_TYPE (t))
7349 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7350 || (OMP_CLAUSE_MAP_KIND (c)
7351 != GOMP_MAP_FIRSTPRIVATE_POINTER))
7352 && !cxx_mark_addressable (t))
7353 remove = true;
7354 else if (!(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7355 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7356 || (OMP_CLAUSE_MAP_KIND (c)
7357 == GOMP_MAP_FIRSTPRIVATE_POINTER)))
7358 && t == OMP_CLAUSE_DECL (c)
7359 && !type_dependent_expression_p (t)
7360 && !cp_omp_mappable_type (TYPE_REF_P (TREE_TYPE (t))
7361 ? TREE_TYPE (TREE_TYPE (t))
7362 : TREE_TYPE (t)))
7363 {
7364 error_at (OMP_CLAUSE_LOCATION (c),
7365 "%qD does not have a mappable type in %qs clause", t,
7366 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7367 cp_omp_emit_unmappable_type_notes (TREE_TYPE (t));
7368 remove = true;
7369 }
7370 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7371 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FORCE_DEVICEPTR
7372 && !type_dependent_expression_p (t)
7373 && !INDIRECT_TYPE_P (TREE_TYPE (t)))
7374 {
7375 error_at (OMP_CLAUSE_LOCATION (c),
7376 "%qD is not a pointer variable", t);
7377 remove = true;
7378 }
7379 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7380 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
7381 {
7382 if (bitmap_bit_p (&generic_head, DECL_UID (t))
7383 || bitmap_bit_p (&firstprivate_head, DECL_UID (t)))
7384 {
7385 error_at (OMP_CLAUSE_LOCATION (c),
7386 "%qD appears more than once in data clauses", t);
7387 remove = true;
7388 }
7389 else if (bitmap_bit_p (&map_head, DECL_UID (t)))
7390 {
7391 if (ort == C_ORT_ACC)
7392 error_at (OMP_CLAUSE_LOCATION (c),
7393 "%qD appears more than once in data clauses", t);
7394 else
7395 error_at (OMP_CLAUSE_LOCATION (c),
7396 "%qD appears both in data and map clauses", t);
7397 remove = true;
7398 }
7399 else
7400 bitmap_set_bit (&generic_head, DECL_UID (t));
7401 }
7402 else if (bitmap_bit_p (&map_head, DECL_UID (t)))
7403 {
7404 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
7405 error_at (OMP_CLAUSE_LOCATION (c),
7406 "%qD appears more than once in motion clauses", t);
7407 else if (ort == C_ORT_ACC)
7408 error_at (OMP_CLAUSE_LOCATION (c),
7409 "%qD appears more than once in data clauses", t);
7410 else
7411 error_at (OMP_CLAUSE_LOCATION (c),
7412 "%qD appears more than once in map clauses", t);
7413 remove = true;
7414 }
7415 else if (bitmap_bit_p (&generic_head, DECL_UID (t))
7416 || bitmap_bit_p (&firstprivate_head, DECL_UID (t)))
7417 {
7418 if (ort == C_ORT_ACC)
7419 error_at (OMP_CLAUSE_LOCATION (c),
7420 "%qD appears more than once in data clauses", t);
7421 else
7422 error_at (OMP_CLAUSE_LOCATION (c),
7423 "%qD appears both in data and map clauses", t);
7424 remove = true;
7425 }
7426 else
7427 {
7428 bitmap_set_bit (&map_head, DECL_UID (t));
7429 if (t != OMP_CLAUSE_DECL (c)
7430 && TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
7431 bitmap_set_bit (&map_field_head, DECL_UID (t));
7432 }
7433 handle_map_references:
7434 if (!remove
7435 && !processing_template_decl
7436 && ort != C_ORT_DECLARE_SIMD
7437 && TYPE_REF_P (TREE_TYPE (OMP_CLAUSE_DECL (c))))
7438 {
7439 t = OMP_CLAUSE_DECL (c);
7440 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
7441 {
7442 OMP_CLAUSE_DECL (c) = build_simple_mem_ref (t);
7443 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7444 OMP_CLAUSE_SIZE (c)
7445 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (t)));
7446 }
7447 else if (OMP_CLAUSE_MAP_KIND (c)
7448 != GOMP_MAP_FIRSTPRIVATE_POINTER
7449 && (OMP_CLAUSE_MAP_KIND (c)
7450 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
7451 && (OMP_CLAUSE_MAP_KIND (c)
7452 != GOMP_MAP_ALWAYS_POINTER))
7453 {
7454 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7455 OMP_CLAUSE_MAP);
7456 if (TREE_CODE (t) == COMPONENT_REF)
7457 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALWAYS_POINTER);
7458 else
7459 OMP_CLAUSE_SET_MAP_KIND (c2,
7460 GOMP_MAP_FIRSTPRIVATE_REFERENCE);
7461 OMP_CLAUSE_DECL (c2) = t;
7462 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7463 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
7464 OMP_CLAUSE_CHAIN (c) = c2;
7465 OMP_CLAUSE_DECL (c) = build_simple_mem_ref (t);
7466 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7467 OMP_CLAUSE_SIZE (c)
7468 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (t)));
7469 c = c2;
7470 }
7471 }
7472 break;
7473
7474 case OMP_CLAUSE_TO_DECLARE:
7475 case OMP_CLAUSE_LINK:
7476 t = OMP_CLAUSE_DECL (c);
7477 if (TREE_CODE (t) == FUNCTION_DECL
7478 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO_DECLARE)
7479 ;
7480 else if (!VAR_P (t))
7481 {
7482 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO_DECLARE)
7483 {
7484 if (TREE_CODE (t) == TEMPLATE_ID_EXPR)
7485 error_at (OMP_CLAUSE_LOCATION (c),
7486 "template %qE in clause %qs", t,
7487 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7488 else if (really_overloaded_fn (t))
7489 error_at (OMP_CLAUSE_LOCATION (c),
7490 "overloaded function name %qE in clause %qs", t,
7491 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7492 else
7493 error_at (OMP_CLAUSE_LOCATION (c),
7494 "%qE is neither a variable nor a function name "
7495 "in clause %qs", t,
7496 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7497 }
7498 else
7499 error_at (OMP_CLAUSE_LOCATION (c),
7500 "%qE is not a variable in clause %qs", t,
7501 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7502 remove = true;
7503 }
7504 else if (DECL_THREAD_LOCAL_P (t))
7505 {
7506 error_at (OMP_CLAUSE_LOCATION (c),
7507 "%qD is a threadprivate variable in %qs clause", t,
7508 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7509 remove = true;
7510 }
7511 else if (!cp_omp_mappable_type (TREE_TYPE (t)))
7512 {
7513 error_at (OMP_CLAUSE_LOCATION (c),
7514 "%qD does not have a mappable type in %qs clause", t,
7515 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7516 cp_omp_emit_unmappable_type_notes (TREE_TYPE (t));
7517 remove = true;
7518 }
7519 if (remove)
7520 break;
7521 if (bitmap_bit_p (&generic_head, DECL_UID (t)))
7522 {
7523 error_at (OMP_CLAUSE_LOCATION (c),
7524 "%qE appears more than once on the same "
7525 "%<declare target%> directive", t);
7526 remove = true;
7527 }
7528 else
7529 bitmap_set_bit (&generic_head, DECL_UID (t));
7530 break;
7531
7532 case OMP_CLAUSE_UNIFORM:
7533 t = OMP_CLAUSE_DECL (c);
7534 if (TREE_CODE (t) != PARM_DECL)
7535 {
7536 if (processing_template_decl)
7537 break;
7538 if (DECL_P (t))
7539 error_at (OMP_CLAUSE_LOCATION (c),
7540 "%qD is not an argument in %<uniform%> clause", t);
7541 else
7542 error_at (OMP_CLAUSE_LOCATION (c),
7543 "%qE is not an argument in %<uniform%> clause", t);
7544 remove = true;
7545 break;
7546 }
7547 /* map_head bitmap is used as uniform_head if declare_simd. */
7548 bitmap_set_bit (&map_head, DECL_UID (t));
7549 goto check_dup_generic;
7550
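	/* Illustrative example, not part of the original sources: because
	   map_head doubles as uniform_head for declare simd, the
	   OMP_CLAUSE_LINEAR check in the second pass below can verify that a
	   linear step which is itself a parameter was listed in a uniform
	   clause:

	     #pragma omp declare simd uniform (s) linear (i : s)   // OK
	     int f (int *p, int i, int s);

	     #pragma omp declare simd linear (i : s)   // error: step s not
	     int g (int *p, int i, int s);             // declared uniform
	   */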
7551 case OMP_CLAUSE_GRAINSIZE:
7552 t = OMP_CLAUSE_GRAINSIZE_EXPR (c);
7553 if (t == error_mark_node)
7554 remove = true;
7555 else if (!type_dependent_expression_p (t)
7556 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7557 {
7558 error_at (OMP_CLAUSE_LOCATION (c),
7559 "%<grainsize%> expression must be integral");
7560 remove = true;
7561 }
7562 else
7563 {
7564 t = mark_rvalue_use (t);
7565 if (!processing_template_decl)
7566 {
7567 t = maybe_constant_value (t);
7568 if (TREE_CODE (t) == INTEGER_CST
7569 && tree_int_cst_sgn (t) != 1)
7570 {
7571 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7572 "%<grainsize%> value must be positive");
7573 t = integer_one_node;
7574 }
7575 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7576 }
7577 OMP_CLAUSE_GRAINSIZE_EXPR (c) = t;
7578 }
7579 break;
7580
7581 case OMP_CLAUSE_PRIORITY:
7582 t = OMP_CLAUSE_PRIORITY_EXPR (c);
7583 if (t == error_mark_node)
7584 remove = true;
7585 else if (!type_dependent_expression_p (t)
7586 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7587 {
7588 error_at (OMP_CLAUSE_LOCATION (c),
7589 "%<priority%> expression must be integral");
7590 remove = true;
7591 }
7592 else
7593 {
7594 t = mark_rvalue_use (t);
7595 if (!processing_template_decl)
7596 {
7597 t = maybe_constant_value (t);
7598 if (TREE_CODE (t) == INTEGER_CST
7599 && tree_int_cst_sgn (t) == -1)
7600 {
7601 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7602 "%<priority%> value must be non-negative");
7603 t = integer_one_node;
7604 }
7605 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7606 }
7607 OMP_CLAUSE_PRIORITY_EXPR (c) = t;
7608 }
7609 break;
7610
7611 case OMP_CLAUSE_HINT:
7612 t = OMP_CLAUSE_HINT_EXPR (c);
7613 if (t == error_mark_node)
7614 remove = true;
7615 else if (!type_dependent_expression_p (t)
7616 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7617 {
7618 error_at (OMP_CLAUSE_LOCATION (c),
7619 "%<hint%> expression must be integral");
7620 remove = true;
7621 }
7622 else
7623 {
7624 t = mark_rvalue_use (t);
7625 if (!processing_template_decl)
7626 {
7627 t = maybe_constant_value (t);
7628 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7629 if (TREE_CODE (t) != INTEGER_CST)
7630 {
7631 error_at (OMP_CLAUSE_LOCATION (c),
7632 "%<hint%> expression must be constant integer "
7633 "expression");
7634 remove = true;
7635 }
7636 }
7637 OMP_CLAUSE_HINT_EXPR (c) = t;
7638 }
7639 break;
7640
7641 case OMP_CLAUSE_IS_DEVICE_PTR:
7642 case OMP_CLAUSE_USE_DEVICE_PTR:
7643 field_ok = (ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP;
7644 t = OMP_CLAUSE_DECL (c);
7645 if (!type_dependent_expression_p (t))
7646 {
7647 tree type = TREE_TYPE (t);
7648 if (!TYPE_PTR_P (type)
7649 && (!TYPE_REF_P (type) || !TYPE_PTR_P (TREE_TYPE (type))))
7650 {
7651 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
7652 && ort == C_ORT_OMP)
7653 {
7654 error_at (OMP_CLAUSE_LOCATION (c),
7655 "%qs variable is neither a pointer "
7656 "nor reference to pointer",
7657 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7658 remove = true;
7659 }
7660 else if (TREE_CODE (type) != ARRAY_TYPE
7661 && (!TYPE_REF_P (type)
7662 || TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7663 {
7664 error_at (OMP_CLAUSE_LOCATION (c),
7665 "%qs variable is neither a pointer, nor an "
7666 "array nor reference to pointer or array",
7667 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7668 remove = true;
7669 }
7670 }
7671 }
7672 goto check_dup_generic;
7673
7674 case OMP_CLAUSE_USE_DEVICE_ADDR:
7675 field_ok = true;
7676 t = OMP_CLAUSE_DECL (c);
7677 if (!processing_template_decl
7678 && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
7679 && !TYPE_REF_P (TREE_TYPE (t))
7680 && !cxx_mark_addressable (t))
7681 remove = true;
7682 goto check_dup_generic;
7683
7684 case OMP_CLAUSE_NOWAIT:
7685 case OMP_CLAUSE_DEFAULT:
7686 case OMP_CLAUSE_UNTIED:
7687 case OMP_CLAUSE_COLLAPSE:
7688 case OMP_CLAUSE_MERGEABLE:
7689 case OMP_CLAUSE_PARALLEL:
7690 case OMP_CLAUSE_FOR:
7691 case OMP_CLAUSE_SECTIONS:
7692 case OMP_CLAUSE_TASKGROUP:
7693 case OMP_CLAUSE_PROC_BIND:
7694 case OMP_CLAUSE_DEVICE_TYPE:
7695 case OMP_CLAUSE_NOGROUP:
7696 case OMP_CLAUSE_THREADS:
7697 case OMP_CLAUSE_SIMD:
7698 case OMP_CLAUSE_DEFAULTMAP:
7699 case OMP_CLAUSE_BIND:
7700 case OMP_CLAUSE_AUTO:
7701 case OMP_CLAUSE_INDEPENDENT:
7702 case OMP_CLAUSE_SEQ:
7703 case OMP_CLAUSE_IF_PRESENT:
7704 case OMP_CLAUSE_FINALIZE:
7705 break;
7706
7707 case OMP_CLAUSE_TILE:
7708 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
7709 list = TREE_CHAIN (list))
7710 {
7711 t = TREE_VALUE (list);
7712
7713 if (t == error_mark_node)
7714 remove = true;
7715 else if (!type_dependent_expression_p (t)
7716 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7717 {
7718 error_at (OMP_CLAUSE_LOCATION (c),
7719 "%<tile%> argument needs integral type");
7720 remove = true;
7721 }
7722 else
7723 {
7724 t = mark_rvalue_use (t);
7725 if (!processing_template_decl)
7726 {
7727 /* Zero is used to indicate '*'; we permit you to get
7728 there via an integer constant expression of value zero. */
7729 t = maybe_constant_value (t);
7730 if (!tree_fits_shwi_p (t)
7731 || tree_to_shwi (t) < 0)
7732 {
7733 error_at (OMP_CLAUSE_LOCATION (c),
7734 "%<tile%> argument needs positive "
7735 "integral constant");
7736 remove = true;
7737 }
7738 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7739 }
7740 }
7741
7742 /* Update list item. */
7743 TREE_VALUE (list) = t;
7744 }
7745 break;
7746
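	/* Illustrative example, not part of the original sources, assuming the
	   OpenACC tile clause: each argument must be a positive integral
	   constant expression, and '*' (represented internally as zero) leaves
	   the tile size to the implementation:

	     void zero_matrix (int n, int m, float *a)
	     {
	     #pragma acc parallel loop tile (8, *)
	       for (int i = 0; i < n; i++)
	         for (int j = 0; j < m; j++)
	           a[i * m + j] = 0.0f;
	     }  */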
7747 case OMP_CLAUSE_ORDERED:
7748 ordered_seen = true;
7749 break;
7750
7751 case OMP_CLAUSE_ORDER:
7752 if (order_seen)
7753 remove = true;
7754 else
7755 order_seen = true;
7756 break;
7757
7758 case OMP_CLAUSE_INBRANCH:
7759 case OMP_CLAUSE_NOTINBRANCH:
7760 if (branch_seen)
7761 {
7762 error_at (OMP_CLAUSE_LOCATION (c),
7763 "%<inbranch%> clause is incompatible with "
7764 "%<notinbranch%>");
7765 remove = true;
7766 }
7767 branch_seen = true;
7768 break;
7769
7770 case OMP_CLAUSE_INCLUSIVE:
7771 case OMP_CLAUSE_EXCLUSIVE:
7772 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c));
7773 if (!t)
7774 t = OMP_CLAUSE_DECL (c);
7775 if (t == current_class_ptr)
7776 {
7777 error_at (OMP_CLAUSE_LOCATION (c),
7778 "%<this%> allowed in OpenMP only in %<declare simd%>"
7779 " clauses");
7780 remove = true;
7781 break;
7782 }
7783 if (!VAR_P (t)
7784 && TREE_CODE (t) != PARM_DECL
7785 && TREE_CODE (t) != FIELD_DECL)
7786 {
7787 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
7788 break;
7789 if (DECL_P (t))
7790 error_at (OMP_CLAUSE_LOCATION (c),
7791 "%qD is not a variable in clause %qs", t,
7792 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7793 else
7794 error_at (OMP_CLAUSE_LOCATION (c),
7795 "%qE is not a variable in clause %qs", t,
7796 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7797 remove = true;
7798 }
7799 break;
7800
7801 default:
7802 gcc_unreachable ();
7803 }
7804
7805 if (remove)
7806 *pc = OMP_CLAUSE_CHAIN (c);
7807 else
7808 pc = &OMP_CLAUSE_CHAIN (c);
7809 }
7810
7811 if (reduction_seen < 0 && (ordered_seen || schedule_seen))
7812 reduction_seen = -2;
7813
7814 for (pc = &clauses, c = clauses; c ; c = *pc)
7815 {
7816 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
7817 bool remove = false;
7818 bool need_complete_type = false;
7819 bool need_default_ctor = false;
7820 bool need_copy_ctor = false;
7821 bool need_copy_assignment = false;
7822 bool need_implicitly_determined = false;
7823 bool need_dtor = false;
7824 tree type, inner_type;
7825
7826 switch (c_kind)
7827 {
7828 case OMP_CLAUSE_SHARED:
7829 need_implicitly_determined = true;
7830 break;
7831 case OMP_CLAUSE_PRIVATE:
7832 need_complete_type = true;
7833 need_default_ctor = true;
7834 need_dtor = true;
7835 need_implicitly_determined = true;
7836 break;
7837 case OMP_CLAUSE_FIRSTPRIVATE:
7838 need_complete_type = true;
7839 need_copy_ctor = true;
7840 need_dtor = true;
7841 need_implicitly_determined = true;
7842 break;
7843 case OMP_CLAUSE_LASTPRIVATE:
7844 need_complete_type = true;
7845 need_copy_assignment = true;
7846 need_implicitly_determined = true;
7847 break;
7848 case OMP_CLAUSE_REDUCTION:
7849 if (reduction_seen == -2)
7850 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
7851 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
7852 need_copy_assignment = true;
7853 need_implicitly_determined = true;
7854 break;
7855 case OMP_CLAUSE_IN_REDUCTION:
7856 case OMP_CLAUSE_TASK_REDUCTION:
7857 case OMP_CLAUSE_INCLUSIVE:
7858 case OMP_CLAUSE_EXCLUSIVE:
7859 need_implicitly_determined = true;
7860 break;
7861 case OMP_CLAUSE_LINEAR:
7862 if (ort != C_ORT_OMP_DECLARE_SIMD)
7863 need_implicitly_determined = true;
7864 else if (OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c)
7865 && !bitmap_bit_p (&map_head,
7866 DECL_UID (OMP_CLAUSE_LINEAR_STEP (c))))
7867 {
7868 error_at (OMP_CLAUSE_LOCATION (c),
7869 "%<linear%> clause step is a parameter %qD not "
7870 "specified in %<uniform%> clause",
7871 OMP_CLAUSE_LINEAR_STEP (c));
7872 *pc = OMP_CLAUSE_CHAIN (c);
7873 continue;
7874 }
7875 break;
7876 case OMP_CLAUSE_COPYPRIVATE:
7877 need_copy_assignment = true;
7878 break;
7879 case OMP_CLAUSE_COPYIN:
7880 need_copy_assignment = true;
7881 break;
7882 case OMP_CLAUSE_SIMDLEN:
7883 if (safelen
7884 && !processing_template_decl
7885 && tree_int_cst_lt (OMP_CLAUSE_SAFELEN_EXPR (safelen),
7886 OMP_CLAUSE_SIMDLEN_EXPR (c)))
7887 {
7888 error_at (OMP_CLAUSE_LOCATION (c),
7889 "%<simdlen%> clause value is bigger than "
7890 "%<safelen%> clause value");
7891 OMP_CLAUSE_SIMDLEN_EXPR (c)
7892 = OMP_CLAUSE_SAFELEN_EXPR (safelen);
7893 }
7894 pc = &OMP_CLAUSE_CHAIN (c);
7895 continue;
7896 case OMP_CLAUSE_SCHEDULE:
7897 if (ordered_seen
7898 && (OMP_CLAUSE_SCHEDULE_KIND (c)
7899 & OMP_CLAUSE_SCHEDULE_NONMONOTONIC))
7900 {
7901 error_at (OMP_CLAUSE_LOCATION (c),
7902 "%<nonmonotonic%> schedule modifier specified "
7903 "together with %<ordered%> clause");
7904 OMP_CLAUSE_SCHEDULE_KIND (c)
7905 = (enum omp_clause_schedule_kind)
7906 (OMP_CLAUSE_SCHEDULE_KIND (c)
7907 & ~OMP_CLAUSE_SCHEDULE_NONMONOTONIC);
7908 }
7909 if (reduction_seen == -2)
7910 error_at (OMP_CLAUSE_LOCATION (c),
7911 "%qs clause specified together with %<inscan%> "
7912 "%<reduction%> clause", "schedule");
7913 pc = &OMP_CLAUSE_CHAIN (c);
7914 continue;
7915 case OMP_CLAUSE_NOGROUP:
7916 if (reduction_seen)
7917 {
7918 error_at (OMP_CLAUSE_LOCATION (c),
7919 "%<nogroup%> clause must not be used together with "
7920 "%<reduction%> clause");
7921 *pc = OMP_CLAUSE_CHAIN (c);
7922 continue;
7923 }
7924 pc = &OMP_CLAUSE_CHAIN (c);
7925 continue;
7926 case OMP_CLAUSE_ORDERED:
7927 if (reduction_seen == -2)
7928 error_at (OMP_CLAUSE_LOCATION (c),
7929 "%qs clause specified together with %<inscan%> "
7930 "%<reduction%> clause", "ordered");
7931 pc = &OMP_CLAUSE_CHAIN (c);
7932 continue;
7933 case OMP_CLAUSE_ORDER:
7934 if (ordered_seen)
7935 {
7936 error_at (OMP_CLAUSE_LOCATION (c),
7937 "%<order%> clause must not be used together "
7938 "with %<ordered%>");
7939 *pc = OMP_CLAUSE_CHAIN (c);
7940 continue;
7941 }
7942 pc = &OMP_CLAUSE_CHAIN (c);
7943 continue;
7944 case OMP_CLAUSE_NOWAIT:
7945 if (copyprivate_seen)
7946 {
7947 error_at (OMP_CLAUSE_LOCATION (c),
7948 "%<nowait%> clause must not be used together "
7949 "with %<copyprivate%>");
7950 *pc = OMP_CLAUSE_CHAIN (c);
7951 continue;
7952 }
7953 /* FALLTHRU */
7954 default:
7955 pc = &OMP_CLAUSE_CHAIN (c);
7956 continue;
7957 }
7958
7959 t = OMP_CLAUSE_DECL (c);
7960 if (processing_template_decl
7961 && !VAR_P (t) && TREE_CODE (t) != PARM_DECL)
7962 {
7963 pc = &OMP_CLAUSE_CHAIN (c);
7964 continue;
7965 }
7966
7967 switch (c_kind)
7968 {
7969 case OMP_CLAUSE_LASTPRIVATE:
7970 if (!bitmap_bit_p (&firstprivate_head, DECL_UID (t)))
7971 {
7972 need_default_ctor = true;
7973 need_dtor = true;
7974 }
7975 break;
7976
7977 case OMP_CLAUSE_REDUCTION:
7978 case OMP_CLAUSE_IN_REDUCTION:
7979 case OMP_CLAUSE_TASK_REDUCTION:
7980 if (finish_omp_reduction_clause (c, &need_default_ctor,
7981 &need_dtor))
7982 remove = true;
7983 else
7984 t = OMP_CLAUSE_DECL (c);
7985 break;
7986
7987 case OMP_CLAUSE_COPYIN:
7988 if (!VAR_P (t) || !CP_DECL_THREAD_LOCAL_P (t))
7989 {
7990 error_at (OMP_CLAUSE_LOCATION (c),
7991 "%qE must be %<threadprivate%> for %<copyin%>", t);
7992 remove = true;
7993 }
7994 break;
7995
7996 default:
7997 break;
7998 }
7999
8000 if (need_complete_type || need_copy_assignment)
8001 {
8002 t = require_complete_type (t);
8003 if (t == error_mark_node)
8004 remove = true;
8005 else if (!processing_template_decl
8006 && TYPE_REF_P (TREE_TYPE (t))
8007 && !complete_type_or_else (TREE_TYPE (TREE_TYPE (t)), t))
8008 remove = true;
8009 }
8010 if (need_implicitly_determined)
8011 {
8012 const char *share_name = NULL;
8013
8014 if (VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t))
8015 share_name = "threadprivate";
8016 else switch (cxx_omp_predetermined_sharing_1 (t))
8017 {
8018 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
8019 break;
8020 case OMP_CLAUSE_DEFAULT_SHARED:
8021 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8022 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8023 && c_omp_predefined_variable (t))
8024 /* The __func__ variable and similar function-local predefined
8025 variables may be listed in a shared or firstprivate
8026 clause. */
8027 break;
8028 if (VAR_P (t)
8029 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
8030 && TREE_STATIC (t)
8031 && cxx_omp_const_qual_no_mutable (t))
8032 {
8033 tree ctx = CP_DECL_CONTEXT (t);
8034 /* const qualified static data members without a mutable
8035 member may be specified in a firstprivate clause. */
8036 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
8037 break;
8038 }
8039 share_name = "shared";
8040 break;
8041 case OMP_CLAUSE_DEFAULT_PRIVATE:
8042 share_name = "private";
8043 break;
8044 default:
8045 gcc_unreachable ();
8046 }
8047 if (share_name)
8048 {
8049 error_at (OMP_CLAUSE_LOCATION (c),
8050 "%qE is predetermined %qs for %qs",
8051 omp_clause_printable_decl (t), share_name,
8052 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8053 remove = true;
8054 }
8055 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED
8056 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
8057 && cxx_omp_const_qual_no_mutable (t))
8058 {
8059 error_at (OMP_CLAUSE_LOCATION (c),
8060 "%<const%> qualified %qE without %<mutable%> member "
8061 "may appear only in %<shared%> or %<firstprivate%> "
8062 "clauses", omp_clause_printable_decl (t));
8063 remove = true;
8064 }
8065 }
8066
8067 /* We're interested in the base element, not arrays. */
8068 inner_type = type = TREE_TYPE (t);
8069 if ((need_complete_type
8070 || need_copy_assignment
8071 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8072 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
8073 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8074 && TYPE_REF_P (inner_type))
8075 inner_type = TREE_TYPE (inner_type);
8076 while (TREE_CODE (inner_type) == ARRAY_TYPE)
8077 inner_type = TREE_TYPE (inner_type);
8078
8079 /* Check for special function availability by building a call to one.
8080 Save the results, because later we won't be in the right context
8081 for making these queries. */
8082 if (CLASS_TYPE_P (inner_type)
8083 && COMPLETE_TYPE_P (inner_type)
8084 && (need_default_ctor || need_copy_ctor
8085 || need_copy_assignment || need_dtor)
8086 && !type_dependent_expression_p (t)
8087 && cxx_omp_create_clause_info (c, inner_type, need_default_ctor,
8088 need_copy_ctor, need_copy_assignment,
8089 need_dtor))
8090 remove = true;
8091
8092 if (!remove
8093 && c_kind == OMP_CLAUSE_SHARED
8094 && processing_template_decl)
8095 {
8096 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c));
8097 if (t)
8098 OMP_CLAUSE_DECL (c) = t;
8099 }
8100
8101 if (remove)
8102 *pc = OMP_CLAUSE_CHAIN (c);
8103 else
8104 pc = &OMP_CLAUSE_CHAIN (c);
8105 }
8106
8107 bitmap_obstack_release (NULL);
8108 return clauses;
8109 }
8110
8111 /* Start processing OpenMP clauses that can include any
8112 privatization clauses for non-static data members. */
8113
8114 tree
8115 push_omp_privatization_clauses (bool ignore_next)
8116 {
8117 if (omp_private_member_ignore_next)
8118 {
8119 omp_private_member_ignore_next = ignore_next;
8120 return NULL_TREE;
8121 }
8122 omp_private_member_ignore_next = ignore_next;
8123 if (omp_private_member_map)
8124 omp_private_member_vec.safe_push (error_mark_node);
8125 return push_stmt_list ();
8126 }
8127
8128 /* Revert remapping of any non-static data members since
8129 the last push_omp_privatization_clauses () call. */
8130
8131 void
8132 pop_omp_privatization_clauses (tree stmt)
8133 {
8134 if (stmt == NULL_TREE)
8135 return;
8136 stmt = pop_stmt_list (stmt);
8137 if (omp_private_member_map)
8138 {
8139 while (!omp_private_member_vec.is_empty ())
8140 {
8141 tree t = omp_private_member_vec.pop ();
8142 if (t == error_mark_node)
8143 {
8144 add_stmt (stmt);
8145 return;
8146 }
8147 bool no_decl_expr = t == integer_zero_node;
8148 if (no_decl_expr)
8149 t = omp_private_member_vec.pop ();
8150 tree *v = omp_private_member_map->get (t);
8151 gcc_assert (v);
8152 if (!no_decl_expr)
8153 add_decl_expr (*v);
8154 omp_private_member_map->remove (t);
8155 }
8156 delete omp_private_member_map;
8157 omp_private_member_map = NULL;
8158 }
8159 add_stmt (stmt);
8160 }
8161
8162 /* Remember the OpenMP privatization clause mapping and clear it.
8163 Used for lambdas. */
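/* Illustrative sketch, not part of the original sources: the kind of code
   that makes this necessary is a lambda appearing lexically inside an OpenMP
   construct that privatizes a non-static data member.  The member remapping
   recorded for the enclosing construct is saved before the lambda body is
   processed and restored afterwards, e.g.:

     struct S
     {
       int m;
       void f ()
       {
       #pragma omp parallel firstprivate (m)
	 {
	   auto g = [] (int x) { return x + 1; };
	   m = g (m);
	 }
       }
     };  */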
8164
8165 void
8166 save_omp_privatization_clauses (vec<tree> &save)
8167 {
8168 save = vNULL;
8169 if (omp_private_member_ignore_next)
8170 save.safe_push (integer_one_node);
8171 omp_private_member_ignore_next = false;
8172 if (!omp_private_member_map)
8173 return;
8174
8175 while (!omp_private_member_vec.is_empty ())
8176 {
8177 tree t = omp_private_member_vec.pop ();
8178 if (t == error_mark_node)
8179 {
8180 save.safe_push (t);
8181 continue;
8182 }
8183 tree n = t;
8184 if (t == integer_zero_node)
8185 t = omp_private_member_vec.pop ();
8186 tree *v = omp_private_member_map->get (t);
8187 gcc_assert (v);
8188 save.safe_push (*v);
8189 save.safe_push (t);
8190 if (n != t)
8191 save.safe_push (n);
8192 }
8193 delete omp_private_member_map;
8194 omp_private_member_map = NULL;
8195 }
8196
8197 /* Restore the OpenMP privatization clause mapping saved by the
8198 above function. */
8199
8200 void
8201 restore_omp_privatization_clauses (vec<tree> &save)
8202 {
8203 gcc_assert (omp_private_member_vec.is_empty ());
8204 omp_private_member_ignore_next = false;
8205 if (save.is_empty ())
8206 return;
8207 if (save.length () == 1 && save[0] == integer_one_node)
8208 {
8209 omp_private_member_ignore_next = true;
8210 save.release ();
8211 return;
8212 }
8213
8214 omp_private_member_map = new hash_map <tree, tree>;
8215 while (!save.is_empty ())
8216 {
8217 tree t = save.pop ();
8218 tree n = t;
8219 if (t != error_mark_node)
8220 {
8221 if (t == integer_one_node)
8222 {
8223 omp_private_member_ignore_next = true;
8224 gcc_assert (save.is_empty ());
8225 break;
8226 }
8227 if (t == integer_zero_node)
8228 t = save.pop ();
8229 tree &v = omp_private_member_map->get_or_insert (t);
8230 v = save.pop ();
8231 }
8232 omp_private_member_vec.safe_push (t);
8233 if (n != t)
8234 omp_private_member_vec.safe_push (n);
8235 }
8236 save.release ();
8237 }
8238
8239 /* For all variables in the tree_list VARS, mark them as thread local. */
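/* Illustrative example, not part of the original sources, of the diagnostics
   issued below:

     int g;
     #pragma omp threadprivate (g)	// OK: namespace-scope variable

     void f ()
     {
       int a;
       #pragma omp threadprivate (a)	// error: automatic variable
       static int s;
       #pragma omp threadprivate (s)	// OK: block-scope static
     }  */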
8240
8241 void
8242 finish_omp_threadprivate (tree vars)
8243 {
8244 tree t;
8245
8246 /* Mark every variable in VARS to be assigned thread local storage. */
8247 for (t = vars; t; t = TREE_CHAIN (t))
8248 {
8249 tree v = TREE_PURPOSE (t);
8250
8251 if (error_operand_p (v))
8252 ;
8253 else if (!VAR_P (v))
8254 error ("%<threadprivate%> %qD is not a file, namespace "
8255 "or block scope variable", v);
8256 /* If V had already been marked threadprivate, it doesn't matter
8257 whether it had been used prior to this point. */
8258 else if (TREE_USED (v)
8259 && (DECL_LANG_SPECIFIC (v) == NULL
8260 || !CP_DECL_THREADPRIVATE_P (v)))
8261 error ("%qE declared %<threadprivate%> after first use", v);
8262 else if (! TREE_STATIC (v) && ! DECL_EXTERNAL (v))
8263 error ("automatic variable %qE cannot be %<threadprivate%>", v);
8264 else if (! COMPLETE_TYPE_P (complete_type (TREE_TYPE (v))))
8265 error ("%<threadprivate%> %qE has incomplete type", v);
8266 else if (TREE_STATIC (v) && TYPE_P (CP_DECL_CONTEXT (v))
8267 && CP_DECL_CONTEXT (v) != current_class_type)
8268 error ("%<threadprivate%> %qE directive not "
8269 "in %qT definition", v, CP_DECL_CONTEXT (v));
8270 else
8271 {
8272 /* Allocate a LANG_SPECIFIC structure for V, if needed. */
8273 if (DECL_LANG_SPECIFIC (v) == NULL)
8274 retrofit_lang_decl (v);
8275
8276 if (! CP_DECL_THREAD_LOCAL_P (v))
8277 {
8278 CP_DECL_THREAD_LOCAL_P (v) = true;
8279 set_decl_tls_model (v, decl_default_tls_model (v));
8280 /* If rtl has been already set for this var, call
8281 make_decl_rtl once again, so that encode_section_info
8282 has a chance to look at the new decl flags. */
8283 if (DECL_RTL_SET_P (v))
8284 make_decl_rtl (v);
8285 }
8286 CP_DECL_THREADPRIVATE_P (v) = 1;
8287 }
8288 }
8289 }
8290
8291 /* Build an OpenMP structured block. */
8292
8293 tree
8294 begin_omp_structured_block (void)
8295 {
8296 return do_pushlevel (sk_omp);
8297 }
8298
8299 tree
8300 finish_omp_structured_block (tree block)
8301 {
8302 return do_poplevel (block);
8303 }
8304
8305 /* Similarly, except force the retention of the BLOCK. */
8306
8307 tree
8308 begin_omp_parallel (void)
8309 {
8310 keep_next_level (true);
8311 return begin_omp_structured_block ();
8312 }
8313
8314 /* Generate OACC_DATA, with CLAUSES and BLOCK as its compound
8315 statement. */
8316
8317 tree
8318 finish_oacc_data (tree clauses, tree block)
8319 {
8320 tree stmt;
8321
8322 block = finish_omp_structured_block (block);
8323
8324 stmt = make_node (OACC_DATA);
8325 TREE_TYPE (stmt) = void_type_node;
8326 OACC_DATA_CLAUSES (stmt) = clauses;
8327 OACC_DATA_BODY (stmt) = block;
8328
8329 return add_stmt (stmt);
8330 }
8331
8332 /* Generate OACC_HOST_DATA, with CLAUSES and BLOCK as its compound
8333 statement. */
8334
8335 tree
8336 finish_oacc_host_data (tree clauses, tree block)
8337 {
8338 tree stmt;
8339
8340 block = finish_omp_structured_block (block);
8341
8342 stmt = make_node (OACC_HOST_DATA);
8343 TREE_TYPE (stmt) = void_type_node;
8344 OACC_HOST_DATA_CLAUSES (stmt) = clauses;
8345 OACC_HOST_DATA_BODY (stmt) = block;
8346
8347 return add_stmt (stmt);
8348 }
8349
8350 /* Generate OMP construct CODE, with BODY and CLAUSES as its compound
8351 statement. */
8352
8353 tree
8354 finish_omp_construct (enum tree_code code, tree body, tree clauses)
8355 {
8356 body = finish_omp_structured_block (body);
8357
8358 tree stmt = make_node (code);
8359 TREE_TYPE (stmt) = void_type_node;
8360 OMP_BODY (stmt) = body;
8361 OMP_CLAUSES (stmt) = clauses;
8362
8363 return add_stmt (stmt);
8364 }
8365
8366 tree
8367 finish_omp_parallel (tree clauses, tree body)
8368 {
8369 tree stmt;
8370
8371 body = finish_omp_structured_block (body);
8372
8373 stmt = make_node (OMP_PARALLEL);
8374 TREE_TYPE (stmt) = void_type_node;
8375 OMP_PARALLEL_CLAUSES (stmt) = clauses;
8376 OMP_PARALLEL_BODY (stmt) = body;
8377
8378 return add_stmt (stmt);
8379 }
8380
8381 tree
8382 begin_omp_task (void)
8383 {
8384 keep_next_level (true);
8385 return begin_omp_structured_block ();
8386 }
8387
8388 tree
8389 finish_omp_task (tree clauses, tree body)
8390 {
8391 tree stmt;
8392
8393 body = finish_omp_structured_block (body);
8394
8395 stmt = make_node (OMP_TASK);
8396 TREE_TYPE (stmt) = void_type_node;
8397 OMP_TASK_CLAUSES (stmt) = clauses;
8398 OMP_TASK_BODY (stmt) = body;
8399
8400 return add_stmt (stmt);
8401 }
8402
8403 /* Helper function for finish_omp_for. Convert the Ith random access
8404 iterator into an integral iterator. Return FALSE if successful. */
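/* Illustrative example, not part of the original sources, of the loops this
   transformation applies to, using a random access iterator as the OpenMP
   loop variable:

     #include <vector>

     void f (std::vector<int> &v)
     {
     #pragma omp for
       for (std::vector<int>::iterator it = v.begin (); it != v.end (); ++it)
	 *it += 1;
     }

   The iterator is replaced by an integral counter over the distance computed
   from the controlling predicate, and the original iterator is advanced from
   that counter inside the loop body. */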
8405
8406 static bool
8407 handle_omp_for_class_iterator (int i, location_t locus, enum tree_code code,
8408 tree declv, tree orig_declv, tree initv,
8409 tree condv, tree incrv, tree *body,
8410 tree *pre_body, tree &clauses,
8411 int collapse, int ordered)
8412 {
8413 tree diff, iter_init, iter_incr = NULL, last;
8414 tree incr_var = NULL, orig_pre_body, orig_body, c;
8415 tree decl = TREE_VEC_ELT (declv, i);
8416 tree init = TREE_VEC_ELT (initv, i);
8417 tree cond = TREE_VEC_ELT (condv, i);
8418 tree incr = TREE_VEC_ELT (incrv, i);
8419 tree iter = decl;
8420 location_t elocus = locus;
8421
8422 if (init && EXPR_HAS_LOCATION (init))
8423 elocus = EXPR_LOCATION (init);
8424
8425 switch (TREE_CODE (cond))
8426 {
8427 case GT_EXPR:
8428 case GE_EXPR:
8429 case LT_EXPR:
8430 case LE_EXPR:
8431 case NE_EXPR:
8432 if (TREE_OPERAND (cond, 1) == iter)
8433 cond = build2 (swap_tree_comparison (TREE_CODE (cond)),
8434 TREE_TYPE (cond), iter, TREE_OPERAND (cond, 0));
8435 if (TREE_OPERAND (cond, 0) != iter)
8436 cond = error_mark_node;
8437 else
8438 {
8439 tree tem = build_x_binary_op (EXPR_LOCATION (cond),
8440 TREE_CODE (cond),
8441 iter, ERROR_MARK,
8442 TREE_OPERAND (cond, 1), ERROR_MARK,
8443 NULL, tf_warning_or_error);
8444 if (error_operand_p (tem))
8445 return true;
8446 }
8447 break;
8448 default:
8449 cond = error_mark_node;
8450 break;
8451 }
8452 if (cond == error_mark_node)
8453 {
8454 error_at (elocus, "invalid controlling predicate");
8455 return true;
8456 }
8457 diff = build_x_binary_op (elocus, MINUS_EXPR, TREE_OPERAND (cond, 1),
8458 ERROR_MARK, iter, ERROR_MARK, NULL,
8459 tf_warning_or_error);
8460 diff = cp_fully_fold (diff);
8461 if (error_operand_p (diff))
8462 return true;
8463 if (TREE_CODE (TREE_TYPE (diff)) != INTEGER_TYPE)
8464 {
8465 error_at (elocus, "difference between %qE and %qD does not have integer type",
8466 TREE_OPERAND (cond, 1), iter);
8467 return true;
8468 }
8469 if (!c_omp_check_loop_iv_exprs (locus, orig_declv,
8470 TREE_VEC_ELT (declv, i), NULL_TREE,
8471 cond, cp_walk_subtrees))
8472 return true;
8473
8474 switch (TREE_CODE (incr))
8475 {
8476 case PREINCREMENT_EXPR:
8477 case PREDECREMENT_EXPR:
8478 case POSTINCREMENT_EXPR:
8479 case POSTDECREMENT_EXPR:
8480 if (TREE_OPERAND (incr, 0) != iter)
8481 {
8482 incr = error_mark_node;
8483 break;
8484 }
8485 iter_incr = build_x_unary_op (EXPR_LOCATION (incr),
8486 TREE_CODE (incr), iter,
8487 tf_warning_or_error);
8488 if (error_operand_p (iter_incr))
8489 return true;
8490 else if (TREE_CODE (incr) == PREINCREMENT_EXPR
8491 || TREE_CODE (incr) == POSTINCREMENT_EXPR)
8492 incr = integer_one_node;
8493 else
8494 incr = integer_minus_one_node;
8495 break;
8496 case MODIFY_EXPR:
8497 if (TREE_OPERAND (incr, 0) != iter)
8498 incr = error_mark_node;
8499 else if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR
8500 || TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR)
8501 {
8502 tree rhs = TREE_OPERAND (incr, 1);
8503 if (TREE_OPERAND (rhs, 0) == iter)
8504 {
8505 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 1)))
8506 != INTEGER_TYPE)
8507 incr = error_mark_node;
8508 else
8509 {
8510 iter_incr = build_x_modify_expr (EXPR_LOCATION (rhs),
8511 iter, TREE_CODE (rhs),
8512 TREE_OPERAND (rhs, 1),
8513 tf_warning_or_error);
8514 if (error_operand_p (iter_incr))
8515 return true;
8516 incr = TREE_OPERAND (rhs, 1);
8517 incr = cp_convert (TREE_TYPE (diff), incr,
8518 tf_warning_or_error);
8519 if (TREE_CODE (rhs) == MINUS_EXPR)
8520 {
8521 incr = build1 (NEGATE_EXPR, TREE_TYPE (diff), incr);
8522 incr = fold_simple (incr);
8523 }
8524 if (TREE_CODE (incr) != INTEGER_CST
8525 && (TREE_CODE (incr) != NOP_EXPR
8526 || (TREE_CODE (TREE_OPERAND (incr, 0))
8527 != INTEGER_CST)))
8528 iter_incr = NULL;
8529 }
8530 }
8531 else if (TREE_OPERAND (rhs, 1) == iter)
8532 {
8533 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) != INTEGER_TYPE
8534 || TREE_CODE (rhs) != PLUS_EXPR)
8535 incr = error_mark_node;
8536 else
8537 {
8538 iter_incr = build_x_binary_op (EXPR_LOCATION (rhs),
8539 PLUS_EXPR,
8540 TREE_OPERAND (rhs, 0),
8541 ERROR_MARK, iter,
8542 ERROR_MARK, NULL,
8543 tf_warning_or_error);
8544 if (error_operand_p (iter_incr))
8545 return true;
8546 iter_incr = build_x_modify_expr (EXPR_LOCATION (rhs),
8547 iter, NOP_EXPR,
8548 iter_incr,
8549 tf_warning_or_error);
8550 if (error_operand_p (iter_incr))
8551 return true;
8552 incr = TREE_OPERAND (rhs, 0);
8553 iter_incr = NULL;
8554 }
8555 }
8556 else
8557 incr = error_mark_node;
8558 }
8559 else
8560 incr = error_mark_node;
8561 break;
8562 default:
8563 incr = error_mark_node;
8564 break;
8565 }
8566
8567 if (incr == error_mark_node)
8568 {
8569 error_at (elocus, "invalid increment expression");
8570 return true;
8571 }
8572
8573 incr = cp_convert (TREE_TYPE (diff), incr, tf_warning_or_error);
8574 incr = cp_fully_fold (incr);
8575 tree loop_iv_seen = NULL_TREE;
8576 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8577 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8578 && OMP_CLAUSE_DECL (c) == iter)
8579 {
8580 if (code == OMP_TASKLOOP || code == OMP_LOOP)
8581 {
8582 loop_iv_seen = c;
8583 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) = 1;
8584 }
8585 break;
8586 }
8587 else if ((code == OMP_TASKLOOP || code == OMP_LOOP)
8588 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
8589 && OMP_CLAUSE_DECL (c) == iter)
8590 {
8591 loop_iv_seen = c;
8592 if (code == OMP_TASKLOOP)
8593 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c) = 1;
8594 }
8595
8596 decl = create_temporary_var (TREE_TYPE (diff));
8597 pushdecl (decl);
8598 add_decl_expr (decl);
8599 last = create_temporary_var (TREE_TYPE (diff));
8600 pushdecl (last);
8601 add_decl_expr (last);
8602 if (c && iter_incr == NULL && TREE_CODE (incr) != INTEGER_CST
8603 && (!ordered || (i < collapse && collapse > 1)))
8604 {
8605 incr_var = create_temporary_var (TREE_TYPE (diff));
8606 pushdecl (incr_var);
8607 add_decl_expr (incr_var);
8608 }
8609 gcc_assert (stmts_are_full_exprs_p ());
8610 tree diffvar = NULL_TREE;
8611 if (code == OMP_TASKLOOP)
8612 {
8613 if (!loop_iv_seen)
8614 {
8615 tree ivc = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE);
8616 OMP_CLAUSE_DECL (ivc) = iter;
8617 cxx_omp_finish_clause (ivc, NULL);
8618 OMP_CLAUSE_CHAIN (ivc) = clauses;
8619 clauses = ivc;
8620 }
8621 tree lvc = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE);
8622 OMP_CLAUSE_DECL (lvc) = last;
8623 OMP_CLAUSE_CHAIN (lvc) = clauses;
8624 clauses = lvc;
8625 diffvar = create_temporary_var (TREE_TYPE (diff));
8626 pushdecl (diffvar);
8627 add_decl_expr (diffvar);
8628 }
8629 else if (code == OMP_LOOP)
8630 {
8631 if (!loop_iv_seen)
8632 {
8633 /* While iterators on the loop construct are predetermined
8634 lastprivate, if the decl is not declared inside of the
8635 loop, OMP_CLAUSE_LASTPRIVATE should have been added
8636 already. */
8637 loop_iv_seen = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE);
8638 OMP_CLAUSE_DECL (loop_iv_seen) = iter;
8639 OMP_CLAUSE_CHAIN (loop_iv_seen) = clauses;
8640 clauses = loop_iv_seen;
8641 }
8642 else if (OMP_CLAUSE_CODE (loop_iv_seen) == OMP_CLAUSE_PRIVATE)
8643 {
8644 OMP_CLAUSE_PRIVATE_DEBUG (loop_iv_seen) = 0;
8645 OMP_CLAUSE_PRIVATE_OUTER_REF (loop_iv_seen) = 0;
8646 OMP_CLAUSE_CODE (loop_iv_seen) = OMP_CLAUSE_FIRSTPRIVATE;
8647 }
8648 if (OMP_CLAUSE_CODE (loop_iv_seen) == OMP_CLAUSE_FIRSTPRIVATE)
8649 cxx_omp_finish_clause (loop_iv_seen, NULL);
8650 }
8651
8652 orig_pre_body = *pre_body;
8653 *pre_body = push_stmt_list ();
8654 if (orig_pre_body)
8655 add_stmt (orig_pre_body);
8656 if (init != NULL)
8657 finish_expr_stmt (build_x_modify_expr (elocus,
8658 iter, NOP_EXPR, init,
8659 tf_warning_or_error));
8660 init = build_int_cst (TREE_TYPE (diff), 0);
8661 if (c && iter_incr == NULL
8662 && (!ordered || (i < collapse && collapse > 1)))
8663 {
8664 if (incr_var)
8665 {
8666 finish_expr_stmt (build_x_modify_expr (elocus,
8667 incr_var, NOP_EXPR,
8668 incr, tf_warning_or_error));
8669 incr = incr_var;
8670 }
8671 iter_incr = build_x_modify_expr (elocus,
8672 iter, PLUS_EXPR, incr,
8673 tf_warning_or_error);
8674 }
8675 if (c && ordered && i < collapse && collapse > 1)
8676 iter_incr = incr;
8677 finish_expr_stmt (build_x_modify_expr (elocus,
8678 last, NOP_EXPR, init,
8679 tf_warning_or_error));
8680 if (diffvar)
8681 {
8682 finish_expr_stmt (build_x_modify_expr (elocus,
8683 diffvar, NOP_EXPR,
8684 diff, tf_warning_or_error));
8685 diff = diffvar;
8686 }
8687 *pre_body = pop_stmt_list (*pre_body);
8688
8689 cond = cp_build_binary_op (elocus,
8690 TREE_CODE (cond), decl, diff,
8691 tf_warning_or_error);
8692 incr = build_modify_expr (elocus, decl, NULL_TREE, PLUS_EXPR,
8693 elocus, incr, NULL_TREE);
8694
8695 orig_body = *body;
8696 *body = push_stmt_list ();
8697 iter_init = build2 (MINUS_EXPR, TREE_TYPE (diff), decl, last);
8698 iter_init = build_x_modify_expr (elocus,
8699 iter, PLUS_EXPR, iter_init,
8700 tf_warning_or_error);
8701 if (iter_init != error_mark_node)
8702 iter_init = build1 (NOP_EXPR, void_type_node, iter_init);
8703 finish_expr_stmt (iter_init);
8704 finish_expr_stmt (build_x_modify_expr (elocus,
8705 last, NOP_EXPR, decl,
8706 tf_warning_or_error));
8707 add_stmt (orig_body);
8708 *body = pop_stmt_list (*body);
8709
8710 if (c)
8711 {
8712 OMP_CLAUSE_LASTPRIVATE_STMT (c) = push_stmt_list ();
8713 if (!ordered)
8714 finish_expr_stmt (iter_incr);
8715 else
8716 {
8717 iter_init = decl;
8718 if (i < collapse && collapse > 1 && !error_operand_p (iter_incr))
8719 iter_init = build2 (PLUS_EXPR, TREE_TYPE (diff),
8720 iter_init, iter_incr);
8721 iter_init = build2 (MINUS_EXPR, TREE_TYPE (diff), iter_init, last);
8722 iter_init = build_x_modify_expr (elocus,
8723 iter, PLUS_EXPR, iter_init,
8724 tf_warning_or_error);
8725 if (iter_init != error_mark_node)
8726 iter_init = build1 (NOP_EXPR, void_type_node, iter_init);
8727 finish_expr_stmt (iter_init);
8728 }
8729 OMP_CLAUSE_LASTPRIVATE_STMT (c)
8730 = pop_stmt_list (OMP_CLAUSE_LASTPRIVATE_STMT (c));
8731 }
8732
8733 if (TREE_CODE (TREE_VEC_ELT (orig_declv, i)) == TREE_LIST)
8734 {
8735 tree t = TREE_VEC_ELT (orig_declv, i);
8736 gcc_assert (TREE_PURPOSE (t) == NULL_TREE
8737 && TREE_VALUE (t) == NULL_TREE
8738 && TREE_CODE (TREE_CHAIN (t)) == TREE_VEC);
8739 TREE_PURPOSE (t) = TREE_VEC_ELT (declv, i);
8740 TREE_VALUE (t) = last;
8741 }
8742 else
8743 TREE_VEC_ELT (orig_declv, i)
8744 = tree_cons (TREE_VEC_ELT (declv, i), last, NULL_TREE);
8745 TREE_VEC_ELT (declv, i) = decl;
8746 TREE_VEC_ELT (initv, i) = init;
8747 TREE_VEC_ELT (condv, i) = cond;
8748 TREE_VEC_ELT (incrv, i) = incr;
8749
8750 return false;
8751 }
8752
8753 /* Build and validate an OMP_FOR statement. CLAUSES, BODY, COND, INCR
8754 are used directly as their associated operands in the statement. DECL
8755 and INIT are a combo; if DECL is NULL then INIT ought to be a
8756 MODIFY_EXPR from which the DECL should be extracted. PRE_BODY is an
8757 optional list of statements that needs to go before the loop into its
8758 sk_omp scope. */
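/* Illustrative example, not part of the original sources: for a collapsed
   loop nest such as

     #pragma omp for collapse(2)
     for (int i = 0; i < n; i++)
       for (int j = 0; j < m; j++)
	 a[i][j] = 0;

   DECLV, INITV, CONDV and INCRV are TREE_VECs with one element per collapsed
   loop (here i and j), and PRE_BODY collects statements that must run before
   the loop nest. */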
8759
8760 tree
8761 finish_omp_for (location_t locus, enum tree_code code, tree declv,
8762 tree orig_declv, tree initv, tree condv, tree incrv,
8763 tree body, tree pre_body, vec<tree> *orig_inits, tree clauses)
8764 {
8765 tree omp_for = NULL, orig_incr = NULL;
8766 tree decl = NULL, init, cond, incr;
8767 location_t elocus;
8768 int i;
8769 int collapse = 1;
8770 int ordered = 0;
8771
8772 gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (initv));
8773 gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (condv));
8774 gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (incrv));
8775 if (TREE_VEC_LENGTH (declv) > 1)
8776 {
8777 tree c;
8778
8779 c = omp_find_clause (clauses, OMP_CLAUSE_TILE);
8780 if (c)
8781 collapse = list_length (OMP_CLAUSE_TILE_LIST (c));
8782 else
8783 {
8784 c = omp_find_clause (clauses, OMP_CLAUSE_COLLAPSE);
8785 if (c)
8786 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
8787 if (collapse != TREE_VEC_LENGTH (declv))
8788 ordered = TREE_VEC_LENGTH (declv);
8789 }
8790 }
8791 for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
8792 {
8793 decl = TREE_VEC_ELT (declv, i);
8794 init = TREE_VEC_ELT (initv, i);
8795 cond = TREE_VEC_ELT (condv, i);
8796 incr = TREE_VEC_ELT (incrv, i);
8797 elocus = locus;
8798
8799 if (decl == NULL)
8800 {
8801 if (init != NULL)
8802 switch (TREE_CODE (init))
8803 {
8804 case MODIFY_EXPR:
8805 decl = TREE_OPERAND (init, 0);
8806 init = TREE_OPERAND (init, 1);
8807 break;
8808 case MODOP_EXPR:
8809 if (TREE_CODE (TREE_OPERAND (init, 1)) == NOP_EXPR)
8810 {
8811 decl = TREE_OPERAND (init, 0);
8812 init = TREE_OPERAND (init, 2);
8813 }
8814 break;
8815 default:
8816 break;
8817 }
8818
8819 if (decl == NULL)
8820 {
8821 error_at (locus,
8822 "expected iteration declaration or initialization");
8823 return NULL;
8824 }
8825 }
8826
8827 if (init && EXPR_HAS_LOCATION (init))
8828 elocus = EXPR_LOCATION (init);
8829
8830 if (cond == global_namespace)
8831 continue;
8832
8833 if (cond == NULL)
8834 {
8835 error_at (elocus, "missing controlling predicate");
8836 return NULL;
8837 }
8838
8839 if (incr == NULL)
8840 {
8841 error_at (elocus, "missing increment expression");
8842 return NULL;
8843 }
8844
8845 TREE_VEC_ELT (declv, i) = decl;
8846 TREE_VEC_ELT (initv, i) = init;
8847 }
8848
8849 if (orig_inits)
8850 {
8851 bool fail = false;
8852 tree orig_init;
8853 FOR_EACH_VEC_ELT (*orig_inits, i, orig_init)
8854 if (orig_init
8855 && !c_omp_check_loop_iv_exprs (locus, orig_declv
8856 ? orig_declv : declv,
8857 TREE_VEC_ELT (declv, i), orig_init,
8858 NULL_TREE, cp_walk_subtrees))
8859 fail = true;
8860 if (fail)
8861 return NULL;
8862 }
8863
8864 if (dependent_omp_for_p (declv, initv, condv, incrv))
8865 {
8866 tree stmt;
8867
8868 stmt = make_node (code);
8869
8870 for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
8871 {
8872 /* This is really just a place-holder. We'll be decomposing this
8873 again and going through the cp_build_modify_expr path below when
8874 we instantiate the thing. */
8875 TREE_VEC_ELT (initv, i)
8876 = build2 (MODIFY_EXPR, void_type_node, TREE_VEC_ELT (declv, i),
8877 TREE_VEC_ELT (initv, i));
8878 }
8879
8880 TREE_TYPE (stmt) = void_type_node;
8881 OMP_FOR_INIT (stmt) = initv;
8882 OMP_FOR_COND (stmt) = condv;
8883 OMP_FOR_INCR (stmt) = incrv;
8884 OMP_FOR_BODY (stmt) = body;
8885 OMP_FOR_PRE_BODY (stmt) = pre_body;
8886 OMP_FOR_CLAUSES (stmt) = clauses;
8887
8888 SET_EXPR_LOCATION (stmt, locus);
8889 return add_stmt (stmt);
8890 }
8891
8892 if (!orig_declv)
8893 orig_declv = copy_node (declv);
8894
8895 if (processing_template_decl)
8896 orig_incr = make_tree_vec (TREE_VEC_LENGTH (incrv));
8897
8898 for (i = 0; i < TREE_VEC_LENGTH (declv); )
8899 {
8900 decl = TREE_VEC_ELT (declv, i);
8901 init = TREE_VEC_ELT (initv, i);
8902 cond = TREE_VEC_ELT (condv, i);
8903 incr = TREE_VEC_ELT (incrv, i);
8904 if (orig_incr)
8905 TREE_VEC_ELT (orig_incr, i) = incr;
8906 elocus = locus;
8907
8908 if (init && EXPR_HAS_LOCATION (init))
8909 elocus = EXPR_LOCATION (init);
8910
8911 if (!DECL_P (decl))
8912 {
8913 error_at (elocus, "expected iteration declaration or initialization");
8914 return NULL;
8915 }
8916
8917 if (incr && TREE_CODE (incr) == MODOP_EXPR)
8918 {
8919 if (orig_incr)
8920 TREE_VEC_ELT (orig_incr, i) = incr;
8921 incr = cp_build_modify_expr (elocus, TREE_OPERAND (incr, 0),
8922 TREE_CODE (TREE_OPERAND (incr, 1)),
8923 TREE_OPERAND (incr, 2),
8924 tf_warning_or_error);
8925 }
8926
8927 if (CLASS_TYPE_P (TREE_TYPE (decl)))
8928 {
8929 if (code == OMP_SIMD)
8930 {
8931 error_at (elocus, "%<#pragma omp simd%> used with class "
8932 "iteration variable %qE", decl);
8933 return NULL;
8934 }
8935 if (handle_omp_for_class_iterator (i, locus, code, declv, orig_declv,
8936 initv, condv, incrv, &body,
8937 &pre_body, clauses,
8938 collapse, ordered))
8939 return NULL;
8940 continue;
8941 }
8942
8943 if (!INTEGRAL_TYPE_P (TREE_TYPE (decl))
8944 && !TYPE_PTR_P (TREE_TYPE (decl)))
8945 {
8946 error_at (elocus, "invalid type for iteration variable %qE", decl);
8947 return NULL;
8948 }
8949
8950 if (!processing_template_decl)
8951 {
8952 init = fold_build_cleanup_point_expr (TREE_TYPE (init), init);
8953 init = cp_build_modify_expr (elocus, decl, NOP_EXPR, init,
8954 tf_warning_or_error);
8955 }
8956 else
8957 init = build2 (MODIFY_EXPR, void_type_node, decl, init);
8958 if (cond
8959 && TREE_SIDE_EFFECTS (cond)
8960 && COMPARISON_CLASS_P (cond)
8961 && !processing_template_decl)
8962 {
8963 tree t = TREE_OPERAND (cond, 0);
8964 if (TREE_SIDE_EFFECTS (t)
8965 && t != decl
8966 && (TREE_CODE (t) != NOP_EXPR
8967 || TREE_OPERAND (t, 0) != decl))
8968 TREE_OPERAND (cond, 0)
8969 = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8970
8971 t = TREE_OPERAND (cond, 1);
8972 if (TREE_SIDE_EFFECTS (t)
8973 && t != decl
8974 && (TREE_CODE (t) != NOP_EXPR
8975 || TREE_OPERAND (t, 0) != decl))
8976 TREE_OPERAND (cond, 1)
8977 = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8978 }
8979 if (decl == error_mark_node || init == error_mark_node)
8980 return NULL;
8981
8982 TREE_VEC_ELT (declv, i) = decl;
8983 TREE_VEC_ELT (initv, i) = init;
8984 TREE_VEC_ELT (condv, i) = cond;
8985 TREE_VEC_ELT (incrv, i) = incr;
8986 i++;
8987 }
8988
8989 if (pre_body && IS_EMPTY_STMT (pre_body))
8990 pre_body = NULL;
8991
8992 omp_for = c_finish_omp_for (locus, code, declv, orig_declv, initv, condv,
8993 incrv, body, pre_body,
8994 !processing_template_decl);
8995
8996 /* Check for iterators appearing in lower bound, upper bound or increment expressions. */
8997 if (omp_for && !c_omp_check_loop_iv (omp_for, orig_declv, cp_walk_subtrees))
8998 omp_for = NULL_TREE;
8999
9000 if (omp_for == NULL)
9001 return NULL;
9002
9003 add_stmt (omp_for);
9004
9005 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INCR (omp_for)); i++)
9006 {
9007 decl = TREE_OPERAND (TREE_VEC_ELT (OMP_FOR_INIT (omp_for), i), 0);
9008 incr = TREE_VEC_ELT (OMP_FOR_INCR (omp_for), i);
9009
9010 if (TREE_CODE (incr) != MODIFY_EXPR)
9011 continue;
9012
9013 if (TREE_SIDE_EFFECTS (TREE_OPERAND (incr, 1))
9014 && BINARY_CLASS_P (TREE_OPERAND (incr, 1))
9015 && !processing_template_decl)
9016 {
9017 tree t = TREE_OPERAND (TREE_OPERAND (incr, 1), 0);
9018 if (TREE_SIDE_EFFECTS (t)
9019 && t != decl
9020 && (TREE_CODE (t) != NOP_EXPR
9021 || TREE_OPERAND (t, 0) != decl))
9022 TREE_OPERAND (TREE_OPERAND (incr, 1), 0)
9023 = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
9024
9025 t = TREE_OPERAND (TREE_OPERAND (incr, 1), 1);
9026 if (TREE_SIDE_EFFECTS (t)
9027 && t != decl
9028 && (TREE_CODE (t) != NOP_EXPR
9029 || TREE_OPERAND (t, 0) != decl))
9030 TREE_OPERAND (TREE_OPERAND (incr, 1), 1)
9031 = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
9032 }
9033
9034 if (orig_incr)
9035 TREE_VEC_ELT (OMP_FOR_INCR (omp_for), i) = TREE_VEC_ELT (orig_incr, i);
9036 }
9037 OMP_FOR_CLAUSES (omp_for) = clauses;
9038
9039 /* For simd loops with non-static data member iterators, we could have added
9040 OMP_CLAUSE_LINEAR clauses without OMP_CLAUSE_LINEAR_STEP. As we know the
9041 step at this point, fill it in. */
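  /* Illustrative example, not part of the original sources, of a simd loop
     whose iteration variable is a non-static data member, the case the loop
     below fills the linear step for:

       struct S
       {
	 int i;
	 void f (int *a)
	 {
	 #pragma omp simd
	   for (i = 0; i < 64; i++)
	     a[i] += 1;
	 }
       };  */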
9042 if (code == OMP_SIMD && !processing_template_decl
9043 && TREE_VEC_LENGTH (OMP_FOR_INCR (omp_for)) == 1)
9044 for (tree c = omp_find_clause (clauses, OMP_CLAUSE_LINEAR); c;
9045 c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_LINEAR))
9046 if (OMP_CLAUSE_LINEAR_STEP (c) == NULL_TREE)
9047 {
9048 decl = TREE_OPERAND (TREE_VEC_ELT (OMP_FOR_INIT (omp_for), 0), 0);
9049 gcc_assert (decl == OMP_CLAUSE_DECL (c));
9050 incr = TREE_VEC_ELT (OMP_FOR_INCR (omp_for), 0);
9051 tree step, stept;
9052 switch (TREE_CODE (incr))
9053 {
9054 case PREINCREMENT_EXPR:
9055 case POSTINCREMENT_EXPR:
9056 /* c_omp_for_incr_canonicalize_ptr() should have been
9057 called to massage things appropriately. */
9058 gcc_assert (!INDIRECT_TYPE_P (TREE_TYPE (decl)));
9059 OMP_CLAUSE_LINEAR_STEP (c) = build_int_cst (TREE_TYPE (decl), 1);
9060 break;
9061 case PREDECREMENT_EXPR:
9062 case POSTDECREMENT_EXPR:
9063 /* c_omp_for_incr_canonicalize_ptr() should have been
9064 called to massage things appropriately. */
9065 gcc_assert (!INDIRECT_TYPE_P (TREE_TYPE (decl)));
9066 OMP_CLAUSE_LINEAR_STEP (c)
9067 = build_int_cst (TREE_TYPE (decl), -1);
9068 break;
9069 case MODIFY_EXPR:
9070 gcc_assert (TREE_OPERAND (incr, 0) == decl);
9071 incr = TREE_OPERAND (incr, 1);
9072 switch (TREE_CODE (incr))
9073 {
9074 case PLUS_EXPR:
9075 if (TREE_OPERAND (incr, 1) == decl)
9076 step = TREE_OPERAND (incr, 0);
9077 else
9078 step = TREE_OPERAND (incr, 1);
9079 break;
9080 case MINUS_EXPR:
9081 case POINTER_PLUS_EXPR:
9082 gcc_assert (TREE_OPERAND (incr, 0) == decl);
9083 step = TREE_OPERAND (incr, 1);
9084 break;
9085 default:
9086 gcc_unreachable ();
9087 }
9088 stept = TREE_TYPE (decl);
9089 if (INDIRECT_TYPE_P (stept))
9090 stept = sizetype;
9091 step = fold_convert (stept, step);
9092 if (TREE_CODE (incr) == MINUS_EXPR)
9093 step = fold_build1 (NEGATE_EXPR, stept, step);
9094 OMP_CLAUSE_LINEAR_STEP (c) = step;
9095 break;
9096 default:
9097 gcc_unreachable ();
9098 }
9099 }
9100 /* Override saved methods on OMP_LOOP's OMP_CLAUSE_LASTPRIVATE_LOOP_IV
9101 clauses; we need the copy ctor for those rather than the default ctor,
9102 plus, as for other lastprivates, the assignment op and dtor. */
9103 if (code == OMP_LOOP && !processing_template_decl)
9104 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9105 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9106 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
9107 && cxx_omp_create_clause_info (c, TREE_TYPE (OMP_CLAUSE_DECL (c)),
9108 false, true, true, true))
9109 CP_OMP_CLAUSE_INFO (c) = NULL_TREE;
9110
9111 return omp_for;
9112 }
9113
9114 /* Fix up range-for decls. Those decls were pushed into BIND's BIND_EXPR_VARS
9115 and need to be moved into the BIND_EXPR inside of the OMP_FOR's body. */
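/* Illustrative example, not part of the original sources: the decls needing
   this fixup come from range-based for loops used as OpenMP loops, e.g.

     void f (int (&arr)[8])
     {
     #pragma omp parallel for
       for (int &x : arr)
	 x += 1;
     }

   The compiler-generated range/begin/end temporaries were pushed into BIND
   and are moved here into a BIND_EXPR inside the OMP_FOR body. */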
9116
9117 tree
9118 finish_omp_for_block (tree bind, tree omp_for)
9119 {
9120 if (omp_for == NULL_TREE
9121 || !OMP_FOR_ORIG_DECLS (omp_for)
9122 || bind == NULL_TREE
9123 || TREE_CODE (bind) != BIND_EXPR)
9124 return bind;
9125 tree b = NULL_TREE;
9126 for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (omp_for)); i++)
9127 if (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (omp_for), i)) == TREE_LIST
9128 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (omp_for), i)))
9129 {
9130 tree v = TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (omp_for), i));
9131 gcc_assert (BIND_EXPR_BLOCK (bind)
9132 && (BIND_EXPR_VARS (bind)
9133 == BLOCK_VARS (BIND_EXPR_BLOCK (bind))));
9134 for (int j = 2; j < TREE_VEC_LENGTH (v); j++)
9135 for (tree *p = &BIND_EXPR_VARS (bind); *p; p = &DECL_CHAIN (*p))
9136 {
9137 if (*p == TREE_VEC_ELT (v, j))
9138 {
9139 tree var = *p;
9140 *p = DECL_CHAIN (*p);
9141 if (b == NULL_TREE)
9142 {
9143 b = make_node (BLOCK);
9144 b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
9145 OMP_FOR_BODY (omp_for), b);
9146 TREE_SIDE_EFFECTS (b) = 1;
9147 OMP_FOR_BODY (omp_for) = b;
9148 }
9149 DECL_CHAIN (var) = BIND_EXPR_VARS (b);
9150 BIND_EXPR_VARS (b) = var;
9151 BLOCK_VARS (BIND_EXPR_BLOCK (b)) = var;
9152 }
9153 }
9154 BLOCK_VARS (BIND_EXPR_BLOCK (bind)) = BIND_EXPR_VARS (bind);
9155 }
9156 return bind;
9157 }
9158
9159 void
9160 finish_omp_atomic (location_t loc, enum tree_code code, enum tree_code opcode,
9161 tree lhs, tree rhs, tree v, tree lhs1, tree rhs1,
9162 tree clauses, enum omp_memory_order mo)
9163 {
9164 tree orig_lhs;
9165 tree orig_rhs;
9166 tree orig_v;
9167 tree orig_lhs1;
9168 tree orig_rhs1;
9169 bool dependent_p;
9170 tree stmt;
9171
9172 orig_lhs = lhs;
9173 orig_rhs = rhs;
9174 orig_v = v;
9175 orig_lhs1 = lhs1;
9176 orig_rhs1 = rhs1;
9177 dependent_p = false;
9178 stmt = NULL_TREE;
9179
9180 /* Even in a template, we can detect invalid uses of the atomic
9181 pragma if neither LHS nor RHS is type-dependent. */
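  /* Illustrative example, not part of the original sources: even inside a
     template, a mismatch like the one below is diagnosed at definition time
     because neither operand is type-dependent:

       int x, y, v;

       template <typename T>
       void f ()
       {
       #pragma omp atomic capture
	 { v = x; y = y + 1; }	// error: two different expressions for memory
       }  */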
9182 if (processing_template_decl)
9183 {
9184 dependent_p = (type_dependent_expression_p (lhs)
9185 || (rhs && type_dependent_expression_p (rhs))
9186 || (v && type_dependent_expression_p (v))
9187 || (lhs1 && type_dependent_expression_p (lhs1))
9188 || (rhs1 && type_dependent_expression_p (rhs1)));
9189 if (clauses)
9190 {
9191 gcc_assert (TREE_CODE (clauses) == OMP_CLAUSE
9192 && OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_HINT
9193 && OMP_CLAUSE_CHAIN (clauses) == NULL_TREE);
9194 if (type_dependent_expression_p (OMP_CLAUSE_HINT_EXPR (clauses))
9195 || TREE_CODE (OMP_CLAUSE_HINT_EXPR (clauses)) != INTEGER_CST)
9196 dependent_p = true;
9197 }
9198 if (!dependent_p)
9199 {
9200 lhs = build_non_dependent_expr (lhs);
9201 if (rhs)
9202 rhs = build_non_dependent_expr (rhs);
9203 if (v)
9204 v = build_non_dependent_expr (v);
9205 if (lhs1)
9206 lhs1 = build_non_dependent_expr (lhs1);
9207 if (rhs1)
9208 rhs1 = build_non_dependent_expr (rhs1);
9209 }
9210 }
9211 if (!dependent_p)
9212 {
9213 bool swapped = false;
9214 if (rhs1 && cp_tree_equal (lhs, rhs))
9215 {
9216 std::swap (rhs, rhs1);
9217 swapped = !commutative_tree_code (opcode);
9218 }
9219 if (rhs1 && !cp_tree_equal (lhs, rhs1))
9220 {
9221 if (code == OMP_ATOMIC)
9222 error ("%<#pragma omp atomic update%> uses two different "
9223 "expressions for memory");
9224 else
9225 error ("%<#pragma omp atomic capture%> uses two different "
9226 "expressions for memory");
9227 return;
9228 }
9229 if (lhs1 && !cp_tree_equal (lhs, lhs1))
9230 {
9231 if (code == OMP_ATOMIC)
9232 error ("%<#pragma omp atomic update%> uses two different "
9233 "expressions for memory");
9234 else
9235 error ("%<#pragma omp atomic capture%> uses two different "
9236 "expressions for memory");
9237 return;
9238 }
9239 stmt = c_finish_omp_atomic (loc, code, opcode, lhs, rhs,
9240 v, lhs1, rhs1, swapped, mo,
9241 processing_template_decl != 0);
9242 if (stmt == error_mark_node)
9243 return;
9244 }
9245 if (processing_template_decl)
9246 {
9247 if (code == OMP_ATOMIC_READ)
9248 {
9249 stmt = build_min_nt_loc (loc, OMP_ATOMIC_READ, orig_lhs);
9250 OMP_ATOMIC_MEMORY_ORDER (stmt) = mo;
9251 stmt = build2 (MODIFY_EXPR, void_type_node, orig_v, stmt);
9252 }
9253 else
9254 {
9255 if (opcode == NOP_EXPR)
9256 stmt = build2 (MODIFY_EXPR, void_type_node, orig_lhs, orig_rhs);
9257 else
9258 stmt = build2 (opcode, void_type_node, orig_lhs, orig_rhs);
9259 if (orig_rhs1)
9260 stmt = build_min_nt_loc (EXPR_LOCATION (orig_rhs1),
9261 COMPOUND_EXPR, orig_rhs1, stmt);
9262 if (code != OMP_ATOMIC)
9263 {
9264 stmt = build_min_nt_loc (loc, code, orig_lhs1, stmt);
9265 OMP_ATOMIC_MEMORY_ORDER (stmt) = mo;
9266 stmt = build2 (MODIFY_EXPR, void_type_node, orig_v, stmt);
9267 }
9268 }
9269 stmt = build2 (OMP_ATOMIC, void_type_node,
9270 clauses ? clauses : integer_zero_node, stmt);
9271 OMP_ATOMIC_MEMORY_ORDER (stmt) = mo;
9272 SET_EXPR_LOCATION (stmt, loc);
9273 }
9274
9275 /* Avoid -Wunused-value warnings here: the whole construct has side-effects,
9276    and even if fold-const.c or c-omp.c wraps it in some tree that appears
9277    to be unused, the value is not unused.  */
9278 warning_sentinel w (warn_unused_value);
9279 finish_expr_stmt (stmt);
9280 }
9281
9282 void
9283 finish_omp_barrier (void)
9284 {
9285 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
9286 releasing_vec vec;
9287 tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
9288 finish_expr_stmt (stmt);
9289 }
9290
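/* Finish a #pragma omp depobj construct.  An illustrative OpenMP 5.0
   example, assuming an omp_depobj_t variable o:

     #pragma omp depobj (o) depend (inout: x)
     #pragma omp depobj (o) update (in)
     #pragma omp depobj (o) destroy

   DEPOBJ is the depend object expression, KIND the dependence kind and
   CLAUSE the depend clause, if any.  */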
9291 void
9292 finish_omp_depobj (location_t loc, tree depobj,
9293 enum omp_clause_depend_kind kind, tree clause)
9294 {
9295 if (!error_operand_p (depobj) && !type_dependent_expression_p (depobj))
9296 {
9297 if (!lvalue_p (depobj))
9298 {
9299 error_at (EXPR_LOC_OR_LOC (depobj, loc),
9300 		    "%<depobj%> expression is not an lvalue expression");
9301 depobj = error_mark_node;
9302 }
9303 }
9304
9305 if (processing_template_decl)
9306 {
9307 if (clause == NULL_TREE)
9308 clause = build_int_cst (integer_type_node, kind);
9309 add_stmt (build_min_nt_loc (loc, OMP_DEPOBJ, depobj, clause));
9310 return;
9311 }
9312
9313 if (!error_operand_p (depobj))
9314 {
9315 tree addr = cp_build_addr_expr (depobj, tf_warning_or_error);
9316 if (addr == error_mark_node)
9317 depobj = error_mark_node;
9318 else
9319 depobj = cp_build_indirect_ref (loc, addr, RO_UNARY_STAR,
9320 tf_warning_or_error);
9321 }
9322
9323 c_finish_omp_depobj (loc, depobj, kind, clause);
9324 }
9325
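/* Finish a #pragma omp flush construct.  A plain

     #pragma omp flush

   becomes a __sync_synchronize call, while an OpenMP 5.0 memory-order
   variant such as

     #pragma omp flush acq_rel

   is lowered to __atomic_thread_fence with the corresponding memory
   model.  */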
9326 void
9327 finish_omp_flush (int mo)
9328 {
9329 tree fn = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
9330 releasing_vec vec;
9331 if (mo != MEMMODEL_LAST)
9332 {
9333 fn = builtin_decl_explicit (BUILT_IN_ATOMIC_THREAD_FENCE);
9334 vec->quick_push (build_int_cst (integer_type_node, mo));
9335 }
9336 tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
9337 finish_expr_stmt (stmt);
9338 }
9339
9340 void
9341 finish_omp_taskwait (void)
9342 {
9343 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
9344 releasing_vec vec;
9345 tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
9346 finish_expr_stmt (stmt);
9347 }
9348
9349 void
9350 finish_omp_taskyield (void)
9351 {
9352 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
9353 releasing_vec vec;
9354 tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
9355 finish_expr_stmt (stmt);
9356 }
9357
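/* Finish a #pragma omp cancel construct, e.g.

     #pragma omp cancel for if (cond)

   which is lowered to a GOMP_cancel call; the mask computed below encodes
   which of the parallel/for/sections/taskgroup constructs is being
   cancelled.  */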
9358 void
9359 finish_omp_cancel (tree clauses)
9360 {
9361 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
9362 int mask = 0;
9363 if (omp_find_clause (clauses, OMP_CLAUSE_PARALLEL))
9364 mask = 1;
9365 else if (omp_find_clause (clauses, OMP_CLAUSE_FOR))
9366 mask = 2;
9367 else if (omp_find_clause (clauses, OMP_CLAUSE_SECTIONS))
9368 mask = 4;
9369 else if (omp_find_clause (clauses, OMP_CLAUSE_TASKGROUP))
9370 mask = 8;
9371 else
9372 {
9373 error ("%<#pragma omp cancel%> must specify one of "
9374 "%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> clauses");
9375 return;
9376 }
9377 releasing_vec vec;
9378 tree ifc = omp_find_clause (clauses, OMP_CLAUSE_IF);
9379 if (ifc != NULL_TREE)
9380 {
9381 if (OMP_CLAUSE_IF_MODIFIER (ifc) != ERROR_MARK
9382 && OMP_CLAUSE_IF_MODIFIER (ifc) != VOID_CST)
9383 error_at (OMP_CLAUSE_LOCATION (ifc),
9384 "expected %<cancel%> %<if%> clause modifier");
9385 else
9386 {
9387 tree ifc2 = omp_find_clause (OMP_CLAUSE_CHAIN (ifc), OMP_CLAUSE_IF);
9388 if (ifc2 != NULL_TREE)
9389 {
9390 gcc_assert (OMP_CLAUSE_IF_MODIFIER (ifc) == VOID_CST
9391 && OMP_CLAUSE_IF_MODIFIER (ifc2) != ERROR_MARK
9392 && OMP_CLAUSE_IF_MODIFIER (ifc2) != VOID_CST);
9393 error_at (OMP_CLAUSE_LOCATION (ifc2),
9394 "expected %<cancel%> %<if%> clause modifier");
9395 }
9396 }
9397
9398 if (!processing_template_decl)
9399 ifc = maybe_convert_cond (OMP_CLAUSE_IF_EXPR (ifc));
9400 else
9401 ifc = build_x_binary_op (OMP_CLAUSE_LOCATION (ifc), NE_EXPR,
9402 OMP_CLAUSE_IF_EXPR (ifc), ERROR_MARK,
9403 integer_zero_node, ERROR_MARK,
9404 NULL, tf_warning_or_error);
9405 }
9406 else
9407 ifc = boolean_true_node;
9408 vec->quick_push (build_int_cst (integer_type_node, mask));
9409 vec->quick_push (ifc);
9410 tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
9411 finish_expr_stmt (stmt);
9412 }
9413
9414 void
9415 finish_omp_cancellation_point (tree clauses)
9416 {
9417 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT);
9418 int mask = 0;
9419 if (omp_find_clause (clauses, OMP_CLAUSE_PARALLEL))
9420 mask = 1;
9421 else if (omp_find_clause (clauses, OMP_CLAUSE_FOR))
9422 mask = 2;
9423 else if (omp_find_clause (clauses, OMP_CLAUSE_SECTIONS))
9424 mask = 4;
9425 else if (omp_find_clause (clauses, OMP_CLAUSE_TASKGROUP))
9426 mask = 8;
9427 else
9428 {
9429 error ("%<#pragma omp cancellation point%> must specify one of "
9430 "%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> clauses");
9431 return;
9432 }
9433 releasing_vec vec
9434 = make_tree_vector_single (build_int_cst (integer_type_node, mask));
9435 tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
9436 finish_expr_stmt (stmt);
9437 }
9438 \f
9439 /* Begin a __transaction_atomic or __transaction_relaxed statement.
9440 If PCOMPOUND is non-null, this is for a function-transaction-block, and we
9441 should create an extra compound stmt. */
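/* For example (a sketch):

     __transaction_atomic { x++; }
     __transaction_relaxed { f (); }

   For a function-transaction-block the transaction wraps the entire function
   body, and PCOMPOUND is used for the extra compound statement.  */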
9442
9443 tree
9444 begin_transaction_stmt (location_t loc, tree *pcompound, int flags)
9445 {
9446 tree r;
9447
9448 if (pcompound)
9449 *pcompound = begin_compound_stmt (0);
9450
9451 r = build_stmt (loc, TRANSACTION_EXPR, NULL_TREE);
9452
9453   /* Only add the statement to the function if support is enabled.  */
9454 if (flag_tm)
9455 add_stmt (r);
9456 else
9457 error_at (loc, ((flags & TM_STMT_ATTR_RELAXED) != 0
9458 ? G_("%<__transaction_relaxed%> without "
9459 "transactional memory support enabled")
9460 : G_("%<__transaction_atomic%> without "
9461 "transactional memory support enabled")));
9462
9463 TRANSACTION_EXPR_BODY (r) = push_stmt_list ();
9464 TREE_SIDE_EFFECTS (r) = 1;
9465 return r;
9466 }
9467
9468 /* End a __transaction_atomic or __transaction_relaxed statement.
9469 If COMPOUND_STMT is non-null, this is for a function-transaction-block,
9470 and we should end the compound. If NOEX is non-NULL, we wrap the body in
9471 a MUST_NOT_THROW_EXPR with NOEX as condition. */
9472
9473 void
9474 finish_transaction_stmt (tree stmt, tree compound_stmt, int flags, tree noex)
9475 {
9476 TRANSACTION_EXPR_BODY (stmt) = pop_stmt_list (TRANSACTION_EXPR_BODY (stmt));
9477 TRANSACTION_EXPR_OUTER (stmt) = (flags & TM_STMT_ATTR_OUTER) != 0;
9478 TRANSACTION_EXPR_RELAXED (stmt) = (flags & TM_STMT_ATTR_RELAXED) != 0;
9479 TRANSACTION_EXPR_IS_STMT (stmt) = 1;
9480
9481 /* noexcept specifications are not allowed for function transactions. */
9482 gcc_assert (!(noex && compound_stmt));
9483 if (noex)
9484 {
9485 tree body = build_must_not_throw_expr (TRANSACTION_EXPR_BODY (stmt),
9486 noex);
9487 protected_set_expr_location
9488 (body, EXPR_LOCATION (TRANSACTION_EXPR_BODY (stmt)));
9489 TREE_SIDE_EFFECTS (body) = 1;
9490 TRANSACTION_EXPR_BODY (stmt) = body;
9491 }
9492
9493 if (compound_stmt)
9494 finish_compound_stmt (compound_stmt);
9495 }
9496
9497 /* Build a __transaction_atomic or __transaction_relaxed expression. If
9498 NOEX is non-NULL, we wrap the body in a MUST_NOT_THROW_EXPR with NOEX as
9499 condition. */
9500
9501 tree
9502 build_transaction_expr (location_t loc, tree expr, int flags, tree noex)
9503 {
9504 tree ret;
9505 if (noex)
9506 {
9507 expr = build_must_not_throw_expr (expr, noex);
9508 protected_set_expr_location (expr, loc);
9509 TREE_SIDE_EFFECTS (expr) = 1;
9510 }
9511 ret = build1 (TRANSACTION_EXPR, TREE_TYPE (expr), expr);
9512 if (flags & TM_STMT_ATTR_RELAXED)
9513 TRANSACTION_EXPR_RELAXED (ret) = 1;
9514 TREE_SIDE_EFFECTS (ret) = 1;
9515 SET_EXPR_LOCATION (ret, loc);
9516 return ret;
9517 }
9518 \f
9519 void
9520 init_cp_semantics (void)
9521 {
9522 }
9523 \f
9524 /* Build a STATIC_ASSERT for a static assertion with the condition
9525 CONDITION and the message text MESSAGE. LOCATION is the location
9526 of the static assertion in the source code. When MEMBER_P, this
9527 static assertion is a member of a class. */
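/* For example:

     static_assert (sizeof (int) == 4, "unexpected int size");

   If the condition is instantiation-dependent the assertion is deferred by
   building a STATIC_ASSERT node; otherwise it is evaluated immediately.  */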
9528 void
9529 finish_static_assert (tree condition, tree message, location_t location,
9530 bool member_p)
9531 {
9532 tsubst_flags_t complain = tf_warning_or_error;
9533
9534 if (message == NULL_TREE
9535 || message == error_mark_node
9536 || condition == NULL_TREE
9537 || condition == error_mark_node)
9538 return;
9539
9540 if (check_for_bare_parameter_packs (condition))
9541 condition = error_mark_node;
9542
9543 if (instantiation_dependent_expression_p (condition))
9544 {
9545 /* We're in a template; build a STATIC_ASSERT and put it in
9546 the right place. */
9547 tree assertion;
9548
9549 assertion = make_node (STATIC_ASSERT);
9550 STATIC_ASSERT_CONDITION (assertion) = condition;
9551 STATIC_ASSERT_MESSAGE (assertion) = message;
9552 STATIC_ASSERT_SOURCE_LOCATION (assertion) = location;
9553
9554 if (member_p)
9555 maybe_add_class_template_decl_list (current_class_type,
9556 assertion,
9557 /*friend_p=*/0);
9558 else
9559 add_stmt (assertion);
9560
9561 return;
9562 }
9563
9564 /* Save the condition in case it was a concept check. */
9565 tree orig_condition = condition;
9566
9567 /* Fold the expression and convert it to a boolean value. */
9568 condition = perform_implicit_conversion_flags (boolean_type_node, condition,
9569 complain, LOOKUP_NORMAL);
9570 condition = fold_non_dependent_expr (condition, complain,
9571 /*manifestly_const_eval=*/true);
9572
9573 if (TREE_CODE (condition) == INTEGER_CST && !integer_zerop (condition))
9574 /* Do nothing; the condition is satisfied. */
9575 ;
9576 else
9577 {
9578 location_t saved_loc = input_location;
9579
9580 input_location = location;
9581 if (TREE_CODE (condition) == INTEGER_CST
9582 && integer_zerop (condition))
9583 {
9584 int sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT
9585 (TREE_TYPE (TREE_TYPE (message))));
9586 int len = TREE_STRING_LENGTH (message) / sz - 1;
9587 /* Report the error. */
9588 if (len == 0)
9589 error ("static assertion failed");
9590 else
9591 error ("static assertion failed: %s",
9592 TREE_STRING_POINTER (message));
9593
9594 /* Actually explain the failure if this is a concept check. */
9595 if (concept_check_p (orig_condition))
9596 diagnose_constraints (location, orig_condition, NULL_TREE);
9597 }
9598 else if (condition && condition != error_mark_node)
9599 {
9600 error ("non-constant condition for static assertion");
9601 if (require_rvalue_constant_expression (condition))
9602 cxx_constant_value (condition);
9603 }
9604 input_location = saved_loc;
9605 }
9606 }
9607 \f
9608 /* Implements the C++0x decltype keyword. Returns the type of EXPR,
9609 suitable for use as a type-specifier.
9610
9611 ID_EXPRESSION_OR_MEMBER_ACCESS_P is true when EXPR was parsed as an
9612 id-expression or a class member access, FALSE when it was parsed as
9613 a full expression. */
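/* For example, given

     int i;
     int &r = i;

   decltype (i) and decltype (r) are 'int' and 'int&' respectively (the
   declared type of the named entity), whereas decltype ((i)) is 'int&'
   because the parenthesized operand is treated as an ordinary lvalue
   expression.  */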
9614
9615 tree
9616 finish_decltype_type (tree expr, bool id_expression_or_member_access_p,
9617 tsubst_flags_t complain)
9618 {
9619 tree type = NULL_TREE;
9620
9621 if (!expr || error_operand_p (expr))
9622 return error_mark_node;
9623
9624 if (TYPE_P (expr)
9625 || TREE_CODE (expr) == TYPE_DECL
9626 || (TREE_CODE (expr) == BIT_NOT_EXPR
9627 && TYPE_P (TREE_OPERAND (expr, 0))))
9628 {
9629 if (complain & tf_error)
9630 error ("argument to %<decltype%> must be an expression");
9631 return error_mark_node;
9632 }
9633
9634 /* Depending on the resolution of DR 1172, we may later need to distinguish
9635 instantiation-dependent but not type-dependent expressions so that, say,
9636 A<decltype(sizeof(T))>::U doesn't require 'typename'. */
9637 if (instantiation_dependent_uneval_expression_p (expr))
9638 {
9639 type = cxx_make_type (DECLTYPE_TYPE);
9640 DECLTYPE_TYPE_EXPR (type) = expr;
9641 DECLTYPE_TYPE_ID_EXPR_OR_MEMBER_ACCESS_P (type)
9642 = id_expression_or_member_access_p;
9643 SET_TYPE_STRUCTURAL_EQUALITY (type);
9644
9645 return type;
9646 }
9647
9648 /* The type denoted by decltype(e) is defined as follows: */
9649
9650 expr = resolve_nondeduced_context (expr, complain);
9651
9652 if (invalid_nonstatic_memfn_p (input_location, expr, complain))
9653 return error_mark_node;
9654
9655 if (type_unknown_p (expr))
9656 {
9657 if (complain & tf_error)
9658 error ("%<decltype%> cannot resolve address of overloaded function");
9659 return error_mark_node;
9660 }
9661
9662 /* To get the size of a static data member declared as an array of
9663 unknown bound, we need to instantiate it. */
9664 if (VAR_P (expr)
9665 && VAR_HAD_UNKNOWN_BOUND (expr)
9666 && DECL_TEMPLATE_INSTANTIATION (expr))
9667 instantiate_decl (expr, /*defer_ok*/true, /*expl_inst_mem*/false);
9668
9669 if (id_expression_or_member_access_p)
9670 {
9671 /* If e is an id-expression or a class member access (5.2.5
9672 [expr.ref]), decltype(e) is defined as the type of the entity
9673 named by e. If there is no such entity, or e names a set of
9674 overloaded functions, the program is ill-formed. */
9675 if (identifier_p (expr))
9676 expr = lookup_name (expr);
9677
9678 if (INDIRECT_REF_P (expr)
9679 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
9680 /* This can happen when the expression is, e.g., "a.b". Just
9681 look at the underlying operand. */
9682 expr = TREE_OPERAND (expr, 0);
9683
9684 if (TREE_CODE (expr) == OFFSET_REF
9685 || TREE_CODE (expr) == MEMBER_REF
9686 || TREE_CODE (expr) == SCOPE_REF)
9687 /* We're only interested in the field itself. If it is a
9688 BASELINK, we will need to see through it in the next
9689 step. */
9690 expr = TREE_OPERAND (expr, 1);
9691
9692 if (BASELINK_P (expr))
9693 /* See through BASELINK nodes to the underlying function. */
9694 expr = BASELINK_FUNCTIONS (expr);
9695
9696 /* decltype of a decomposition name drops references in the tuple case
9697 (unlike decltype of a normal variable) and keeps cv-qualifiers from
9698 the containing object in the other cases (unlike decltype of a member
9699 access expression). */
9700 if (DECL_DECOMPOSITION_P (expr))
9701 {
9702 if (DECL_HAS_VALUE_EXPR_P (expr))
9703 /* Expr is an array or struct subobject proxy, handle
9704 bit-fields properly. */
9705 return unlowered_expr_type (expr);
9706 else
9707 /* Expr is a reference variable for the tuple case. */
9708 return lookup_decomp_type (expr);
9709 }
9710
9711 switch (TREE_CODE (expr))
9712 {
9713 case FIELD_DECL:
9714 if (DECL_BIT_FIELD_TYPE (expr))
9715 {
9716 type = DECL_BIT_FIELD_TYPE (expr);
9717 break;
9718 }
9719 /* Fall through for fields that aren't bitfields. */
9720 gcc_fallthrough ();
9721
9722 case FUNCTION_DECL:
9723 case VAR_DECL:
9724 case CONST_DECL:
9725 case PARM_DECL:
9726 case RESULT_DECL:
9727 case TEMPLATE_PARM_INDEX:
9728 expr = mark_type_use (expr);
9729 type = TREE_TYPE (expr);
9730 break;
9731
9732 case ERROR_MARK:
9733 type = error_mark_node;
9734 break;
9735
9736 case COMPONENT_REF:
9737 case COMPOUND_EXPR:
9738 mark_type_use (expr);
9739 type = is_bitfield_expr_with_lowered_type (expr);
9740 if (!type)
9741 type = TREE_TYPE (TREE_OPERAND (expr, 1));
9742 break;
9743
9744 case BIT_FIELD_REF:
9745 gcc_unreachable ();
9746
9747 case INTEGER_CST:
9748 case PTRMEM_CST:
9749 /* We can get here when the id-expression refers to an
9750 enumerator or non-type template parameter. */
9751 type = TREE_TYPE (expr);
9752 break;
9753
9754 default:
9755 /* Handle instantiated template non-type arguments. */
9756 type = TREE_TYPE (expr);
9757 break;
9758 }
9759 }
9760 else
9761 {
9762 /* Within a lambda-expression:
9763
9764 Every occurrence of decltype((x)) where x is a possibly
9765 parenthesized id-expression that names an entity of
9766 automatic storage duration is treated as if x were
9767 transformed into an access to a corresponding data member
9768 of the closure type that would have been declared if x
9769 were a use of the denoted entity. */
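      /* For example, in

           int n = 0;
           auto l = [=] () { decltype ((n)) r = n; return r; };

         decltype ((n)) is 'const int &' inside the non-mutable lambda,
         because n is treated as a reference to the const by-copy capture
         member.  */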
9770 if (outer_automatic_var_p (expr)
9771 && current_function_decl
9772 && LAMBDA_FUNCTION_P (current_function_decl))
9773 type = capture_decltype (expr);
9774 else if (error_operand_p (expr))
9775 type = error_mark_node;
9776 else if (expr == current_class_ptr)
9777 /* If the expression is just "this", we want the
9778 cv-unqualified pointer for the "this" type. */
9779 type = TYPE_MAIN_VARIANT (TREE_TYPE (expr));
9780 else
9781 {
9782 /* Otherwise, where T is the type of e, if e is an lvalue,
9783 decltype(e) is defined as T&; if an xvalue, T&&; otherwise, T. */
9784 cp_lvalue_kind clk = lvalue_kind (expr);
9785 type = unlowered_expr_type (expr);
9786 gcc_assert (!TYPE_REF_P (type));
9787
9788 /* For vector types, pick a non-opaque variant. */
9789 if (VECTOR_TYPE_P (type))
9790 type = strip_typedefs (type);
9791
9792 if (clk != clk_none && !(clk & clk_class))
9793 type = cp_build_reference_type (type, (clk & clk_rvalueref));
9794 }
9795 }
9796
9797 return type;
9798 }
9799
9800 /* Called from trait_expr_value to evaluate either __has_nothrow_assign or
9801 __has_nothrow_copy, depending on assign_p. Returns true iff all
9802 the copy {ctor,assign} fns are nothrow. */
9803
9804 static bool
9805 classtype_has_nothrow_assign_or_copy_p (tree type, bool assign_p)
9806 {
9807 tree fns = NULL_TREE;
9808
9809 if (assign_p || TYPE_HAS_COPY_CTOR (type))
9810 fns = get_class_binding (type, assign_p ? assign_op_identifier
9811 : ctor_identifier);
9812
9813 bool saw_copy = false;
9814 for (ovl_iterator iter (fns); iter; ++iter)
9815 {
9816 tree fn = *iter;
9817
9818 if (copy_fn_p (fn) > 0)
9819 {
9820 saw_copy = true;
9821 if (!maybe_instantiate_noexcept (fn)
9822 || !TYPE_NOTHROW_P (TREE_TYPE (fn)))
9823 return false;
9824 }
9825 }
9826
9827 return saw_copy;
9828 }
9829
9830 /* Actually evaluates the trait. */
9831
9832 static bool
9833 trait_expr_value (cp_trait_kind kind, tree type1, tree type2)
9834 {
9835 enum tree_code type_code1;
9836 tree t;
9837
9838 type_code1 = TREE_CODE (type1);
9839
9840 switch (kind)
9841 {
9842 case CPTK_HAS_NOTHROW_ASSIGN:
9843 type1 = strip_array_types (type1);
9844 return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE
9845 && (trait_expr_value (CPTK_HAS_TRIVIAL_ASSIGN, type1, type2)
9846 || (CLASS_TYPE_P (type1)
9847 && classtype_has_nothrow_assign_or_copy_p (type1,
9848 true))));
9849
9850 case CPTK_HAS_TRIVIAL_ASSIGN:
9851 /* ??? The standard seems to be missing the "or array of such a class
9852 type" wording for this trait. */
9853 type1 = strip_array_types (type1);
9854 return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE
9855 && (trivial_type_p (type1)
9856 || (CLASS_TYPE_P (type1)
9857 && TYPE_HAS_TRIVIAL_COPY_ASSIGN (type1))));
9858
9859 case CPTK_HAS_NOTHROW_CONSTRUCTOR:
9860 type1 = strip_array_types (type1);
9861 return (trait_expr_value (CPTK_HAS_TRIVIAL_CONSTRUCTOR, type1, type2)
9862 || (CLASS_TYPE_P (type1)
9863 && (t = locate_ctor (type1))
9864 && maybe_instantiate_noexcept (t)
9865 && TYPE_NOTHROW_P (TREE_TYPE (t))));
9866
9867 case CPTK_HAS_TRIVIAL_CONSTRUCTOR:
9868 type1 = strip_array_types (type1);
9869 return (trivial_type_p (type1)
9870 || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_DFLT (type1)));
9871
9872 case CPTK_HAS_NOTHROW_COPY:
9873 type1 = strip_array_types (type1);
9874 return (trait_expr_value (CPTK_HAS_TRIVIAL_COPY, type1, type2)
9875 || (CLASS_TYPE_P (type1)
9876 && classtype_has_nothrow_assign_or_copy_p (type1, false)));
9877
9878 case CPTK_HAS_TRIVIAL_COPY:
9879 /* ??? The standard seems to be missing the "or array of such a class
9880 type" wording for this trait. */
9881 type1 = strip_array_types (type1);
9882 return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE
9883 || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_COPY_CTOR (type1)));
9884
9885 case CPTK_HAS_TRIVIAL_DESTRUCTOR:
9886 type1 = strip_array_types (type1);
9887 return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE
9888 || (CLASS_TYPE_P (type1)
9889 && TYPE_HAS_TRIVIAL_DESTRUCTOR (type1)));
9890
9891 case CPTK_HAS_VIRTUAL_DESTRUCTOR:
9892 return type_has_virtual_destructor (type1);
9893
9894 case CPTK_HAS_UNIQUE_OBJ_REPRESENTATIONS:
9895 return type_has_unique_obj_representations (type1);
9896
9897 case CPTK_IS_ABSTRACT:
9898 return ABSTRACT_CLASS_TYPE_P (type1);
9899
9900 case CPTK_IS_AGGREGATE:
9901 return CP_AGGREGATE_TYPE_P (type1);
9902
9903 case CPTK_IS_BASE_OF:
9904 return (NON_UNION_CLASS_TYPE_P (type1) && NON_UNION_CLASS_TYPE_P (type2)
9905 && (same_type_ignoring_top_level_qualifiers_p (type1, type2)
9906 || DERIVED_FROM_P (type1, type2)));
9907
9908 case CPTK_IS_CLASS:
9909 return NON_UNION_CLASS_TYPE_P (type1);
9910
9911 case CPTK_IS_EMPTY:
9912 return NON_UNION_CLASS_TYPE_P (type1) && CLASSTYPE_EMPTY_P (type1);
9913
9914 case CPTK_IS_ENUM:
9915 return type_code1 == ENUMERAL_TYPE;
9916
9917 case CPTK_IS_FINAL:
9918 return CLASS_TYPE_P (type1) && CLASSTYPE_FINAL (type1);
9919
9920 case CPTK_IS_LITERAL_TYPE:
9921 return literal_type_p (type1);
9922
9923 case CPTK_IS_POD:
9924 return pod_type_p (type1);
9925
9926 case CPTK_IS_POLYMORPHIC:
9927 return CLASS_TYPE_P (type1) && TYPE_POLYMORPHIC_P (type1);
9928
9929 case CPTK_IS_SAME_AS:
9930 return same_type_p (type1, type2);
9931
9932 case CPTK_IS_STD_LAYOUT:
9933 return std_layout_type_p (type1);
9934
9935 case CPTK_IS_TRIVIAL:
9936 return trivial_type_p (type1);
9937
9938 case CPTK_IS_TRIVIALLY_ASSIGNABLE:
9939 return is_trivially_xible (MODIFY_EXPR, type1, type2);
9940
9941 case CPTK_IS_TRIVIALLY_CONSTRUCTIBLE:
9942 return is_trivially_xible (INIT_EXPR, type1, type2);
9943
9944 case CPTK_IS_TRIVIALLY_COPYABLE:
9945 return trivially_copyable_p (type1);
9946
9947 case CPTK_IS_UNION:
9948 return type_code1 == UNION_TYPE;
9949
9950 case CPTK_IS_ASSIGNABLE:
9951 return is_xible (MODIFY_EXPR, type1, type2);
9952
9953 case CPTK_IS_CONSTRUCTIBLE:
9954 return is_xible (INIT_EXPR, type1, type2);
9955
9956 default:
9957 gcc_unreachable ();
9958 return false;
9959 }
9960 }
9961
9962 /* If TYPE is an array of unknown bound, or (possibly cv-qualified)
9963 void, or a complete type, returns true, otherwise false. */
9964
9965 static bool
9966 check_trait_type (tree type)
9967 {
9968 if (type == NULL_TREE)
9969 return true;
9970
9971 if (TREE_CODE (type) == TREE_LIST)
9972 return (check_trait_type (TREE_VALUE (type))
9973 && check_trait_type (TREE_CHAIN (type)));
9974
9975 if (TREE_CODE (type) == ARRAY_TYPE && !TYPE_DOMAIN (type)
9976 && COMPLETE_TYPE_P (TREE_TYPE (type)))
9977 return true;
9978
9979 if (VOID_TYPE_P (type))
9980 return true;
9981
9982 return !!complete_type_or_else (strip_array_types (type), NULL_TREE);
9983 }
9984
9985 /* Process a trait expression. */
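/* For example, the built-in trait expressions

     __is_class (T)
     __is_base_of (B, D)
     __has_virtual_destructor (T)

   which libstdc++'s <type_traits> uses to implement the corresponding
   standard traits.  */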
9986
9987 tree
9988 finish_trait_expr (location_t loc, cp_trait_kind kind, tree type1, tree type2)
9989 {
9990 if (type1 == error_mark_node
9991 || type2 == error_mark_node)
9992 return error_mark_node;
9993
9994 if (processing_template_decl)
9995 {
9996 tree trait_expr = make_node (TRAIT_EXPR);
9997 TREE_TYPE (trait_expr) = boolean_type_node;
9998 TRAIT_EXPR_TYPE1 (trait_expr) = type1;
9999 TRAIT_EXPR_TYPE2 (trait_expr) = type2;
10000 TRAIT_EXPR_KIND (trait_expr) = kind;
10001 TRAIT_EXPR_LOCATION (trait_expr) = loc;
10002 return trait_expr;
10003 }
10004
10005 switch (kind)
10006 {
10007 case CPTK_HAS_NOTHROW_ASSIGN:
10008 case CPTK_HAS_TRIVIAL_ASSIGN:
10009 case CPTK_HAS_NOTHROW_CONSTRUCTOR:
10010 case CPTK_HAS_TRIVIAL_CONSTRUCTOR:
10011 case CPTK_HAS_NOTHROW_COPY:
10012 case CPTK_HAS_TRIVIAL_COPY:
10013 case CPTK_HAS_TRIVIAL_DESTRUCTOR:
10014 case CPTK_HAS_UNIQUE_OBJ_REPRESENTATIONS:
10015 case CPTK_HAS_VIRTUAL_DESTRUCTOR:
10016 case CPTK_IS_ABSTRACT:
10017 case CPTK_IS_AGGREGATE:
10018 case CPTK_IS_EMPTY:
10019 case CPTK_IS_FINAL:
10020 case CPTK_IS_LITERAL_TYPE:
10021 case CPTK_IS_POD:
10022 case CPTK_IS_POLYMORPHIC:
10023 case CPTK_IS_STD_LAYOUT:
10024 case CPTK_IS_TRIVIAL:
10025 case CPTK_IS_TRIVIALLY_COPYABLE:
10026 if (!check_trait_type (type1))
10027 return error_mark_node;
10028 break;
10029
10030 case CPTK_IS_ASSIGNABLE:
10031 case CPTK_IS_CONSTRUCTIBLE:
10032 break;
10033
10034 case CPTK_IS_TRIVIALLY_ASSIGNABLE:
10035 case CPTK_IS_TRIVIALLY_CONSTRUCTIBLE:
10036 if (!check_trait_type (type1)
10037 || !check_trait_type (type2))
10038 return error_mark_node;
10039 break;
10040
10041 case CPTK_IS_BASE_OF:
10042 if (NON_UNION_CLASS_TYPE_P (type1) && NON_UNION_CLASS_TYPE_P (type2)
10043 && !same_type_ignoring_top_level_qualifiers_p (type1, type2)
10044 && !complete_type_or_else (type2, NULL_TREE))
10045 /* We already issued an error. */
10046 return error_mark_node;
10047 break;
10048
10049 case CPTK_IS_CLASS:
10050 case CPTK_IS_ENUM:
10051 case CPTK_IS_UNION:
10052 case CPTK_IS_SAME_AS:
10053 break;
10054
10055 default:
10056 gcc_unreachable ();
10057 }
10058
10059 tree val = (trait_expr_value (kind, type1, type2)
10060 ? boolean_true_node : boolean_false_node);
10061 return maybe_wrap_with_location (val, loc);
10062 }
10063
10064 /* Do-nothing variants of functions to handle pragma FLOAT_CONST_DECIMAL64,
10065 which is ignored for C++. */
10066
10067 void
10068 set_float_const_decimal64 (void)
10069 {
10070 }
10071
10072 void
10073 clear_float_const_decimal64 (void)
10074 {
10075 }
10076
10077 bool
10078 float_const_decimal64_p (void)
10079 {
10080 return 0;
10081 }
10082
10083 \f
10084 /* Return true if T designates the implied `this' parameter. */
10085
10086 bool
10087 is_this_parameter (tree t)
10088 {
10089 if (!DECL_P (t) || DECL_NAME (t) != this_identifier)
10090 return false;
10091 gcc_assert (TREE_CODE (t) == PARM_DECL || is_capture_proxy (t)
10092 || (cp_binding_oracle && TREE_CODE (t) == VAR_DECL));
10093 return true;
10094 }
10095
10096 /* Insert the deduced return type for an auto function. */
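/* For example, once the body of

     auto f () { return 42; }

   has been seen, the deduced type 'int' replaces 'auto' in f's declared type
   and a matching DECL_RESULT is rebuilt.  */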
10097
10098 void
10099 apply_deduced_return_type (tree fco, tree return_type)
10100 {
10101 tree result;
10102
10103 if (return_type == error_mark_node)
10104 return;
10105
10106 if (DECL_CONV_FN_P (fco))
10107 DECL_NAME (fco) = make_conv_op_name (return_type);
10108
10109 TREE_TYPE (fco) = change_return_type (return_type, TREE_TYPE (fco));
10110
10111 result = DECL_RESULT (fco);
10112 if (result == NULL_TREE)
10113 return;
10114 if (TREE_TYPE (result) == return_type)
10115 return;
10116
10117 if (!processing_template_decl && !VOID_TYPE_P (return_type)
10118 && !complete_type_or_else (return_type, NULL_TREE))
10119 return;
10120
10121 /* We already have a DECL_RESULT from start_preparsed_function.
10122 Now we need to redo the work it and allocate_struct_function
10123 did to reflect the new type. */
10124 gcc_assert (current_function_decl == fco);
10125 result = build_decl (input_location, RESULT_DECL, NULL_TREE,
10126 TYPE_MAIN_VARIANT (return_type));
10127 DECL_ARTIFICIAL (result) = 1;
10128 DECL_IGNORED_P (result) = 1;
10129 cp_apply_type_quals_to_decl (cp_type_quals (return_type),
10130 result);
10131
10132 DECL_RESULT (fco) = result;
10133
10134 if (!processing_template_decl)
10135 {
10136 bool aggr = aggregate_value_p (result, fco);
10137 #ifdef PCC_STATIC_STRUCT_RETURN
10138 cfun->returns_pcc_struct = aggr;
10139 #endif
10140 cfun->returns_struct = aggr;
10141 }
10142 }
10143
10144 /* DECL is a local variable or parameter from the surrounding scope of a
10145 lambda-expression. Returns the decltype for a use of the capture field
10146 for DECL even if it hasn't been captured yet. */
10147
10148 static tree
10149 capture_decltype (tree decl)
10150 {
10151 tree lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
10152 tree cap = lookup_name_real (DECL_NAME (decl), /*type*/0, /*nonclass*/1,
10153 /*block_p=*/true, /*ns*/0, LOOKUP_HIDDEN);
10154 tree type;
10155
10156 if (cap && is_capture_proxy (cap))
10157 type = TREE_TYPE (cap);
10158 else
10159 switch (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam))
10160 {
10161 case CPLD_NONE:
10162 error ("%qD is not captured", decl);
10163 return error_mark_node;
10164
10165 case CPLD_COPY:
10166 type = TREE_TYPE (decl);
10167 if (TYPE_REF_P (type)
10168 && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
10169 type = TREE_TYPE (type);
10170 break;
10171
10172 case CPLD_REFERENCE:
10173 type = TREE_TYPE (decl);
10174 if (!TYPE_REF_P (type))
10175 type = build_reference_type (TREE_TYPE (decl));
10176 break;
10177
10178 default:
10179 gcc_unreachable ();
10180 }
10181
10182 if (!TYPE_REF_P (type))
10183 {
10184 if (!LAMBDA_EXPR_MUTABLE_P (lam))
10185 type = cp_build_qualified_type (type, (cp_type_quals (type)
10186 |TYPE_QUAL_CONST));
10187 type = build_reference_type (type);
10188 }
10189 return type;
10190 }
10191
10192 /* Build a unary fold expression of EXPR over OP.  DIR selects the kind of
10193    fold: UNARY_RIGHT_FOLD_EXPR for a right fold, UNARY_LEFT_FOLD_EXPR for a left fold.  */
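/* For example, given a function parameter pack args:

     (args + ...)   unary right fold, a1 + (a2 + (... + aN))
     (... + args)   unary left fold,  ((a1 + a2) + ...) + aN  */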
10194
10195 static tree
10196 finish_unary_fold_expr (tree expr, int op, tree_code dir)
10197 {
10198   /* Build a pack expansion, diagnosing an EXPR with no unexpanded parameter packs.  */
10199 if (!uses_parameter_packs (expr))
10200 {
10201 error_at (location_of (expr), "operand of fold expression has no "
10202 "unexpanded parameter packs");
10203 return error_mark_node;
10204 }
10205 tree pack = make_pack_expansion (expr);
10206
10207 /* Build the fold expression. */
10208 tree code = build_int_cstu (integer_type_node, abs (op));
10209 tree fold = build_min_nt_loc (UNKNOWN_LOCATION, dir, code, pack);
10210 FOLD_EXPR_MODIFY_P (fold) = (op < 0);
10211 return fold;
10212 }
10213
10214 tree
10215 finish_left_unary_fold_expr (tree expr, int op)
10216 {
10217 return finish_unary_fold_expr (expr, op, UNARY_LEFT_FOLD_EXPR);
10218 }
10219
10220 tree
10221 finish_right_unary_fold_expr (tree expr, int op)
10222 {
10223 return finish_unary_fold_expr (expr, op, UNARY_RIGHT_FOLD_EXPR);
10224 }
10225
10226 /* Build a binary fold expression over PACK and INIT, folding in the
10227    direction DIR.  The two-argument overload below determines the direction
10228    from whichever of its operands contains an unexpanded parameter pack.  */
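/* For example:

     (args + ... + init)   binary right fold
     (init + ... + args)   binary left fold  */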
10229
10230 tree
10231 finish_binary_fold_expr (tree pack, tree init, int op, tree_code dir)
10232 {
10233 pack = make_pack_expansion (pack);
10234 tree code = build_int_cstu (integer_type_node, abs (op));
10235 tree fold = build_min_nt_loc (UNKNOWN_LOCATION, dir, code, pack, init);
10236 FOLD_EXPR_MODIFY_P (fold) = (op < 0);
10237 return fold;
10238 }
10239
10240 tree
10241 finish_binary_fold_expr (tree expr1, tree expr2, int op)
10242 {
10243   /* Determine which expr has an unexpanded parameter pack and
10244      set the pack and initial term.  */
10245 bool pack1 = uses_parameter_packs (expr1);
10246 bool pack2 = uses_parameter_packs (expr2);
10247 if (pack1 && !pack2)
10248 return finish_binary_fold_expr (expr1, expr2, op, BINARY_RIGHT_FOLD_EXPR);
10249 else if (pack2 && !pack1)
10250 return finish_binary_fold_expr (expr2, expr1, op, BINARY_LEFT_FOLD_EXPR);
10251 else
10252 {
10253 if (pack1)
10254 error ("both arguments in binary fold have unexpanded parameter packs");
10255 else
10256 error ("no unexpanded parameter packs in binary fold");
10257 }
10258 return error_mark_node;
10259 }
10260
10261 /* Finish __builtin_launder (arg). */
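/* For example, with storage reused via placement new:

     T *p = __builtin_launder (reinterpret_cast<T *> (buf));

   The argument must have pointer type, which is checked below.  */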
10262
10263 tree
10264 finish_builtin_launder (location_t loc, tree arg, tsubst_flags_t complain)
10265 {
10266 tree orig_arg = arg;
10267 if (!type_dependent_expression_p (arg))
10268 arg = decay_conversion (arg, complain);
10269 if (error_operand_p (arg))
10270 return error_mark_node;
10271 if (!type_dependent_expression_p (arg)
10272 && !TYPE_PTR_P (TREE_TYPE (arg)))
10273 {
10274 error_at (loc, "non-pointer argument to %<__builtin_launder%>");
10275 return error_mark_node;
10276 }
10277 if (processing_template_decl)
10278 arg = orig_arg;
10279 return build_call_expr_internal_loc (loc, IFN_LAUNDER,
10280 TREE_TYPE (arg), 1, arg);
10281 }
10282
10283 /* Finish __builtin_convertvector (arg, type). */
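/* For example, assuming

     typedef float v4sf __attribute__ ((vector_size (16)));
     typedef int   v4si __attribute__ ((vector_size (16)));

   and a v4sf value x, __builtin_convertvector (x, v4si) converts each
   element of x to int, element by element.  */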
10284
10285 tree
10286 cp_build_vec_convert (tree arg, location_t loc, tree type,
10287 tsubst_flags_t complain)
10288 {
10289 if (error_operand_p (type))
10290 return error_mark_node;
10291 if (error_operand_p (arg))
10292 return error_mark_node;
10293
10294 tree ret = NULL_TREE;
10295 if (!type_dependent_expression_p (arg) && !dependent_type_p (type))
10296 ret = c_build_vec_convert (cp_expr_loc_or_input_loc (arg), arg,
10297 loc, type, (complain & tf_error) != 0);
10298
10299 if (!processing_template_decl)
10300 return ret;
10301
10302 return build_call_expr_internal_loc (loc, IFN_VEC_CONVERT, type, 1, arg);
10303 }
10304
10305 #include "gt-cp-semantics.h"