c++: local-scope OMP UDR reductions have no template head
gcc/cp/cp-gimplify.c
/* C++-specific tree lowering bits; see also c-gimplify.c and gimple.c.

   Copyright (C) 2002-2020 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
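
/* An illustrative sketch (not itself part of the lowering): for

     while (cond)
       if (done)
         break;

   the BREAK_STMT is lowered via get_bc_label into "goto <break_label>;",
   and finish_bc_block appends "<break_label>:;" just after the LOOP_EXPR,
   so every exit from the scope converges on that one label.  */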

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
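
/* A sketch of the result: for a body B declared with "throw (int)", the
   EH_SPEC_BLOCK becomes approximately

     try { B }
     catch: eh_filter (int) { <call the unexpected handler>; }

   i.e. a TRY_CATCH_EXPR whose handler is an EH_FILTER_EXPR listing the
   allowed types, with the failure action calling the unexpected handler
   on the current exception.  */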

/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
        return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
          && TREE_CODE (fe) == PREDICT_EXPR
          && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
          && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
        {
          gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
          richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
          warning_at (&richloc, OPT_Wattributes,
                      "both branches of %<if%> statement marked as %qs",
                      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
        }
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
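
/* For example, "if (p) f (); else g ();" becomes the void COND_EXPR
   "p ? f () : g ()"; when the condition folds to a constant and the dead
   arm has no side effects, only the live arm survives.  (A sketch; the
   PREDICT_EXPR handling above only affects the diagnostic.)  */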

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;
  tree debug_begin = NULL;

  protected_set_expr_location_if_unset (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (MAY_HAVE_DEBUG_MARKER_STMTS
      && (!cond || !integer_zerop (cond)))
    {
      debug_begin = build0 (DEBUG_BEGIN_STMT, void_type_node);
      SET_EXPR_LOCATION (debug_begin, cp_expr_loc_or_loc (cond, start_locus));
    }

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    {
      append_to_statement_list (debug_begin, &stmt_list);
      debug_begin = NULL_TREE;
      append_to_statement_list (exit, &stmt_list);
    }
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  if (incr)
    {
      if (MAY_HAVE_DEBUG_MARKER_STMTS)
        {
          tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
          SET_EXPR_LOCATION (d, cp_expr_loc_or_loc (incr, start_locus));
          append_to_statement_list (d, &stmt_list);
        }
      append_to_statement_list (incr, &stmt_list);
    }
  append_to_statement_list (debug_begin, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
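
/* A rough sketch of the shape produced for "while (c) body" (debug
   markers omitted):

     LOOP_EXPR
       {
         if (c) ; else goto <break_label>;
         body
         <continue_label>:;
       }
     <break_label>:;

   A do-while emits the exit test after the body instead, since
   COND_IS_FIRST is false there.  */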

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);

  break_block = begin_bc_block (bc_break, stmt_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
                       || !TREE_USED (break_block));
}
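
/* Illustration: "switch (x) { case 0: ...; break; }" turns into a
   SWITCH_EXPR over x whose body still carries the CASE_LABEL_EXPRs; by
   the time finish_bc_block appends the break label after the body, the
   BREAK_STMTs inside have already been lowered to gotos to it.  */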

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR && TARGET_EXPR_INITIAL (from))
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}
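
/* For illustration (the decl names are invented for the example):
   "T t = T (1);" arrives here as

     INIT_EXPR <t, TARGET_EXPR <D.2345, AGGR_INIT_EXPR <T::T, D.2345, 1>>>

   and the loop above redirects the AGGR_INIT_EXPR slot from the temporary
   D.2345 to t, replacing the whole INIT_EXPR so the constructor builds t
   in place.  */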

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);
  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
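
/* E.g. given "struct empty {};", the copy in "e1 = e2;" has no effect on
   the object representation, so the MODIFY_EXPR handling in
   cp_gimplify_expr below can reduce it to evaluating the operands for
   their side effects only (a sketch of the intent, not an exact dump).  */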

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
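
/* Two illustrative cases: the lvalue "a[i++]" has side effects (the index
   is incremented even if no element is ever loaded), while a volatile
   variable "v" by itself does not; only an actual read or write through
   it would.  */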

/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        hash_set<tree> pset;
        cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
        cp_genericize_tree (expr_p, false);
        copy_if_shared (expr_p);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
          {
            while (TREE_CODE (op1) == TARGET_EXPR)
              /* We're disconnecting the initializer from its target,
                 don't create a temporary.  */
              op1 = TARGET_EXPR_INITIAL (op1);

            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
            if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
              /* Avoid 'return *<retval>;'  */
              *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
              mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
              LHS, so for scalar assignment we need to preevaluate if the
              RHS could be affected by LHS side-effects even if it has no
              side-effects of its own.  We don't need this for classes because
              class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
          && CALL_EXPR_FN (*expr_p)
          && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
          && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
        {
          tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          enum gimplify_status t
            = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
                                  is_gimple_call_addr);
          if (t == GS_ERROR)
            ret = GS_ERROR;
          /* GIMPLE considers most pointer conversion useless, but for
             calls we actually care about the exact function pointer type.  */
          else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
            CALL_EXPR_FN (*expr_p)
              = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
        }
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (INDIRECT_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      if (ret != GS_ERROR)
        {
          tree decl = cp_get_callee_fndecl_nofold (*expr_p);
          if (decl
              && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
                                    BUILT_IN_FRONTEND))
            *expr_p = boolean_false_node;
          else if (decl
                   && fndecl_built_in_p (decl, CP_BUILT_IN_SOURCE_LOCATION,
                                         BUILT_IN_FRONTEND))
            *expr_p = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
        }
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
         elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      ret = GS_UNHANDLED;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}
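
/* For example, a parameter whose class type has a nontrivial copy
   constructor is passed by invisible reference: its PARM_DECL has
   reference type with DECL_BY_REFERENCE set, and cp_genericize_r below
   rewrites uses of it into dereferences.  */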

/* Return true if the UIDs in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TYPE_REF_P (type))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
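
/* Why this runs so early (a sketch):

     #pragma omp task
       use (s);   // s has class type

   makes s implicitly firstprivate, so the task needs s's copy constructor
   and destructor; they must be instantiated here because by the time
   gimplification sees the task it is too late to instantiate anything.  */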

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle end.  Since for now most foldings are done only on GENERIC
   in fold-const, we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
         return the same tree, whose subtrees were walked the first time
         cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_LOOP || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree
genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (type, op0, op1);
}

/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_EXPR of the DECL_EXPR and EXPR.  */

tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
        return expr;
      vla = TREE_TYPE (vla);
    }
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}
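
/* Illustration: in "int n = ...; auto p = (int (*)[n]) malloc (...);" the
   anonymous VLA type int[n] gets an artificial TYPE_DECL whose DECL_EXPR
   is emitted ahead of the cast, so gimplify_type_sizes evaluates n before
   the pointer-to-VLA type is used (c++/88256).  */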

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          TREE_USED (h->to) |= TREE_USED (stmt);
          *walk_subtrees = 0;
          return NULL;
        }
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        {
          /* If in an OpenMP context, note var uses.  */
          if (__builtin_expect (wtd->omp_ctx != NULL, 0)
              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
          *walk_subtrees = 0;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
        *walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
                cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        case OMP_CLAUSE_PRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          else if (wtd->omp_ctx != NULL)
            {
              /* Private clause doesn't cause any references to the
                 var in outer contexts, avoid calling
                 omp_cxx_notice_variable for it.  */
              struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
              wtd->omp_ctx = NULL;
              cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                            data, NULL);
              wtd->omp_ctx = old;
              *walk_subtrees = 0;
            }
          break;
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_INCLUSIVE:
        case OMP_CLAUSE_EXCLUSIVE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          break;
        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
        case OMP_CLAUSE_TASK_REDUCTION:
          /* Don't dereference an invisiref in reduction clause's
             OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
             still needs to be genericized.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_REDUCTION_INIT (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                              cp_genericize_r, data, NULL);
              if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        default:
          break;
        }
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                            CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                   : TRY_FINALLY_EXPR,
                            void_type_node,
                            CLEANUP_BODY (stmt),
                            CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
        tree type_left
          = (TREE_OPERAND (stmt, 1)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
             : NULL_TREE);
        tree type_right
          = (TREE_OPERAND (stmt, 2)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
             : NULL_TREE);
        if (type_left
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 1))))
          {
            TREE_OPERAND (stmt, 1)
              = fold_convert (type_left, TREE_OPERAND (stmt, 1));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_left));
          }
        if (type_right
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 2))))
          {
            TREE_OPERAND (stmt, 2)
              = fold_convert (type_right, TREE_OPERAND (stmt, 2));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_right));
          }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
        tree block = NULL_TREE;

        /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
           BLOCK, and append an IMPORTED_DECL to its
           BLOCK_VARS chained list.  */
        if (wtd->bind_expr_stack.exists ())
          {
            int i;
            for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
              if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
                break;
          }
        if (block)
          {
            tree decl = TREE_OPERAND (stmt, 0);
            gcc_assert (decl);

            if (undeduced_auto_decl (decl))
              /* Omit from the GENERIC, the back-end can't handle it.  */;
            else
              {
                tree using_directive = make_node (IMPORTED_DECL);
                TREE_TYPE (using_directive) = void_type_node;
                DECL_CONTEXT (using_directive) = current_function_decl;

                IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
                DECL_CHAIN (using_directive) = BLOCK_VARS (block);
                BLOCK_VARS (block) = using_directive;
              }
          }
        /* The USING_STMT won't appear in GENERIC.  */
        *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
        *walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
        {
          /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
          *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
          *walk_subtrees = 0;
        }
      else
        {
          tree d = DECL_EXPR_DECL (stmt);
          if (VAR_P (d))
            gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
        }
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
        struct cp_genericize_omp_taskreg omp_ctx;
        tree c, decl;
        splay_tree_node n;

        *walk_subtrees = 0;
        cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
        omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
        omp_ctx.default_shared = omp_ctx.is_parallel;
        omp_ctx.outer = wtd->omp_ctx;
        omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
        wtd->omp_ctx = &omp_ctx;
        for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
          switch (OMP_CLAUSE_CODE (c))
            {
            case OMP_CLAUSE_SHARED:
            case OMP_CLAUSE_PRIVATE:
            case OMP_CLAUSE_FIRSTPRIVATE:
            case OMP_CLAUSE_LASTPRIVATE:
              decl = OMP_CLAUSE_DECL (c);
              if (decl == error_mark_node || !omp_var_to_track (decl))
                break;
              n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
              if (n != NULL)
                break;
              splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                                 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                                 ? OMP_CLAUSE_DEFAULT_SHARED
                                 : OMP_CLAUSE_DEFAULT_PRIVATE);
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
                omp_cxx_notice_variable (omp_ctx.outer, decl);
              break;
            case OMP_CLAUSE_DEFAULT:
              if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
                omp_ctx.default_shared = true;
            default:
              break;
            }
        if (TREE_CODE (stmt) == OMP_TASKLOOP)
          genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
        else
          cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
        wtd->omp_ctx = omp_ctx.outer;
        splay_tree_delete (omp_ctx.variables);
      }
      break;

    case OMP_TARGET:
      cfun->has_omp_target = true;
      break;

    case TRY_BLOCK:
      {
        *walk_subtrees = 0;
        tree try_block = wtd->try_block;
        wtd->try_block = stmt;
        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
        wtd->try_block = try_block;
        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
      break;

    case THROW_EXPR:
      {
        location_t loc = location_of (stmt);
        if (TREE_NO_WARNING (stmt))
          /* Never mind.  */;
        else if (wtd->try_block)
          {
            if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
              {
                auto_diagnostic_group d;
                if (warning_at (loc, OPT_Wterminate,
                                "%<throw%> will always call %<terminate%>")
                    && cxx_dialect >= cxx11
                    && DECL_DESTRUCTOR_P (current_function_decl))
                  inform (loc, "in C++11 destructors default to %<noexcept%>");
              }
          }
        else
          {
            if (warn_cxx11_compat && cxx_dialect < cxx11
                && DECL_DESTRUCTOR_P (current_function_decl)
                && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                    == NULL_TREE)
                && (get_defaulted_eh_spec (current_function_decl)
                    == empty_except_spec))
              warning_at (loc, OPT_Wc__11_compat,
                          "in C++11 this %<throw%> will call %<terminate%> "
                          "because destructors default to %<noexcept%>");
          }
      }
      break;

1635 case CONVERT_EXPR:
1636 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1637 break;
1638
1639 case FOR_STMT:
1640 genericize_for_stmt (stmt_p, walk_subtrees, data);
1641 break;
1642
1643 case WHILE_STMT:
1644 genericize_while_stmt (stmt_p, walk_subtrees, data);
1645 break;
1646
1647 case DO_STMT:
1648 genericize_do_stmt (stmt_p, walk_subtrees, data);
1649 break;
1650
1651 case SWITCH_STMT:
1652 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1653 break;
1654
1655 case CONTINUE_STMT:
1656 genericize_continue_stmt (stmt_p);
1657 break;
1658
1659 case BREAK_STMT:
1660 genericize_break_stmt (stmt_p);
1661 break;
1662
1663 case SPACESHIP_EXPR:
1664 *stmt_p = genericize_spaceship (*stmt_p);
1665 break;
1666
1667 case OMP_DISTRIBUTE:
1668 /* Need to explicitly instantiate copy ctors on class iterators of
1669 composite distribute parallel for. */
1670 if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
1671 {
1672 tree *data[4] = { NULL, NULL, NULL, NULL };
1673 tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
1674 find_combined_omp_for, data, NULL);
1675 if (inner != NULL_TREE
1676 && TREE_CODE (inner) == OMP_FOR)
1677 {
1678 for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
1679 if (OMP_FOR_ORIG_DECLS (inner)
1680 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1681 i)) == TREE_LIST
1682 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1683 i)))
1684 {
1685 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
1686 /* Class iterators aren't allowed on OMP_SIMD, so the only
1687 case we need to solve is distribute parallel for. */
1688 gcc_assert (TREE_CODE (inner) == OMP_FOR
1689 && data[1]);
1690 tree orig_decl = TREE_PURPOSE (orig);
1691 tree c, cl = NULL_TREE;
1692 for (c = OMP_FOR_CLAUSES (inner);
1693 c; c = OMP_CLAUSE_CHAIN (c))
1694 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1695 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
1696 && OMP_CLAUSE_DECL (c) == orig_decl)
1697 {
1698 cl = c;
1699 break;
1700 }
1701 if (cl == NULL_TREE)
1702 {
1703 for (c = OMP_PARALLEL_CLAUSES (*data[1]);
1704 c; c = OMP_CLAUSE_CHAIN (c))
1705 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1706 && OMP_CLAUSE_DECL (c) == orig_decl)
1707 {
1708 cl = c;
1709 break;
1710 }
1711 }
1712 if (cl)
1713 {
1714 orig_decl = require_complete_type (orig_decl);
1715 tree inner_type = TREE_TYPE (orig_decl);
1716 if (orig_decl == error_mark_node)
1717 continue;
1718 if (TYPE_REF_P (TREE_TYPE (orig_decl)))
1719 inner_type = TREE_TYPE (inner_type);
1720
1721 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1722 inner_type = TREE_TYPE (inner_type);
1723 get_copy_ctor (inner_type, tf_warning_or_error);
1724 }
1725 }
1726 }
1727 }
1728 /* FALLTHRU */
1729 case OMP_FOR:
1730 case OMP_SIMD:
1731 case OMP_LOOP:
1732 case OACC_LOOP:
1733 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1734 break;
1735
1736 case PTRMEM_CST:
1737 /* By the time we get here we're handing off to the back end, so we don't
1738 need or want to preserve PTRMEM_CST anymore. */
1739 *stmt_p = cplus_expand_constant (stmt);
1740 *walk_subtrees = 0;
1741 break;
1742
1743 case MEM_REF:
1744 /* For MEM_REF, make sure not to sanitize the second operand even
1745 if it has reference type. It is just an offset with a type
1746 holding other information. There is no other processing we
1747 need to do for INTEGER_CSTs, so just ignore the second argument
1748 unconditionally. */
1749 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1750 *walk_subtrees = 0;
1751 break;
1752
1753 case NOP_EXPR:
1754 *stmt_p = predeclare_vla (*stmt_p);
1755 if (!wtd->no_sanitize_p
1756 && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
1757 && TYPE_REF_P (TREE_TYPE (stmt)))
1758 ubsan_maybe_instrument_reference (stmt_p);
1759 break;
1760
1761 case CALL_EXPR:
1762 /* Evaluate function concept checks instead of treating them as
1763 normal functions. */
1764 if (concept_check_p (stmt))
1765 {
1766 *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
1767 * walk_subtrees = 0;
1768 break;
1769 }
1770
1771 if (!wtd->no_sanitize_p
1772 && sanitize_flags_p ((SANITIZE_NULL
1773 | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
1774 {
1775 tree fn = CALL_EXPR_FN (stmt);
1776 if (fn != NULL_TREE
1777 && !error_operand_p (fn)
1778 && INDIRECT_TYPE_P (TREE_TYPE (fn))
1779 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1780 {
1781 bool is_ctor
1782 = TREE_CODE (fn) == ADDR_EXPR
1783 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1784 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1785 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1786 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1787 if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
1788 cp_ubsan_maybe_instrument_member_call (stmt);
1789 }
1790 else if (fn == NULL_TREE
1791 && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1792 && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
1793 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
1794 *walk_subtrees = 0;
1795 }
1796 /* Fall through. */
1797 case AGGR_INIT_EXPR:
1798 /* For calls to a multi-versioned function, overload resolution
1799 returns the function with the highest target priority, that is,
1800 the version that will checked for dispatching first. If this
1801 version is inlinable, a direct call to this version can be made
1802 otherwise the call should go through the dispatcher. */
1803 {
1804 tree fn = cp_get_callee_fndecl_nofold (stmt);
1805 if (fn && DECL_FUNCTION_VERSIONED (fn)
1806 && (current_function_decl == NULL
1807 || !targetm.target_option.can_inline_p (current_function_decl,
1808 fn)))
1809 if (tree dis = get_function_version_dispatcher (fn))
1810 {
1811 mark_versions_used (dis);
1812 dis = build_address (dis);
1813 if (TREE_CODE (stmt) == CALL_EXPR)
1814 CALL_EXPR_FN (stmt) = dis;
1815 else
1816 AGGR_INIT_EXPR_FN (stmt) = dis;
1817 }
1818 }
1819 break;
1820
1821 case TARGET_EXPR:
1822 if (TARGET_EXPR_INITIAL (stmt)
1823 && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1824 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1825 TARGET_EXPR_NO_ELIDE (stmt) = 1;
1826 break;
1827
1828 case REQUIRES_EXPR:
1829 /* Emit the value of the requires-expression. */
1830 *stmt_p = constant_boolean_node (constraints_satisfied_p (stmt),
1831 boolean_type_node);
1832 *walk_subtrees = 0;
1833 break;
1834
1835 case TEMPLATE_ID_EXPR:
1836 gcc_assert (concept_check_p (stmt));
1837 /* Emit the value of the concept check. */
1838 *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
1839 	  *walk_subtrees = 0;
1840 break;
1841
1842 case STATEMENT_LIST:
1843 if (TREE_SIDE_EFFECTS (stmt))
1844 {
1845 tree_stmt_iterator i;
1846 int nondebug_stmts = 0;
1847 bool clear_side_effects = true;
1848 	  /* Genericization can clear TREE_SIDE_EFFECTS, e.g. when
1849 	     transforming an IF_STMT into a COND_EXPR.  If such a stmt
1850 	     appears in a STATEMENT_LIST that contains only that
1851 	     stmt and some DEBUG_BEGIN_STMTs, then without -g (where the
1852 	     STATEMENT_LIST wouldn't be present at all) the resulting
1853 	     expression wouldn't have TREE_SIDE_EFFECTS set, so make sure
1854 	     to clear it on the STATEMENT_LIST as well in such cases.  */
1855 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1856 {
1857 tree t = tsi_stmt (i);
1858 if (TREE_CODE (t) != DEBUG_BEGIN_STMT && nondebug_stmts < 2)
1859 nondebug_stmts++;
1860 cp_walk_tree (tsi_stmt_ptr (i), cp_genericize_r, data, NULL);
1861 if (TREE_CODE (t) != DEBUG_BEGIN_STMT
1862 && (nondebug_stmts > 1 || TREE_SIDE_EFFECTS (tsi_stmt (i))))
1863 clear_side_effects = false;
1864 }
1865 if (clear_side_effects)
1866 TREE_SIDE_EFFECTS (stmt) = 0;
1867 *walk_subtrees = 0;
1868 }
1869 break;
1870
1871 default:
1872 if (IS_TYPE_OR_DECL_P (stmt))
1873 *walk_subtrees = 0;
1874 break;
1875 }
1876
1877 p_set->add (*stmt_p);
1878
1879 return NULL;
1880 }
1881
1882 /* Lower C++ front end trees to GENERIC in T_P. */
1883
1884 static void
1885 cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1886 {
1887 struct cp_genericize_data wtd;
1888
1889 wtd.p_set = new hash_set<tree>;
1890 wtd.bind_expr_stack.create (0);
1891 wtd.omp_ctx = NULL;
1892 wtd.try_block = NULL_TREE;
1893 wtd.no_sanitize_p = false;
1894 wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
1895 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1896 delete wtd.p_set;
1897 wtd.bind_expr_stack.release ();
1898 if (sanitize_flags_p (SANITIZE_VPTR))
1899 cp_ubsan_instrument_member_accesses (t_p);
1900 }
1901
1902 /* If a non-void function doesn't obviously end with a return,
1903    add ubsan instrumentation code to verify it at runtime.
1904    If -fsanitize=return is not enabled, instrument
1905    __builtin_unreachable instead.  */
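/* For instance (hypothetical user code):

     int f (int x) { if (x) return 1; }

   f can flow off its end without returning a value.  With
   -fsanitize=return the instrumentation added here reports that at
   runtime; otherwise, when optimizing, a __builtin_unreachable ()
   is appended instead.  */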
1906
1907 static void
1908 cp_maybe_instrument_return (tree fndecl)
1909 {
1910 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1911 || DECL_CONSTRUCTOR_P (fndecl)
1912 || DECL_DESTRUCTOR_P (fndecl)
1913 || !targetm.warn_func_return (fndecl))
1914 return;
1915
1916 if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
1917       /* Don't add __builtin_unreachable () if not optimizing; it will not
1918 	 improve any optimizations in that case, just break code exhibiting UB.
1919 	 Don't add it for -fsanitize=unreachable -fno-sanitize=return either:
1920 	 UBSan covers this with ubsan_instrument_return above, where sufficient
1921 	 information is provided, while the __builtin_unreachable () below,
1922 	 emitted when return sanitization is disabled, would just result in a
1923 	 hard to understand runtime error without a location.  */
1924 && (!optimize
1925 || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1926 return;
1927
1928 tree t = DECL_SAVED_TREE (fndecl);
1929 while (t)
1930 {
1931 switch (TREE_CODE (t))
1932 {
1933 case BIND_EXPR:
1934 t = BIND_EXPR_BODY (t);
1935 continue;
1936 case TRY_FINALLY_EXPR:
1937 case CLEANUP_POINT_EXPR:
1938 t = TREE_OPERAND (t, 0);
1939 continue;
1940 case STATEMENT_LIST:
1941 {
1942 tree_stmt_iterator i = tsi_last (t);
1943 while (!tsi_end_p (i))
1944 {
1945 tree p = tsi_stmt (i);
1946 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1947 break;
1948 tsi_prev (&i);
1949 }
1950 if (!tsi_end_p (i))
1951 {
1952 t = tsi_stmt (i);
1953 continue;
1954 }
1955 }
1956 break;
1957 case RETURN_EXPR:
1958 return;
1959 default:
1960 break;
1961 }
1962 break;
1963 }
1964 if (t == NULL_TREE)
1965 return;
1966 tree *p = &DECL_SAVED_TREE (fndecl);
1967 if (TREE_CODE (*p) == BIND_EXPR)
1968 p = &BIND_EXPR_BODY (*p);
1969
1970 location_t loc = DECL_SOURCE_LOCATION (fndecl);
1971 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1972 t = ubsan_instrument_return (loc);
1973 else
1974 {
1975 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1976 t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1977 }
1978
1979 append_to_statement_list (t, p);
1980 }
1981
1982 void
1983 cp_genericize (tree fndecl)
1984 {
1985 tree t;
1986
1987 /* Fix up the types of parms passed by invisible reference. */
1988 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1989 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1990 {
1991 /* If a function's arguments are copied to create a thunk,
1992 then DECL_BY_REFERENCE will be set -- but the type of the
1993 argument will be a pointer type, so we will never get
1994 here. */
1995 gcc_assert (!DECL_BY_REFERENCE (t));
1996 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1997 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1998 DECL_BY_REFERENCE (t) = 1;
1999 TREE_ADDRESSABLE (t) = 0;
2000 relayout_decl (t);
2001 }
2002
2003 /* Do the same for the return value. */
2004 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
2005 {
2006 t = DECL_RESULT (fndecl);
2007 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
2008 DECL_BY_REFERENCE (t) = 1;
2009 TREE_ADDRESSABLE (t) = 0;
2010 relayout_decl (t);
2011 if (DECL_NAME (t))
2012 {
2013 /* Adjust DECL_VALUE_EXPR of the original var. */
2014 tree outer = outer_curly_brace_block (current_function_decl);
2015 tree var;
2016
2017 if (outer)
2018 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2019 if (VAR_P (var)
2020 && DECL_NAME (t) == DECL_NAME (var)
2021 && DECL_HAS_VALUE_EXPR_P (var)
2022 && DECL_VALUE_EXPR (var) == t)
2023 {
2024 tree val = convert_from_reference (t);
2025 SET_DECL_VALUE_EXPR (var, val);
2026 break;
2027 }
2028 }
2029 }
2030
2031 /* If we're a clone, the body is already GIMPLE. */
2032 if (DECL_CLONED_FUNCTION_P (fndecl))
2033 return;
2034
2035 /* Allow cp_genericize calls to be nested. */
2036 tree save_bc_label[2];
2037 save_bc_label[bc_break] = bc_label[bc_break];
2038 save_bc_label[bc_continue] = bc_label[bc_continue];
2039 bc_label[bc_break] = NULL_TREE;
2040 bc_label[bc_continue] = NULL_TREE;
2041
2042 /* We do want to see every occurrence of the parms, so we can't just use
2043 walk_tree's hash functionality. */
2044 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
2045
2046 cp_maybe_instrument_return (fndecl);
2047
2048 /* Do everything else. */
2049 c_genericize (fndecl);
2050
2051 gcc_assert (bc_label[bc_break] == NULL);
2052 gcc_assert (bc_label[bc_continue] == NULL);
2053 bc_label[bc_break] = save_bc_label[bc_break];
2054 bc_label[bc_continue] = save_bc_label[bc_continue];
2055 }
2056 \f
2057 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
2058    NULL if there is in fact nothing to do.  ARG2 may be NULL if FN
2059 actually only takes one argument. */
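/* For the array case handled below, the code built is roughly
   (a sketch; p1, p2, end1 and lab denote the temporaries and the
   label created in the body, p2 only when ARG2 is given):

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     lab:
       fn (p1, p2, <converted default args>);
       p1 += sizeof (element);  p2 += sizeof (element);
       if (p1 != end1) goto lab;  */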
2060
2061 static tree
2062 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
2063 {
2064 tree defparm, parm, t;
2065 int i = 0;
2066 int nargs;
2067 tree *argarray;
2068
2069 if (fn == NULL)
2070 return NULL;
2071
2072 nargs = list_length (DECL_ARGUMENTS (fn));
2073 argarray = XALLOCAVEC (tree, nargs);
2074
2075 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
2076 if (arg2)
2077 defparm = TREE_CHAIN (defparm);
2078
2079 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
2080 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
2081 {
2082 tree inner_type = TREE_TYPE (arg1);
2083 tree start1, end1, p1;
2084 tree start2 = NULL, p2 = NULL;
2085 tree ret = NULL, lab;
2086
2087 start1 = arg1;
2088 start2 = arg2;
2089 do
2090 {
2091 inner_type = TREE_TYPE (inner_type);
2092 start1 = build4 (ARRAY_REF, inner_type, start1,
2093 size_zero_node, NULL, NULL);
2094 if (arg2)
2095 start2 = build4 (ARRAY_REF, inner_type, start2,
2096 size_zero_node, NULL, NULL);
2097 }
2098 while (TREE_CODE (inner_type) == ARRAY_TYPE);
2099 start1 = build_fold_addr_expr_loc (input_location, start1);
2100 if (arg2)
2101 start2 = build_fold_addr_expr_loc (input_location, start2);
2102
2103 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
2104 end1 = fold_build_pointer_plus (start1, end1);
2105
2106 p1 = create_tmp_var (TREE_TYPE (start1));
2107 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
2108 append_to_statement_list (t, &ret);
2109
2110 if (arg2)
2111 {
2112 p2 = create_tmp_var (TREE_TYPE (start2));
2113 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
2114 append_to_statement_list (t, &ret);
2115 }
2116
2117 lab = create_artificial_label (input_location);
2118 t = build1 (LABEL_EXPR, void_type_node, lab);
2119 append_to_statement_list (t, &ret);
2120
2121 argarray[i++] = p1;
2122 if (arg2)
2123 argarray[i++] = p2;
2124 /* Handle default arguments. */
2125 for (parm = defparm; parm && parm != void_list_node;
2126 parm = TREE_CHAIN (parm), i++)
2127 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2128 TREE_PURPOSE (parm), fn,
2129 i - is_method, tf_warning_or_error);
2130 t = build_call_a (fn, i, argarray);
2131 t = fold_convert (void_type_node, t);
2132 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2133 append_to_statement_list (t, &ret);
2134
2135 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
2136 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
2137 append_to_statement_list (t, &ret);
2138
2139 if (arg2)
2140 {
2141 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
2142 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
2143 append_to_statement_list (t, &ret);
2144 }
2145
2146 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
2147 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
2148 append_to_statement_list (t, &ret);
2149
2150 return ret;
2151 }
2152 else
2153 {
2154 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
2155 if (arg2)
2156 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2157 /* Handle default arguments. */
2158 for (parm = defparm; parm && parm != void_list_node;
2159 parm = TREE_CHAIN (parm), i++)
2160 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2161 TREE_PURPOSE (parm), fn,
2162 i - is_method, tf_warning_or_error);
2163 t = build_call_a (fn, i, argarray);
2164 t = fold_convert (void_type_node, t);
2165 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2166 }
2167 }
2168
2169 /* Return code to initialize DECL with its default constructor, or
2170 NULL if there's nothing to do. */
2171
2172 tree
2173 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2174 {
2175 tree info = CP_OMP_CLAUSE_INFO (clause);
2176 tree ret = NULL;
2177
2178 if (info)
2179 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2180
2181 return ret;
2182 }
2183
2184 /* Return code to initialize DST with a copy constructor from SRC. */
2185
2186 tree
2187 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2188 {
2189 tree info = CP_OMP_CLAUSE_INFO (clause);
2190 tree ret = NULL;
2191
2192 if (info)
2193 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2194 if (ret == NULL)
2195 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2196
2197 return ret;
2198 }
2199
2200 /* Similarly, except use an assignment operator instead. */
2201
2202 tree
2203 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2204 {
2205 tree info = CP_OMP_CLAUSE_INFO (clause);
2206 tree ret = NULL;
2207
2208 if (info)
2209 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2210 if (ret == NULL)
2211 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2212
2213 return ret;
2214 }
2215
2216 /* Return code to destroy DECL. */
2217
2218 tree
2219 cxx_omp_clause_dtor (tree clause, tree decl)
2220 {
2221 tree info = CP_OMP_CLAUSE_INFO (clause);
2222 tree ret = NULL;
2223
2224 if (info)
2225 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2226
2227 return ret;
2228 }
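/* Taken together: CP_OMP_CLAUSE_INFO is a TREE_VEC whose elements 0,
   1 and 2 hold a constructor (default or copy, depending on the
   clause), the destructor and the assignment operator.  E.g. for
   hypothetical user code such as

     struct S { S (); S (const S &); S &operator= (const S &); ~S (); };
     S s;
     #pragma omp parallel for firstprivate (s) lastprivate (s)

   the firstprivate copies are made with the copy constructor, the
   lastprivate write-back uses the assignment operator, and the
   destructor is run on the private copies afterwards.  */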
2229
2230 /* True if OpenMP should privatize what this DECL points to rather
2231 than the DECL itself. */
2232
2233 bool
2234 cxx_omp_privatize_by_reference (const_tree decl)
2235 {
2236 return (TYPE_REF_P (TREE_TYPE (decl))
2237 || is_invisiref_parm (decl));
2238 }
2239
2240 /* Return true if DECL is a const-qualified var having no mutable member.  */
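/* For example (hypothetical):

     struct A { int i; };           // a const A object qualifies
     struct B { mutable int i; };   // a const B object does not

   since a mutable member is writable even through a const object,
   such a variable cannot be treated as predetermined shared.  */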
2241 bool
2242 cxx_omp_const_qual_no_mutable (tree decl)
2243 {
2244 tree type = TREE_TYPE (decl);
2245 if (TYPE_REF_P (type))
2246 {
2247 if (!is_invisiref_parm (decl))
2248 return false;
2249 type = TREE_TYPE (type);
2250
2251 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2252 {
2253 /* NVR doesn't preserve const qualification of the
2254 variable's type. */
2255 tree outer = outer_curly_brace_block (current_function_decl);
2256 tree var;
2257
2258 if (outer)
2259 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2260 if (VAR_P (var)
2261 && DECL_NAME (decl) == DECL_NAME (var)
2262 && (TYPE_MAIN_VARIANT (type)
2263 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2264 {
2265 if (TYPE_READONLY (TREE_TYPE (var)))
2266 type = TREE_TYPE (var);
2267 break;
2268 }
2269 }
2270 }
2271
2272 if (type == error_mark_node)
2273 return false;
2274
2275 /* Variables with const-qualified type having no mutable member
2276 are predetermined shared. */
2277 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2278 return true;
2279
2280 return false;
2281 }
2282
2283 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2284 of DECL is predetermined. */
2285
2286 enum omp_clause_default_kind
2287 cxx_omp_predetermined_sharing_1 (tree decl)
2288 {
2289 /* Static data members are predetermined shared. */
2290 if (TREE_STATIC (decl))
2291 {
2292 tree ctx = CP_DECL_CONTEXT (decl);
2293 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2294 return OMP_CLAUSE_DEFAULT_SHARED;
2295
2296 if (c_omp_predefined_variable (decl))
2297 return OMP_CLAUSE_DEFAULT_SHARED;
2298 }
2299
2300   /* `this' may not be specified in data-sharing clauses, yet we
2301      still need to predetermine it as firstprivate.  */
2302 if (decl == current_class_ptr)
2303 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2304
2305 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2306 }
2307
2308 /* Likewise, but also include the artificial vars. We don't want to
2309    disallow the artificial vars from being mentioned in explicit clauses,
2310 as we use artificial vars e.g. for loop constructs with random
2311 access iterators other than pointers, but during gimplification
2312 we want to treat them as predetermined. */
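/* E.g. (hypothetical user code)

     #pragma omp parallel for
     for (std::vector<int>::iterator it = v.begin (); it != v.end (); ++it)
       ...

   is lowered using artificial variables for the iteration; those may
   legitimately appear in explicit clauses, but are otherwise handled
   as predetermined below.  */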
2313
2314 enum omp_clause_default_kind
2315 cxx_omp_predetermined_sharing (tree decl)
2316 {
2317 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2318 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2319 return ret;
2320
2321   /* Predetermine artificial variables holding integral values; those
2322      are usually the result of gimplify_one_sizepos or SAVE_EXPR
2323      gimplification.  */
2324 if (VAR_P (decl)
2325 && DECL_ARTIFICIAL (decl)
2326 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2327 && !(DECL_LANG_SPECIFIC (decl)
2328 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2329 return OMP_CLAUSE_DEFAULT_SHARED;
2330
2331 /* Similarly for typeinfo symbols. */
2332 if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2333 return OMP_CLAUSE_DEFAULT_SHARED;
2334
2335 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2336 }
2337
2338 enum omp_clause_defaultmap_kind
2339 cxx_omp_predetermined_mapping (tree decl)
2340 {
2341   /* Predetermine artificial variables holding integral values; those
2342      are usually the result of gimplify_one_sizepos or SAVE_EXPR
2343      gimplification.  */
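  /* A sketch (hypothetical user code): in

       void f (int n) { int a[n]; ... }

     with a target region using a, the artificial temporary saving the
     VLA bound created by gimplify_one_sizepos would fall under the
     firstprivate default below.  */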
2344 if (VAR_P (decl)
2345 && DECL_ARTIFICIAL (decl)
2346 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2347 && !(DECL_LANG_SPECIFIC (decl)
2348 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2349 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2350
2351 if (c_omp_predefined_variable (decl))
2352 return OMP_CLAUSE_DEFAULTMAP_TO;
2353
2354 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2355 }
2356
2357 /* Finalize an implicitly determined clause. */
2358
2359 void
2360 cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
2361 {
2362 tree decl, inner_type;
2363 bool make_shared = false;
2364
2365 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2366 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2367 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
2368 return;
2369
2370 decl = OMP_CLAUSE_DECL (c);
2371 decl = require_complete_type (decl);
2372 inner_type = TREE_TYPE (decl);
2373 if (decl == error_mark_node)
2374 make_shared = true;
2375 else if (TYPE_REF_P (TREE_TYPE (decl)))
2376 inner_type = TREE_TYPE (inner_type);
2377
2378 /* We're interested in the base element, not arrays. */
2379 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2380 inner_type = TREE_TYPE (inner_type);
2381
2382 /* Check for special function availability by building a call to one.
2383 Save the results, because later we won't be in the right context
2384 for making these queries. */
2385 bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
2386 if (!make_shared
2387 && CLASS_TYPE_P (inner_type)
2388 && cxx_omp_create_clause_info (c, inner_type, !first, first, !first,
2389 true))
2390 make_shared = true;
2391
2392 if (make_shared)
2393 {
2394 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2395 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2396 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2397 }
2398 }
2399
2400 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2401 disregarded in OpenMP construct, because it is going to be
2402 remapped during OpenMP lowering. SHARED is true if DECL
2403 is going to be shared, false if it is going to be privatized. */
2404
2405 bool
2406 cxx_omp_disregard_value_expr (tree decl, bool shared)
2407 {
2408 if (shared)
2409 return false;
2410 if (VAR_P (decl)
2411 && DECL_HAS_VALUE_EXPR_P (decl)
2412 && DECL_ARTIFICIAL (decl)
2413 && DECL_LANG_SPECIFIC (decl)
2414 && DECL_OMP_PRIVATIZED_MEMBER (decl))
2415 return true;
2416 if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2417 return true;
2418 return false;
2419 }
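/* For instance (hypothetical user code):

     struct S {
       int m;
       void f ()
       {
       #pragma omp parallel private (m)
	 m = 0;
       }
     };

   m is accessed through an artificial variable whose DECL_VALUE_EXPR
   is this->m; when privatized, that value-expr must be disregarded so
   the private copy is used rather than the member itself.  */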
2420
2421 /* Fold expression X which is used as an rvalue if RVAL is true. */
2422
2423 tree
2424 cp_fold_maybe_rvalue (tree x, bool rval)
2425 {
2426 while (true)
2427 {
2428 x = cp_fold (x);
2429 if (rval)
2430 x = mark_rvalue_use (x);
2431 if (rval && DECL_P (x)
2432 && !TYPE_REF_P (TREE_TYPE (x)))
2433 {
2434 tree v = decl_constant_value (x);
2435 if (v != x && v != error_mark_node)
2436 {
2437 x = v;
2438 continue;
2439 }
2440 }
2441 break;
2442 }
2443 return x;
2444 }
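/* E.g. given (hypothetical)

     const int x = 42;

   an rvalue use of x is folded to 42 via decl_constant_value above,
   whereas an lvalue use such as &x must keep the VAR_DECL and is
   folded with RVAL false.  */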
2445
2446 /* Fold expression X which is used as an rvalue. */
2447
2448 tree
2449 cp_fold_rvalue (tree x)
2450 {
2451 return cp_fold_maybe_rvalue (x, true);
2452 }
2453
2454 /* Perform folding on expression X. */
2455
2456 tree
2457 cp_fully_fold (tree x)
2458 {
2459 if (processing_template_decl)
2460 return x;
2461 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2462 have to call both. */
2463 if (cxx_dialect >= cxx11)
2464 {
2465 x = maybe_constant_value (x);
2466 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2467 a TARGET_EXPR; undo that here. */
2468 if (TREE_CODE (x) == TARGET_EXPR)
2469 x = TARGET_EXPR_INITIAL (x);
2470 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2471 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2472 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2473 x = TREE_OPERAND (x, 0);
2474 }
2475 return cp_fold_rvalue (x);
2476 }
2477
2478 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2479 in some cases. */
2480
2481 tree
2482 cp_fully_fold_init (tree x)
2483 {
2484 if (processing_template_decl)
2485 return x;
2486 x = cp_fully_fold (x);
2487 hash_set<tree> pset;
2488 cp_walk_tree (&x, cp_fold_r, &pset, NULL);
2489 return x;
2490 }
2491
2492 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2493 and certain changes are made to the folding done. Or should be (FIXME). We
2494 never touch maybe_const, as it is only used for the C front-end
2495 C_MAYBE_CONST_EXPR. */
2496
2497 tree
2498 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2499 {
2500 return cp_fold_maybe_rvalue (x, !lval);
2501 }
2502
2503 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2504
2505 /* Dispose of the whole FOLD_CACHE. */
2506
2507 void
2508 clear_fold_cache (void)
2509 {
2510 if (fold_cache != NULL)
2511 fold_cache->empty ();
2512 }
2513
2514 /* This function tries to fold an expression X.
2515 To avoid combinatorial explosion, folding results are kept in fold_cache.
2516 If X is invalid, we don't fold at all.
2517 For performance reasons we don't cache expressions representing a
2518 declaration or constant.
2519    Returns X or its folded variant.  */
2520
2521 static tree
2522 cp_fold (tree x)
2523 {
2524 tree op0, op1, op2, op3;
2525 tree org_x = x, r = NULL_TREE;
2526 enum tree_code code;
2527 location_t loc;
2528 bool rval_ops = true;
2529
2530 if (!x || x == error_mark_node)
2531 return x;
2532
2533 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2534 return x;
2535
2536 /* Don't bother to cache DECLs or constants. */
2537 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2538 return x;
2539
2540 if (fold_cache == NULL)
2541 fold_cache = hash_map<tree, tree>::create_ggc (101);
2542
2543 if (tree *cached = fold_cache->get (x))
2544 return *cached;
2545
2546 uid_sensitive_constexpr_evaluation_checker c;
2547
2548 code = TREE_CODE (x);
2549 switch (code)
2550 {
2551 case CLEANUP_POINT_EXPR:
2552 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2553 effects. */
2554 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2555 if (!TREE_SIDE_EFFECTS (r))
2556 x = r;
2557 break;
2558
2559 case SIZEOF_EXPR:
2560 x = fold_sizeof_expr (x);
2561 break;
2562
2563 case VIEW_CONVERT_EXPR:
2564 rval_ops = false;
2565 /* FALLTHRU */
2566 case CONVERT_EXPR:
2567 case NOP_EXPR:
2568 case NON_LVALUE_EXPR:
2569
2570 if (VOID_TYPE_P (TREE_TYPE (x)))
2571 {
2572 /* This is just to make sure we don't end up with casts to
2573 void from error_mark_node. If we just return x, then
2574 cp_fold_r might fold the operand into error_mark_node and
2575 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2576 during gimplification doesn't like such casts.
2577 	   Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2578 	   folded operand should already be in the caches, and if called
2579 	   from cp_fold_r it will modify the operand in place.  */
2580 op0 = cp_fold (TREE_OPERAND (x, 0));
2581 if (op0 == error_mark_node)
2582 x = error_mark_node;
2583 break;
2584 }
2585
2586 loc = EXPR_LOCATION (x);
2587 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2588
2589 if (code == CONVERT_EXPR
2590 && SCALAR_TYPE_P (TREE_TYPE (x))
2591 && op0 != void_node)
2592 /* During parsing we used convert_to_*_nofold; re-convert now using the
2593 folding variants, since fold() doesn't do those transformations. */
2594 x = fold (convert (TREE_TYPE (x), op0));
2595 else if (op0 != TREE_OPERAND (x, 0))
2596 {
2597 if (op0 == error_mark_node)
2598 x = error_mark_node;
2599 else
2600 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2601 }
2602 else
2603 x = fold (x);
2604
2605 /* Conversion of an out-of-range value has implementation-defined
2606 behavior; the language considers it different from arithmetic
2607 overflow, which is undefined. */
2608 if (TREE_CODE (op0) == INTEGER_CST
2609 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2610 TREE_OVERFLOW (x) = false;
2611
2612 break;
2613
2614 case INDIRECT_REF:
2615 /* We don't need the decltype(auto) obfuscation anymore. */
2616 if (REF_PARENTHESIZED_P (x))
2617 {
2618 tree p = maybe_undo_parenthesized_ref (x);
2619 return cp_fold (p);
2620 }
2621 goto unary;
2622
2623 case ADDR_EXPR:
2624 loc = EXPR_LOCATION (x);
2625 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2626
2627 /* Cope with user tricks that amount to offsetof. */
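      /* E.g. the classical offsetof idiom (hypothetical user code)

	   (size_t) &((struct S *) 0)->m

	 arrives here as an ADDR_EXPR around a COMPONENT_REF of an
	 INDIRECT_REF of a constant and is folded to the member
	 offset via fold_offsetof below.  */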
2628 if (op0 != error_mark_node
2629 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2630 {
2631 tree val = get_base_address (op0);
2632 if (val
2633 && INDIRECT_REF_P (val)
2634 && COMPLETE_TYPE_P (TREE_TYPE (val))
2635 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2636 {
2637 val = TREE_OPERAND (val, 0);
2638 STRIP_NOPS (val);
2639 val = maybe_constant_value (val);
2640 if (TREE_CODE (val) == INTEGER_CST)
2641 return fold_offsetof (op0, TREE_TYPE (x));
2642 }
2643 }
2644 goto finish_unary;
2645
2646 case REALPART_EXPR:
2647 case IMAGPART_EXPR:
2648 rval_ops = false;
2649 /* FALLTHRU */
2650 case CONJ_EXPR:
2651 case FIX_TRUNC_EXPR:
2652 case FLOAT_EXPR:
2653 case NEGATE_EXPR:
2654 case ABS_EXPR:
2655 case ABSU_EXPR:
2656 case BIT_NOT_EXPR:
2657 case TRUTH_NOT_EXPR:
2658 case FIXED_CONVERT_EXPR:
2659 unary:
2660
2661 loc = EXPR_LOCATION (x);
2662 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2663
2664 finish_unary:
2665 if (op0 != TREE_OPERAND (x, 0))
2666 {
2667 if (op0 == error_mark_node)
2668 x = error_mark_node;
2669 else
2670 {
2671 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2672 if (code == INDIRECT_REF
2673 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2674 {
2675 TREE_READONLY (x) = TREE_READONLY (org_x);
2676 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2677 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2678 }
2679 }
2680 }
2681 else
2682 x = fold (x);
2683
2684 gcc_assert (TREE_CODE (x) != COND_EXPR
2685 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2686 break;
2687
2688 case UNARY_PLUS_EXPR:
2689 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2690 if (op0 == error_mark_node)
2691 x = error_mark_node;
2692 else
2693 x = fold_convert (TREE_TYPE (x), op0);
2694 break;
2695
2696 case POSTDECREMENT_EXPR:
2697 case POSTINCREMENT_EXPR:
2698 case INIT_EXPR:
2699 case PREDECREMENT_EXPR:
2700 case PREINCREMENT_EXPR:
2701 case COMPOUND_EXPR:
2702 case MODIFY_EXPR:
2703 rval_ops = false;
2704 /* FALLTHRU */
2705 case POINTER_PLUS_EXPR:
2706 case PLUS_EXPR:
2707 case POINTER_DIFF_EXPR:
2708 case MINUS_EXPR:
2709 case MULT_EXPR:
2710 case TRUNC_DIV_EXPR:
2711 case CEIL_DIV_EXPR:
2712 case FLOOR_DIV_EXPR:
2713 case ROUND_DIV_EXPR:
2714 case TRUNC_MOD_EXPR:
2715 case CEIL_MOD_EXPR:
2716 case ROUND_MOD_EXPR:
2717 case RDIV_EXPR:
2718 case EXACT_DIV_EXPR:
2719 case MIN_EXPR:
2720 case MAX_EXPR:
2721 case LSHIFT_EXPR:
2722 case RSHIFT_EXPR:
2723 case LROTATE_EXPR:
2724 case RROTATE_EXPR:
2725 case BIT_AND_EXPR:
2726 case BIT_IOR_EXPR:
2727 case BIT_XOR_EXPR:
2728 case TRUTH_AND_EXPR:
2729 case TRUTH_ANDIF_EXPR:
2730 case TRUTH_OR_EXPR:
2731 case TRUTH_ORIF_EXPR:
2732 case TRUTH_XOR_EXPR:
2733 case LT_EXPR: case LE_EXPR:
2734 case GT_EXPR: case GE_EXPR:
2735 case EQ_EXPR: case NE_EXPR:
2736 case UNORDERED_EXPR: case ORDERED_EXPR:
2737 case UNLT_EXPR: case UNLE_EXPR:
2738 case UNGT_EXPR: case UNGE_EXPR:
2739 case UNEQ_EXPR: case LTGT_EXPR:
2740 case RANGE_EXPR: case COMPLEX_EXPR:
2741
2742 loc = EXPR_LOCATION (x);
2743 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2744 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2745
2746 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2747 {
2748 if (op0 == error_mark_node || op1 == error_mark_node)
2749 x = error_mark_node;
2750 else
2751 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2752 }
2753 else
2754 x = fold (x);
2755
2756 /* This is only needed for -Wnonnull-compare and only if
2757 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2758 generation, we do it always. */
2759 if (COMPARISON_CLASS_P (org_x))
2760 {
2761 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2762 ;
2763 else if (COMPARISON_CLASS_P (x))
2764 {
2765 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2766 TREE_NO_WARNING (x) = 1;
2767 }
2768 	  /* Otherwise give up on optimizing these; let GIMPLE folders
2769 	     optimize them later on.  */
2770 else if (op0 != TREE_OPERAND (org_x, 0)
2771 || op1 != TREE_OPERAND (org_x, 1))
2772 {
2773 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2774 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2775 TREE_NO_WARNING (x) = 1;
2776 }
2777 else
2778 x = org_x;
2779 }
2780
2781 break;
2782
2783 case VEC_COND_EXPR:
2784 case COND_EXPR:
2785 loc = EXPR_LOCATION (x);
2786 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2787 op1 = cp_fold (TREE_OPERAND (x, 1));
2788 op2 = cp_fold (TREE_OPERAND (x, 2));
2789
2790 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2791 {
2792 warning_sentinel s (warn_int_in_bool_context);
2793 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2794 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2795 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2796 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2797 }
2798 else if (VOID_TYPE_P (TREE_TYPE (x)))
2799 {
2800 if (TREE_CODE (op0) == INTEGER_CST)
2801 {
2802 	      /* If the condition is constant, fold can fold away
2803 		 the COND_EXPR.  Some statement-level uses of COND_EXPR have
2804 		 one of the branches NULL; fill those in to avoid a folding crash.  */
2805 if (!op1)
2806 op1 = build_empty_stmt (loc);
2807 if (!op2)
2808 op2 = build_empty_stmt (loc);
2809 }
2810 else
2811 {
2812 /* Otherwise, don't bother folding a void condition, since
2813 it can't produce a constant value. */
2814 if (op0 != TREE_OPERAND (x, 0)
2815 || op1 != TREE_OPERAND (x, 1)
2816 || op2 != TREE_OPERAND (x, 2))
2817 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2818 break;
2819 }
2820 }
2821
2822 if (op0 != TREE_OPERAND (x, 0)
2823 || op1 != TREE_OPERAND (x, 1)
2824 || op2 != TREE_OPERAND (x, 2))
2825 {
2826 if (op0 == error_mark_node
2827 || op1 == error_mark_node
2828 || op2 == error_mark_node)
2829 x = error_mark_node;
2830 else
2831 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2832 }
2833 else
2834 x = fold (x);
2835
2836 /* A COND_EXPR might have incompatible types in branches if one or both
2837 arms are bitfields. If folding exposed such a branch, fix it up. */
2838 if (TREE_CODE (x) != code
2839 && x != error_mark_node
2840 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2841 x = fold_convert (TREE_TYPE (org_x), x);
2842
2843 break;
2844
2845 case CALL_EXPR:
2846 {
2847 int sv = optimize, nw = sv;
2848 tree callee = get_callee_fndecl (x);
2849
2850 /* Some built-in function calls will be evaluated at compile-time in
2851 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2852 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2853 if (callee && fndecl_built_in_p (callee) && !optimize
2854 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2855 && current_function_decl
2856 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2857 nw = 1;
2858
2859 /* Defer folding __builtin_is_constant_evaluated. */
2860 if (callee
2861 && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
2862 BUILT_IN_FRONTEND))
2863 break;
2864
2865 if (callee
2866 && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2867 BUILT_IN_FRONTEND))
2868 {
2869 x = fold_builtin_source_location (EXPR_LOCATION (x));
2870 break;
2871 }
2872
2873 bool changed = false;
2874 int m = call_expr_nargs (x);
2875 for (int i = 0; i < m; i++)
2876 {
2877 r = cp_fold (CALL_EXPR_ARG (x, i));
2878 if (r != CALL_EXPR_ARG (x, i))
2879 {
2880 if (r == error_mark_node)
2881 {
2882 x = error_mark_node;
2883 break;
2884 }
2885 if (!changed)
2886 x = copy_node (x);
2887 CALL_EXPR_ARG (x, i) = r;
2888 changed = true;
2889 }
2890 }
2891 if (x == error_mark_node)
2892 break;
2893
2894 optimize = nw;
2895 r = fold (x);
2896 optimize = sv;
2897
2898 if (TREE_CODE (r) != CALL_EXPR)
2899 {
2900 x = cp_fold (r);
2901 break;
2902 }
2903
2904 optimize = nw;
2905
2906 /* Invoke maybe_constant_value for functions declared
2907 constexpr and not called with AGGR_INIT_EXPRs.
2908 TODO:
2909 Do constexpr expansion of expressions where the call itself is not
2910 constant, but the call followed by an INDIRECT_REF is. */
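	/* E.g. given (hypothetical user code)

	     constexpr int sq (int i) { return i * i; }
	     int a = sq (4);

	   the call folds to 16 here, provided -fno-inline is not in
	   effect.  */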
2911 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2912 && !flag_no_inline)
2913 r = maybe_constant_value (x);
2914 optimize = sv;
2915
2916 if (TREE_CODE (r) != CALL_EXPR)
2917 {
2918 if (DECL_CONSTRUCTOR_P (callee))
2919 {
2920 loc = EXPR_LOCATION (x);
2921 tree s = build_fold_indirect_ref_loc (loc,
2922 CALL_EXPR_ARG (x, 0));
2923 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2924 }
2925 x = r;
2926 break;
2927 }
2928
2929 break;
2930 }
2931
2932 case CONSTRUCTOR:
2933 {
2934 unsigned i;
2935 constructor_elt *p;
2936 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2937 vec<constructor_elt, va_gc> *nelts = NULL;
2938 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2939 {
2940 tree op = cp_fold (p->value);
2941 if (op != p->value)
2942 {
2943 if (op == error_mark_node)
2944 {
2945 x = error_mark_node;
2946 vec_free (nelts);
2947 break;
2948 }
2949 if (nelts == NULL)
2950 nelts = elts->copy ();
2951 (*nelts)[i].value = op;
2952 }
2953 }
2954 if (nelts)
2955 {
2956 x = build_constructor (TREE_TYPE (x), nelts);
2957 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2958 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2959 }
2960 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2961 x = fold (x);
2962 break;
2963 }
2964 case TREE_VEC:
2965 {
2966 bool changed = false;
2967 int n = TREE_VEC_LENGTH (x);
2968
2969 for (int i = 0; i < n; i++)
2970 {
2971 tree op = cp_fold (TREE_VEC_ELT (x, i));
2972 if (op != TREE_VEC_ELT (x, i))
2973 {
2974 if (!changed)
2975 x = copy_node (x);
2976 TREE_VEC_ELT (x, i) = op;
2977 changed = true;
2978 }
2979 }
2980 }
2981
2982 break;
2983
2984 case ARRAY_REF:
2985 case ARRAY_RANGE_REF:
2986
2987 loc = EXPR_LOCATION (x);
2988 op0 = cp_fold (TREE_OPERAND (x, 0));
2989 op1 = cp_fold (TREE_OPERAND (x, 1));
2990 op2 = cp_fold (TREE_OPERAND (x, 2));
2991 op3 = cp_fold (TREE_OPERAND (x, 3));
2992
2993 if (op0 != TREE_OPERAND (x, 0)
2994 || op1 != TREE_OPERAND (x, 1)
2995 || op2 != TREE_OPERAND (x, 2)
2996 || op3 != TREE_OPERAND (x, 3))
2997 {
2998 if (op0 == error_mark_node
2999 || op1 == error_mark_node
3000 || op2 == error_mark_node
3001 || op3 == error_mark_node)
3002 x = error_mark_node;
3003 else
3004 {
3005 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
3006 TREE_READONLY (x) = TREE_READONLY (org_x);
3007 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3008 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3009 }
3010 }
3011
3012 x = fold (x);
3013 break;
3014
3015 case SAVE_EXPR:
3016 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3017 	 folding, evaluates to an invariant.  In that case there is no
3018 	 need to wrap this folded tree with a SAVE_EXPR.  */
3019 r = cp_fold (TREE_OPERAND (x, 0));
3020 if (tree_invariant_p (r))
3021 x = r;
3022 break;
3023
3024 default:
3025 return org_x;
3026 }
3027
3028 if (EXPR_P (x) && TREE_CODE (x) == code)
3029 {
3030 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3031 TREE_NO_WARNING (x) = TREE_NO_WARNING (org_x);
3032 }
3033
3034 if (!c.evaluation_restricted_p ())
3035 {
3036 fold_cache->put (org_x, x);
3037       /* Prevent us from trying to fold an already folded result again.  */
3038 if (x != org_x)
3039 fold_cache->put (x, x);
3040 }
3041
3042 return x;
3043 }
3044
3045 /* Look up either "hot" or "cold" in attribute list LIST. */
3046
3047 tree
3048 lookup_hotness_attribute (tree list)
3049 {
3050 for (; list; list = TREE_CHAIN (list))
3051 {
3052 tree name = get_attribute_name (list);
3053 if (is_attribute_p ("hot", name)
3054 || is_attribute_p ("cold", name)
3055 || is_attribute_p ("likely", name)
3056 || is_attribute_p ("unlikely", name))
3057 break;
3058 }
3059 return list;
3060 }
3061
3062 /* Remove both "hot" and "cold" attributes from LIST. */
3063
3064 static tree
3065 remove_hotness_attribute (tree list)
3066 {
3067 list = remove_attribute ("hot", list);
3068 list = remove_attribute ("cold", list);
3069 list = remove_attribute ("likely", list);
3070 list = remove_attribute ("unlikely", list);
3071 return list;
3072 }
3073
3074 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3075 PREDICT_EXPR. */
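/* For instance (hypothetical user code):

     if (p)
       [[likely]] f ();

   the statement-level attribute is turned into a PREDICT_EXPR with
   PRED_HOT_LABEL and TAKEN, added just before the annotated
   statement.  */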
3076
3077 tree
3078 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
3079 {
3080 if (std_attrs == error_mark_node)
3081 return std_attrs;
3082 if (tree attr = lookup_hotness_attribute (std_attrs))
3083 {
3084 tree name = get_attribute_name (attr);
3085 bool hot = (is_attribute_p ("hot", name)
3086 || is_attribute_p ("likely", name));
3087 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3088 hot ? TAKEN : NOT_TAKEN);
3089 SET_EXPR_LOCATION (pred, attrs_loc);
3090 add_stmt (pred);
3091 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3092 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3093 get_attribute_name (other), name);
3094 std_attrs = remove_hotness_attribute (std_attrs);
3095 }
3096 return std_attrs;
3097 }
3098
3099 /* Helper of fold_builtin_source_location: return the
3100 std::source_location::__impl type after performing verification
3101 on it. LOC is used for reporting any errors. */
3102
3103 static tree
3104 get_source_location_impl_type (location_t loc)
3105 {
3106 tree name = get_identifier ("source_location");
3107 tree decl = lookup_qualified_name (std_node, name);
3108 if (TREE_CODE (decl) != TYPE_DECL)
3109 {
3110 auto_diagnostic_group d;
3111 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3112 qualified_name_lookup_error (std_node, name, decl, loc);
3113 else
3114 error_at (loc, "%qD is not a type", decl);
3115 return error_mark_node;
3116 }
3117 name = get_identifier ("__impl");
3118 tree type = TREE_TYPE (decl);
3119 decl = lookup_qualified_name (type, name);
3120 if (TREE_CODE (decl) != TYPE_DECL)
3121 {
3122 auto_diagnostic_group d;
3123 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3124 qualified_name_lookup_error (type, name, decl, loc);
3125 else
3126 error_at (loc, "%qD is not a type", decl);
3127 return error_mark_node;
3128 }
3129 type = TREE_TYPE (decl);
3130 if (TREE_CODE (type) != RECORD_TYPE)
3131 {
3132 error_at (loc, "%qD is not a class type", decl);
3133 return error_mark_node;
3134 }
3135
3136 int cnt = 0;
3137 for (tree field = TYPE_FIELDS (type);
3138 (field = next_initializable_field (field)) != NULL_TREE;
3139 field = DECL_CHAIN (field))
3140 {
3141 if (DECL_NAME (field) != NULL_TREE)
3142 {
3143 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3144 if (strcmp (n, "_M_file_name") == 0
3145 || strcmp (n, "_M_function_name") == 0)
3146 {
3147 if (TREE_TYPE (field) != const_string_type_node)
3148 {
3149 error_at (loc, "%qD does not have %<const char *%> type",
3150 field);
3151 return error_mark_node;
3152 }
3153 cnt++;
3154 continue;
3155 }
3156 else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
3157 {
3158 if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
3159 {
3160 error_at (loc, "%qD does not have integral type", field);
3161 return error_mark_node;
3162 }
3163 cnt++;
3164 continue;
3165 }
3166 }
3167 cnt = 0;
3168 break;
3169 }
3170 if (cnt != 4)
3171 {
3172 error_at (loc, "%<std::source_location::__impl%> does not contain only "
3173 "non-static data members %<_M_file_name%>, "
3174 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3175 return error_mark_node;
3176 }
3177 return build_qualified_type (type, TYPE_QUAL_CONST);
3178 }
3179
3180 /* Type for the source_location_table hash table.  */
3181 struct GTY((for_user)) source_location_table_entry {
3182 location_t loc;
3183 unsigned uid;
3184 tree var;
3185 };
3186
3187 /* Traits class for the source_location_table hash table below.  */
3188
3189 struct source_location_table_entry_hash
3190 : ggc_remove <source_location_table_entry>
3191 {
3192 typedef source_location_table_entry value_type;
3193 typedef source_location_table_entry compare_type;
3194
3195 static hashval_t
3196 hash (const source_location_table_entry &ref)
3197 {
3198 inchash::hash hstate (0);
3199 hstate.add_int (ref.loc);
3200 hstate.add_int (ref.uid);
3201 return hstate.end ();
3202 }
3203
3204 static bool
3205 equal (const source_location_table_entry &ref1,
3206 const source_location_table_entry &ref2)
3207 {
3208 return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3209 }
3210
3211 static void
3212 mark_deleted (source_location_table_entry &ref)
3213 {
3214 ref.loc = UNKNOWN_LOCATION;
3215 ref.uid = -1U;
3216 ref.var = NULL_TREE;
3217 }
3218
3219 static const bool empty_zero_p = true;
3220
3221 static void
3222 mark_empty (source_location_table_entry &ref)
3223 {
3224 ref.loc = UNKNOWN_LOCATION;
3225 ref.uid = 0;
3226 ref.var = NULL_TREE;
3227 }
3228
3229 static bool
3230 is_deleted (const source_location_table_entry &ref)
3231 {
3232 return (ref.loc == UNKNOWN_LOCATION
3233 && ref.uid == -1U
3234 && ref.var == NULL_TREE);
3235 }
3236
3237 static bool
3238 is_empty (const source_location_table_entry &ref)
3239 {
3240 return (ref.loc == UNKNOWN_LOCATION
3241 && ref.uid == 0
3242 && ref.var == NULL_TREE);
3243 }
3244 };
3245
3246 static GTY(()) hash_table <source_location_table_entry_hash>
3247 *source_location_table;
3248 static GTY(()) unsigned int source_location_id;
3249
3250 /* Fold __builtin_source_location () call. LOC is the location
3251 of the call. */
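/* For example, a defaulted std::source_location::current () argument
   expands to __builtin_source_location (), which folds here to the
   address of a static constant __impl object, conceptually (a sketch;
   name and field order are illustrative):

     static const std::source_location::__impl Lsrc_loc0
       = { "t.cc", "int f()", 3, 14 };

   Calls with equal location and function share one table entry and
   hence one variable.  */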
3252
3253 tree
3254 fold_builtin_source_location (location_t loc)
3255 {
3256 if (source_location_impl == NULL_TREE)
3257 {
3258 auto_diagnostic_group d;
3259 source_location_impl = get_source_location_impl_type (loc);
3260 if (source_location_impl == error_mark_node)
3261 inform (loc, "evaluating %qs", "__builtin_source_location");
3262 }
3263 if (source_location_impl == error_mark_node)
3264 return build_zero_cst (const_ptr_type_node);
3265 if (source_location_table == NULL)
3266 source_location_table
3267 = hash_table <source_location_table_entry_hash>::create_ggc (64);
3268 const line_map_ordinary *map;
3269 source_location_table_entry entry;
3270 entry.loc
3271 = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
3272 &map);
3273 entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
3274 entry.var = error_mark_node;
3275 source_location_table_entry *entryp
3276 = source_location_table->find_slot (entry, INSERT);
3277 tree var;
3278 if (entryp->var)
3279 var = entryp->var;
3280 else
3281 {
3282 char tmp_name[32];
3283 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
3284 var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
3285 source_location_impl);
3286 TREE_STATIC (var) = 1;
3287 TREE_PUBLIC (var) = 0;
3288 DECL_ARTIFICIAL (var) = 1;
3289 DECL_IGNORED_P (var) = 1;
3290 DECL_EXTERNAL (var) = 0;
3291 DECL_DECLARED_CONSTEXPR_P (var) = 1;
3292 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
3293 layout_decl (var, 0);
3294
3295 vec<constructor_elt, va_gc> *v = NULL;
3296 vec_alloc (v, 4);
3297 for (tree field = TYPE_FIELDS (source_location_impl);
3298 (field = next_initializable_field (field)) != NULL_TREE;
3299 field = DECL_CHAIN (field))
3300 {
3301 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3302 tree val = NULL_TREE;
3303 if (strcmp (n, "_M_file_name") == 0)
3304 {
3305 if (const char *fname = LOCATION_FILE (loc))
3306 {
3307 fname = remap_macro_filename (fname);
3308 val = build_string_literal (strlen (fname) + 1, fname);
3309 }
3310 else
3311 val = build_string_literal (1, "");
3312 }
3313 else if (strcmp (n, "_M_function_name") == 0)
3314 {
3315 const char *name = "";
3316
3317 if (current_function_decl)
3318 name = cxx_printable_name (current_function_decl, 0);
3319
3320 val = build_string_literal (strlen (name) + 1, name);
3321 }
3322 else if (strcmp (n, "_M_line") == 0)
3323 val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3324 else if (strcmp (n, "_M_column") == 0)
3325 val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3326 else
3327 gcc_unreachable ();
3328 CONSTRUCTOR_APPEND_ELT (v, field, val);
3329 }
3330
3331 tree ctor = build_constructor (source_location_impl, v);
3332 TREE_CONSTANT (ctor) = 1;
3333 TREE_STATIC (ctor) = 1;
3334 DECL_INITIAL (var) = ctor;
3335 varpool_node::finalize_decl (var);
3336 *entryp = entry;
3337 entryp->var = var;
3338 }
3339
3340 return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
3341 }
3342
3343 #include "gt-cp-cp-gimplify.h"