re PR c++/71528 (multiple extern reference declarations produce uninitialized access)
[gcc.git] gcc/cp/cp-gimplify.c
/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2016 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"
#include "cp-cilkplus.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the
   current context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
   *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
   TARGET_EXPR.  *PRE_P and *POST_P are gimple sequences from the caller
   of gimplify_cilk_spawn.  */

static void
cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
                                            gimple_seq *post_p)
{
  int ii = 0;

  cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p);
  if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
    for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
      gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
                     is_gimple_reg, fb_rvalue);
}


/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */
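/* On failure, the EH_FILTER_EXPR's handler calls call_unexpected_node
   (__cxa_call_unexpected) with the exception pointer.  */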

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */
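/* A missing THEN or ELSE clause becomes an empty statement, and if the
   condition is constant and the dead arm has no side effects, only the
   live arm is emitted.  */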

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

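/* Roughly, for a while or for loop (COND_IS_FIRST) the result is

     loop:
       if (!COND) goto break_label;
       BODY;
     continue_label:
       INCR;
       goto loop;
     break_label:

   built from a LOOP_EXPR and the break/continue labels above; for a
   do-while loop the exit test follows INCR instead.  Constant conditions
   are special-cased below.  */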
static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees,
                      data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
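  /* Emit a hint that the continue edge is rarely taken, then jump to
     the continue label.  */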
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR
         and replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in
         this case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
        /* Handle aggregate NSDMI.  */
        replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */
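/* The body is wrapped as "try { BODY } catch (...) { terminate (); }",
   i.e. a GIMPLE_TRY whose handler is a GIMPLE_EH_MUST_NOT_THROW node
   referencing the terminate function.  */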

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0
         && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        cp_genericize_tree (expr_p);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun))
        {
          if (cilk_cp_detect_spawn_and_unwrap (expr_p))
            {
              cilk_cp_gimplify_call_params_in_spawned_fn (expr_p,
                                                          pre_p, post_p);
              return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
            }
          if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
            return GS_ERROR;
        }

      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        if (fn_contains_cilk_spawn_p (cfun)
            && cilk_cp_detect_spawn_and_unwrap (expr_p)
            && !seen_error ())
          {
            cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
            return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
          }
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert (fn_contains_cilk_spawn_p (cfun)
                  && cilk_cp_detect_spawn_and_unwrap (expr_p));

      if (!seen_error ())
        {
          cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        }
      return GS_ERROR;

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
          && cilk_cp_detect_spawn_and_unwrap (expr_p)
          && !seen_error ())
        {
          cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        }
      /* DR 1030 says that we need to evaluate the elements of an
         initializer-list in forward order even when it's used as arguments
         to a constructor.  So if the target wants to evaluate them in
         reverse order and there's more than one argument other than 'this',
         gimplify them in order.  */
      ret = GS_OK;
      if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
          && call_expr_nargs (*expr_p) > 2)
        {
          int nargs = call_expr_nargs (*expr_p);
          location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
          for (int i = 1; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

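/* Return true if T is a parameter or return value that was forced to be
   passed or returned by invisible reference (because its class type is
   TREE_ADDRESSABLE), as flagged by DECL_BY_REFERENCE.  */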
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs of the two int-tree-map entries are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure the copy ctor and
                 dtor are instantiated now, because during gimplification
                 it will already be too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
};

/* Perform any pre-gimplification folding of C++ front-end trees to
   GENERIC.
   Note: The folding of non-OpenMP cases is something to move into the
   middle end.  For now, since most folding is done only on GENERIC in
   fold-const, we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
         return the same tree, whose subtrees were already walked the first
         time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
      || code == OACC_LOOP)
    {
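      /* Fold the pieces of an OpenMP/Cilk Plus/OpenACC loop separately:
         walking operand by operand (presumably) keeps the canonical
         "iteration variable <op> expression" shape of the condition and
         increment intact for the gimplifier.  */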
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR
                        || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Otherwise, do dereference invisible reference parms.  */
  if (is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
          && omp_var_to_track (TREE_OPERAND (stmt, 0)))
        omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        else if (wtd->omp_ctx != NULL)
          {
            /* Private clause doesn't cause any references to the
               var in outer contexts, avoid calling
               omp_cxx_notice_variable for it.  */
            struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
            wtd->omp_ctx = NULL;
            cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                          data, NULL);
            wtd->omp_ctx = old;
            *walk_subtrees = 0;
          }
        break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        /* Don't dereference an invisiref in reduction clause's
           OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
           still needs to be genericized.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_REDUCTION_INIT (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                            cp_genericize_r, data, NULL);
            if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                          CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                 : TRY_FINALLY_EXPR,
                          void_type_node,
                          CLEANUP_BODY (stmt),
                          CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (flag_sanitize
          & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing BIND_EXPR that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack.exists ())
        {
          int i;
          for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == DECL_EXPR)
    {
      tree d = DECL_EXPR_DECL (stmt);
      if (TREE_CODE (d) == VAR_DECL)
        gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL
           || TREE_CODE (stmt) == OMP_TASK
           || TREE_CODE (stmt) == OMP_TASKLOOP)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          case OMP_CLAUSE_SHARED:
          case OMP_CLAUSE_PRIVATE:
          case OMP_CLAUSE_FIRSTPRIVATE:
          case OMP_CLAUSE_LASTPRIVATE:
            decl = OMP_CLAUSE_DECL (c);
            if (decl == error_mark_node || !omp_var_to_track (decl))
              break;
            n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
            if (n != NULL)
              break;
            splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                               ? OMP_CLAUSE_DEFAULT_SHARED
                               : OMP_CLAUSE_DEFAULT_PRIVATE);
            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
                && omp_ctx.outer)
              omp_cxx_notice_variable (omp_ctx.outer, decl);
            break;
          case OMP_CLAUSE_DEFAULT:
            if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
              omp_ctx.default_shared = true;
          default:
            break;
          }
      if (TREE_CODE (stmt) == OMP_TASKLOOP)
        genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      else
        cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == TRY_BLOCK)
    {
      *walk_subtrees = 0;
      tree try_block = wtd->try_block;
      wtd->try_block = stmt;
      cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
      wtd->try_block = try_block;
      cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
    }
  else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
    {
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
    }
  else if (TREE_CODE (stmt) == THROW_EXPR)
    {
      location_t loc = location_of (stmt);
      if (TREE_NO_WARNING (stmt))
        /* Never mind.  */;
      else if (wtd->try_block)
        {
          if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
              && warning_at (loc, OPT_Wterminate,
                             "throw will always call terminate()")
              && cxx_dialect >= cxx11
              && DECL_DESTRUCTOR_P (current_function_decl))
            inform (loc, "in C++11 destructors default to noexcept");
        }
      else
        {
          if (warn_cxx11_compat && cxx_dialect < cxx11
              && DECL_DESTRUCTOR_P (current_function_decl)
              && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                  == NULL_TREE)
              && (get_defaulted_eh_spec (current_function_decl)
                  == empty_except_spec))
            warning_at (loc, OPT_Wc__11_compat,
                        "in C++11 this throw will terminate because "
                        "destructors default to noexcept");
        }
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
           || TREE_CODE (stmt) == OMP_SIMD
           || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == PTRMEM_CST)
    {
      /* By the time we get here we're handing off to the back end, so we
         don't need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
    }
  else if ((flag_sanitize
            & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
           && !wtd->no_sanitize_p)
    {
      if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
          && TREE_CODE (stmt) == NOP_EXPR
          && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
        ubsan_maybe_instrument_reference (stmt);
      else if (TREE_CODE (stmt) == CALL_EXPR)
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && POINTER_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
        }
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (flag_sanitize & SANITIZE_VPTR)
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return statement,
   add ubsan instrumentation code to verify at runtime that it doesn't
   fall off the end without returning.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
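  /* Walk down to the last statement of the body; if that is already a
     RETURN_EXPR, there is nothing to instrument.  */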
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  t = DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (t) == BIND_EXPR
      && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
    {
      tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
      t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
      tsi_link_after (&i, t, TSI_NEW_STMT);
    }
}

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl)
      = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

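  /* For arrays, apply FN elementwise; a sketch of the generated code:

       p1 = &ARG1[0]...[0];  end1 = p1 + sizeof (ARG1);
       do { FN (p1[, p2], defaults...); p1 += sizeof (elt);
            [p2 += sizeof (elt);] }
       while (p1 != end1);  */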
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i,
                                           tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

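/* In the hooks below, CP_OMP_CLAUSE_INFO is a TREE_VEC (apparently set
   up by cxx_omp_create_clause_info): element 0 holds the constructor to
   use (default or copy), element 1 the destructor and element 2 the
   copy-assignment operator for the clause's class type.  */
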
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
          || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified variable having no mutable
   member.  */
1767 bool
1768 cxx_omp_const_qual_no_mutable (tree decl)
1769 {
1770 tree type = TREE_TYPE (decl);
1771 if (TREE_CODE (type) == REFERENCE_TYPE)
1772 {
1773 if (!is_invisiref_parm (decl))
1774 return false;
1775 type = TREE_TYPE (type);
1776
1777 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1778 {
1779 /* NVR doesn't preserve const qualification of the
1780 variable's type. */
1781 tree outer = outer_curly_brace_block (current_function_decl);
1782 tree var;
1783
1784 if (outer)
1785 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1786 if (DECL_NAME (decl) == DECL_NAME (var)
1787 && (TYPE_MAIN_VARIANT (type)
1788 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1789 {
1790 if (TYPE_READONLY (TREE_TYPE (var)))
1791 type = TREE_TYPE (var);
1792 break;
1793 }
1794 }
1795 }
1796
1797 if (type == error_mark_node)
1798 return false;
1799
1800 /* Variables with const-qualified type having no mutable member
1801 are predetermined shared. */
1802 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1803 return true;
1804
1805 return false;
1806 }
1807
1808 /* True if OpenMP sharing attribute of DECL is predetermined. */
1809
1810 enum omp_clause_default_kind
1811 cxx_omp_predetermined_sharing (tree decl)
1812 {
1813 /* Static data members are predetermined shared. */
1814 if (TREE_STATIC (decl))
1815 {
1816 tree ctx = CP_DECL_CONTEXT (decl);
1817 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1818 return OMP_CLAUSE_DEFAULT_SHARED;
1819 }
1820
1821 /* Const qualified vars having no mutable member are predetermined
1822 shared. */
1823 if (cxx_omp_const_qual_no_mutable (decl))
1824 return OMP_CLAUSE_DEFAULT_SHARED;
1825
1826 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1827 }
1828
1829 /* Finalize an implicitly determined clause. */
1830
1831 void
1832 cxx_omp_finish_clause (tree c, gimple_seq *)
1833 {
1834 tree decl, inner_type;
1835 bool make_shared = false;
1836
1837 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1838 return;
1839
1840 decl = OMP_CLAUSE_DECL (c);
1841 decl = require_complete_type (decl);
1842 inner_type = TREE_TYPE (decl);
1843 if (decl == error_mark_node)
1844 make_shared = true;
1845 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1846 inner_type = TREE_TYPE (inner_type);
1847
1848 /* We're interested in the base element, not arrays. */
1849 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1850 inner_type = TREE_TYPE (inner_type);
1851
1852 /* Check for special function availability by building a call to one.
1853 Save the results, because later we won't be in the right context
1854 for making these queries. */
1855 if (!make_shared
1856 && CLASS_TYPE_P (inner_type)
1857 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
1858 make_shared = true;
1859
1860 if (make_shared)
1861 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
1862 }
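
/* A sketch of the effect (assumed, not from the original source): given

     struct T { T (); T (const T &) = delete; };
     T t;
     #pragma omp task
     use (t);

   an implicitly determined firstprivate (t) cannot be instantiated because
   T's copy constructor is unavailable; cxx_omp_create_clause_info reports
   the failure and the clause is downgraded to shared instead.  */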
1863
1864 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1865 disregarded in OpenMP construct, because it is going to be
1866 remapped during OpenMP lowering. SHARED is true if DECL
1867 is going to be shared, false if it is going to be privatized. */
1868
1869 bool
1870 cxx_omp_disregard_value_expr (tree decl, bool shared)
1871 {
1872 return !shared
1873 && VAR_P (decl)
1874 && DECL_HAS_VALUE_EXPR_P (decl)
1875 && DECL_ARTIFICIAL (decl)
1876 && DECL_LANG_SPECIFIC (decl)
1877 && DECL_OMP_PRIVATIZED_MEMBER (decl);
1878 }
1879
1880 /* Perform folding on expression X. */
1881
1882 tree
1883 cp_fully_fold (tree x)
1884 {
1885 return cp_fold (x);
1886 }
1887
1888 /* Fold expression X which is used as an rvalue if RVAL is true. */
1889
1890 static tree
1891 cp_fold_maybe_rvalue (tree x, bool rval)
1892 {
1893 while (true)
1894 {
1895 x = cp_fold (x);
1896 if (rval && DECL_P (x))
1897 {
1898 tree v = decl_constant_value (x);
1899 if (v != x && v != error_mark_node)
1900 {
1901 x = v;
1902 continue;
1903 }
1904 }
1905 break;
1906 }
1907 return x;
1908 }
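
/* As an illustration (assumed): for

     const int n = 42;
     ... n + 1 ...

   an rvalue use of N may be replaced by decl_constant_value's 42, and the
   loop then re-folds the result in case the substitution enables further
   simplification, e.g. folding the whole of N + 1 to 43.  */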
1909
1910 /* Fold expression X which is used as an rvalue. */
1911
1912 static tree
1913 cp_fold_rvalue (tree x)
1914 {
1915 return cp_fold_maybe_rvalue (x, true);
1916 }
1917
1918 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
1919 and certain changes are (or should be -- FIXME) made to the folding done.
1920 We never touch MAYBE_CONST, as it is only used for the C front end's
1921 C_MAYBE_CONST_EXPR. */
1922
1923 tree
1924 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
1925 {
1926 /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
1927 INTEGER_CST. */
1928 return cp_fold_rvalue (x);
1929 }
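
/* For example (illustrative): an enumerator such as `e' in

     enum { e = 3 };

   is represented by a CONST_DECL in C++, and folding it as an rvalue
   yields the INTEGER_CST 3 that c-family callers of c_fully_fold
   expect.  */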
1930
1931 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
1932
1933 /* Dispose of the whole FOLD_CACHE. */
1934
1935 void
1936 clear_fold_cache (void)
1937 {
1938 if (fold_cache != NULL)
1939 fold_cache->empty ();
1940 }
1941
1942 /* This function tries to fold an expression X.
1943 To avoid combinatorial explosion, folding results are kept in fold_cache.
1944 If we are processing a template or X is invalid, we don't fold at all.
1945 For performance reasons we don't cache expressions representing a
1946 declaration or constant.
1947 Returns X or its folded variant. */
1948
1949 static tree
1950 cp_fold (tree x)
1951 {
1952 tree op0, op1, op2, op3;
1953 tree org_x = x, r = NULL_TREE;
1954 enum tree_code code;
1955 location_t loc;
1956 bool rval_ops = true;
1957
1958 if (!x || x == error_mark_node)
1959 return x;
1960
1961 if (processing_template_decl
1962 || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
1963 return x;
1964
1965 /* Don't bother to cache DECLs or constants. */
1966 if (DECL_P (x) || CONSTANT_CLASS_P (x))
1967 return x;
1968
1969 if (fold_cache == NULL)
1970 fold_cache = hash_map<tree, tree>::create_ggc (101);
1971
1972 if (tree *cached = fold_cache->get (x))
1973 return *cached;
1974
1975 code = TREE_CODE (x);
1976 switch (code)
1977 {
1978 case SIZEOF_EXPR:
1979 x = fold_sizeof_expr (x);
1980 break;
1981
1982 case VIEW_CONVERT_EXPR:
1983 rval_ops = false; /* FALLTHRU */
1984 case CONVERT_EXPR:
1985 case NOP_EXPR:
1986 case NON_LVALUE_EXPR:
1987
1988 if (VOID_TYPE_P (TREE_TYPE (x)))
1989 return x;
1990
1991 loc = EXPR_LOCATION (x);
1992 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
1993
1994 if (code == CONVERT_EXPR
1995 && SCALAR_TYPE_P (TREE_TYPE (x))
1996 && op0 != void_node)
1997 /* During parsing we used convert_to_*_nofold; re-convert now using the
1998 folding variants, since fold() doesn't do those transformations. */
1999 x = fold (convert (TREE_TYPE (x), op0));
2000 else if (op0 != TREE_OPERAND (x, 0))
2001 {
2002 if (op0 == error_mark_node)
2003 x = error_mark_node;
2004 else
2005 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2006 }
2007 else
2008 x = fold (x);
2009
2010 /* Conversion of an out-of-range value has implementation-defined
2011 behavior; the language considers it different from arithmetic
2012 overflow, which is undefined. */
2013 if (TREE_CODE (op0) == INTEGER_CST
2014 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2015 TREE_OVERFLOW (x) = false;
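
/* Illustrative case (assumed): folding (signed char) 300 yields an
   INTEGER_CST that fold-const marks with TREE_OVERFLOW; since the
   operand 300 itself did not overflow, the flag is cleared here -- the
   narrowing is implementation-defined, not undefined arithmetic
   overflow.  */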
2016
2017 break;
2018
2019 case INDIRECT_REF:
2020 /* We don't need the decltype(auto) obfuscation anymore. */
2021 if (REF_PARENTHESIZED_P (x))
2022 {
2023 tree p = maybe_undo_parenthesized_ref (x);
2024 return cp_fold (p);
2025 }
2026 goto unary;
2027
2028 case ADDR_EXPR:
2029 case REALPART_EXPR:
2030 case IMAGPART_EXPR:
2031 rval_ops = false; /* FALLTHRU */
2032 case CONJ_EXPR:
2033 case FIX_TRUNC_EXPR:
2034 case FLOAT_EXPR:
2035 case NEGATE_EXPR:
2036 case ABS_EXPR:
2037 case BIT_NOT_EXPR:
2038 case TRUTH_NOT_EXPR:
2039 case FIXED_CONVERT_EXPR:
2040 unary:
2041
2042 loc = EXPR_LOCATION (x);
2043 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2044
2045 if (op0 != TREE_OPERAND (x, 0))
2046 {
2047 if (op0 == error_mark_node)
2048 x = error_mark_node;
2049 else
2050 {
2051 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2052 if (code == INDIRECT_REF
2053 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2054 {
2055 TREE_READONLY (x) = TREE_READONLY (org_x);
2056 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2057 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2058 }
2059 }
2060 }
2061 else
2062 x = fold (x);
2063
2064 gcc_assert (TREE_CODE (x) != COND_EXPR
2065 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2066 break;
2067
2068 case UNARY_PLUS_EXPR:
2069 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2070 if (op0 == error_mark_node)
2071 x = error_mark_node;
2072 else
2073 x = fold_convert (TREE_TYPE (x), op0);
2074 break;
2075
2076 case POSTDECREMENT_EXPR:
2077 case POSTINCREMENT_EXPR:
2078 case INIT_EXPR:
2079 case PREDECREMENT_EXPR:
2080 case PREINCREMENT_EXPR:
2081 case COMPOUND_EXPR:
2082 case MODIFY_EXPR:
2083 rval_ops = false; /* FALLTHRU */
2084 case POINTER_PLUS_EXPR:
2085 case PLUS_EXPR:
2086 case MINUS_EXPR:
2087 case MULT_EXPR:
2088 case TRUNC_DIV_EXPR:
2089 case CEIL_DIV_EXPR:
2090 case FLOOR_DIV_EXPR:
2091 case ROUND_DIV_EXPR:
2092 case TRUNC_MOD_EXPR:
2093 case CEIL_MOD_EXPR:
2094 case ROUND_MOD_EXPR:
2095 case RDIV_EXPR:
2096 case EXACT_DIV_EXPR:
2097 case MIN_EXPR:
2098 case MAX_EXPR:
2099 case LSHIFT_EXPR:
2100 case RSHIFT_EXPR:
2101 case LROTATE_EXPR:
2102 case RROTATE_EXPR:
2103 case BIT_AND_EXPR:
2104 case BIT_IOR_EXPR:
2105 case BIT_XOR_EXPR:
2106 case TRUTH_AND_EXPR:
2107 case TRUTH_ANDIF_EXPR:
2108 case TRUTH_OR_EXPR:
2109 case TRUTH_ORIF_EXPR:
2110 case TRUTH_XOR_EXPR:
2111 case LT_EXPR: case LE_EXPR:
2112 case GT_EXPR: case GE_EXPR:
2113 case EQ_EXPR: case NE_EXPR:
2114 case UNORDERED_EXPR: case ORDERED_EXPR:
2115 case UNLT_EXPR: case UNLE_EXPR:
2116 case UNGT_EXPR: case UNGE_EXPR:
2117 case UNEQ_EXPR: case LTGT_EXPR:
2118 case RANGE_EXPR: case COMPLEX_EXPR:
2119
2120 loc = EXPR_LOCATION (x);
2121 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2122 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2123
2124 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2125 {
2126 if (op0 == error_mark_node || op1 == error_mark_node)
2127 x = error_mark_node;
2128 else
2129 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2130 }
2131 else
2132 x = fold (x);
2133
2134 if (TREE_NO_WARNING (org_x)
2135 && warn_nonnull_compare
2136 && COMPARISON_CLASS_P (org_x))
2137 {
2138 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2139 ;
2140 else if (COMPARISON_CLASS_P (x))
2141 TREE_NO_WARNING (x) = 1;
2142 /* Otherwise give up on optimizing these here and let the GIMPLE
2143 folders optimize them later on. */
2144 else if (op0 != TREE_OPERAND (org_x, 0)
2145 || op1 != TREE_OPERAND (org_x, 1))
2146 {
2147 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2148 TREE_NO_WARNING (x) = 1;
2149 }
2150 else
2151 x = org_x;
2152 }
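
/* A sketch of the scenario (assumed): for

     void f (int *p) __attribute__ ((nonnull));
     void f (int *p) { if (p == 0) return; }

   the front end may have set TREE_NO_WARNING on the P == 0 comparison to
   control -Wnonnull-compare diagnostics; the code above makes sure any
   rebuilt comparison carries that flag forward rather than losing it in
   fold_build2_loc.  */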
2153 break;
2154
2155 case VEC_COND_EXPR:
2156 case COND_EXPR:
2157
2158 /* Don't bother folding a void condition, since it can't produce a
2159 constant value. Also, some statement-level uses of COND_EXPR leave
2160 one of the branches NULL, so folding would crash. */
2161 if (VOID_TYPE_P (TREE_TYPE (x)))
2162 return x;
2163
2164 loc = EXPR_LOCATION (x);
2165 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2166 op1 = cp_fold (TREE_OPERAND (x, 1));
2167 op2 = cp_fold (TREE_OPERAND (x, 2));
2168
2169 if (op0 != TREE_OPERAND (x, 0)
2170 || op1 != TREE_OPERAND (x, 1)
2171 || op2 != TREE_OPERAND (x, 2))
2172 {
2173 if (op0 == error_mark_node
2174 || op1 == error_mark_node
2175 || op2 == error_mark_node)
2176 x = error_mark_node;
2177 else
2178 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2179 }
2180 else
2181 x = fold (x);
2182
2183 /* A COND_EXPR might have incompatible types in branches if one or both
2184 arms are bitfields. If folding exposed such a branch, fix it up. */
2185 if (TREE_CODE (x) != code)
2186 if (tree type = is_bitfield_expr_with_lowered_type (x))
2187 x = fold_convert (type, x);
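
/* An assumed illustration: in

     struct S { int b : 3; } s;
     int i = cond ? s.b : s.b + 1;

   folding may reduce the COND_EXPR to a single arm whose type is the
   lowered bit-field type rather than int; converting back to the declared
   type reported by is_bitfield_expr_with_lowered_type fixes that up.  */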
2188
2189 break;
2190
2191 case CALL_EXPR:
2192 {
2193 int i, m, sv = optimize, nw = sv, changed = 0;
2194 tree callee = get_callee_fndecl (x);
2195
2196 /* Some built-in function calls will be evaluated at compile-time in
2197 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2198 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2199 if (callee && DECL_BUILT_IN (callee) && !optimize
2200 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2201 && current_function_decl
2202 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2203 nw = 1;
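
/* A minimal sketch of the source this guards (assumed):

     constexpr bool f (int i) { return __builtin_constant_p (i); }

   at -O0, fold_builtin_1 would resolve __builtin_constant_p (i) to 0
   straight away; temporarily raising OPTIMIZE to 1 keeps the builtin
   around so constexpr evaluation can still give the right answer.  */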
2204
2205 x = copy_node (x);
2206
2207 m = call_expr_nargs (x);
2208 for (i = 0; i < m; i++)
2209 {
2210 r = cp_fold (CALL_EXPR_ARG (x, i));
2211 if (r != CALL_EXPR_ARG (x, i))
2212 {
2213 if (r == error_mark_node)
2214 {
2215 x = error_mark_node;
2216 break;
2217 }
2218 changed = 1;
2219 }
2220 CALL_EXPR_ARG (x, i) = r;
2221 }
2222 if (x == error_mark_node)
2223 break;
2224
2225 optimize = nw;
2226 r = fold (x);
2227 optimize = sv;
2228
2229 if (TREE_CODE (r) != CALL_EXPR)
2230 {
2231 x = cp_fold (r);
2232 break;
2233 }
2234
2235 optimize = nw;
2236
2237 /* Invoke maybe_constant_value for functions declared
2238 constexpr and not called with AGGR_INIT_EXPRs.
2239 TODO:
2240 Do constexpr expansion of expressions where the call itself is not
2241 constant, but the call followed by an INDIRECT_REF is. */
2242 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2243 && !flag_no_inline)
2244 r = maybe_constant_value (x);
2245 optimize = sv;
2246
2247 if (TREE_CODE (r) != CALL_EXPR)
2248 {
2249 x = r;
2250 break;
2251 }
2252
2253 if (!changed)
2254 x = org_x;
2255 break;
2256 }
2257
2258 case CONSTRUCTOR:
2259 {
2260 unsigned i;
2261 constructor_elt *p;
2262 bool changed = false;
2263 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2264 vec<constructor_elt, va_gc> *nelts = NULL;
2265 vec_safe_reserve (nelts, vec_safe_length (elts));
2266 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2267 {
2268 tree op = cp_fold (p->value);
2269 constructor_elt e = { p->index, op };
2270 nelts->quick_push (e);
2271 if (op != p->value)
2272 {
2273 if (op == error_mark_node)
2274 {
2275 x = error_mark_node;
2276 changed = false;
2277 break;
2278 }
2279 changed = true;
2280 }
2281 }
2282 if (changed)
2283 x = build_constructor (TREE_TYPE (x), nelts);
2284 else
2285 vec_free (nelts);
2286 break;
2287 }
2288 case TREE_VEC:
2289 {
2290 bool changed = false;
2291 vec<tree, va_gc> *vec = make_tree_vector ();
2292 int i, n = TREE_VEC_LENGTH (x);
2293 vec_safe_reserve (vec, n);
2294
2295 for (i = 0; i < n; i++)
2296 {
2297 tree op = cp_fold (TREE_VEC_ELT (x, i));
2298 vec->quick_push (op);
2299 if (op != TREE_VEC_ELT (x, i))
2300 changed = true;
2301 }
2302
2303 if (changed)
2304 {
2305 r = copy_node (x);
2306 for (i = 0; i < n; i++)
2307 TREE_VEC_ELT (r, i) = (*vec)[i];
2308 x = r;
2309 }
2310
2311 release_tree_vector (vec);
2312 }
2313
2314 break;
2315
2316 case ARRAY_REF:
2317 case ARRAY_RANGE_REF:
2318
2319 loc = EXPR_LOCATION (x);
2320 op0 = cp_fold (TREE_OPERAND (x, 0));
2321 op1 = cp_fold (TREE_OPERAND (x, 1));
2322 op2 = cp_fold (TREE_OPERAND (x, 2));
2323 op3 = cp_fold (TREE_OPERAND (x, 3));
2324
2325 if (op0 != TREE_OPERAND (x, 0)
2326 || op1 != TREE_OPERAND (x, 1)
2327 || op2 != TREE_OPERAND (x, 2)
2328 || op3 != TREE_OPERAND (x, 3))
2329 {
2330 if (op0 == error_mark_node
2331 || op1 == error_mark_node
2332 || op2 == error_mark_node
2333 || op3 == error_mark_node)
2334 x = error_mark_node;
2335 else
2336 {
2337 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2338 TREE_READONLY (x) = TREE_READONLY (org_x);
2339 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2340 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2341 }
2342 }
2343
2344 x = fold (x);
2345 break;
2346
2347 default:
2348 return org_x;
2349 }
2350
2351 fold_cache->put (org_x, x);
2352 /* Make sure we don't try to fold an already folded result again. */
2353 if (x != org_x)
2354 fold_cache->put (x, x);
2355
2356 return x;
2357 }
2358
2359 #include "gt-cp-cp-gimplify.h"