re PR c++/69095 (internal compiler error: in dependent_type_p, at cp/pt.c:19399)
[gcc.git] gcc/cp/cp-gimplify.c
1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
2
3 Copyright (C) 2002-2016 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "target.h"
26 #include "basic-block.h"
27 #include "cp-tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
32 #include "gimplify.h"
33 #include "c-family/c-ubsan.h"
34 #include "cilk.h"
35 #include "cp-cilkplus.h"
36
37 /* Forward declarations. */
38
39 static tree cp_genericize_r (tree *, int *, void *);
40 static tree cp_fold_r (tree *, int *, void *);
41 static void cp_genericize_tree (tree*);
42 static tree cp_fold (tree);
43
44 /* Local declarations. */
45
46 enum bc_t { bc_break = 0, bc_continue = 1 };
47
48 /* Stack of labels which are targets for "break" or "continue",
49 linked through TREE_CHAIN. */
50 static tree bc_label[2];
51
52 /* Begin a scope which can be exited by a break or continue statement. BC
53 indicates which.
54
55 Just creates a label with location LOCATION and pushes it into the current
56 context. */
57
58 static tree
59 begin_bc_block (enum bc_t bc, location_t location)
60 {
61 tree label = create_artificial_label (location);
62 DECL_CHAIN (label) = bc_label[bc];
63 bc_label[bc] = label;
64 if (bc == bc_break)
65 LABEL_DECL_BREAK (label) = true;
66 else
67 LABEL_DECL_CONTINUE (label) = true;
68 return label;
69 }
70
71 /* Finish a scope which can be exited by a break or continue statement.
72 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
73 an expression for the contents of the scope.
74
75 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
76 BLOCK. Otherwise, just forget the label. */
77
78 static void
79 finish_bc_block (tree *block, enum bc_t bc, tree label)
80 {
81 gcc_assert (label == bc_label[bc]);
82
83 if (TREE_USED (label))
84 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
85 block);
86
87 bc_label[bc] = DECL_CHAIN (label);
88 DECL_CHAIN (label) = NULL_TREE;
89 }
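/* An illustrative sketch (hand-written, not from the sources) of how
   the bc_label stack behaves for nested loops.  While genericizing

     while (a)        // begin_bc_block pushes blab1 and clab1
       while (b)      // begin_bc_block pushes blab2 and clab2
         break;       // get_bc_label (bc_break) returns blab2

   the inner "break" becomes a GOTO_EXPR to the label on top of
   bc_label[bc_break].  finish_bc_block later pops blab2 and emits a
   LABEL_EXPR for it, but only because get_bc_label set TREE_USED.  */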
90
91 /* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
92 *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
93 TARGET_EXPR. *PRE_P and *POST_P are gimple sequences from the caller
94 of gimplify_cilk_spawn. */
95
96 static void
97 cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
98 gimple_seq *post_p)
99 {
100 int ii = 0;
101
102 cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p);
103 if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
104 for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
105 gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
106 is_gimple_reg, fb_rvalue);
107 }
108
109
110 /* Get the LABEL_EXPR to represent a break or continue statement
111 in the current block scope. BC indicates which. */
112
113 static tree
114 get_bc_label (enum bc_t bc)
115 {
116 tree label = bc_label[bc];
117
118 /* Mark the label used for finish_bc_block. */
119 TREE_USED (label) = 1;
120 return label;
121 }
122
123 /* Genericize a TRY_BLOCK. */
124
125 static void
126 genericize_try_block (tree *stmt_p)
127 {
128 tree body = TRY_STMTS (*stmt_p);
129 tree cleanup = TRY_HANDLERS (*stmt_p);
130
131 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
132 }
133
134 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
135
136 static void
137 genericize_catch_block (tree *stmt_p)
138 {
139 tree type = HANDLER_TYPE (*stmt_p);
140 tree body = HANDLER_BODY (*stmt_p);
141
142 /* FIXME should the caught type go in TREE_TYPE? */
143 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
144 }
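/* As an illustrative sketch (hand-written, not compiler output), a
   handler such as

     try { f (); } catch (E &e) { g (); }

   is genericized to roughly

     TRY_CATCH_EXPR <f ();,
                     CATCH_EXPR <E, handler-body>>

   where the caught type E is operand 0 of the CATCH_EXPR and the
   handler body already contains the initialization of "e".  */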
145
146 /* A terser interface for building a representation of an exception
147 specification. */
148
149 static tree
150 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
151 {
152 tree t;
153
154 /* FIXME should the allowed types go in TREE_TYPE? */
155 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
156 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
157
158 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
159 append_to_statement_list (body, &TREE_OPERAND (t, 0));
160
161 return t;
162 }
163
164 /* Genericize an EH_SPEC_BLOCK by converting it to a
165 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
166
167 static void
168 genericize_eh_spec_block (tree *stmt_p)
169 {
170 tree body = EH_SPEC_STMTS (*stmt_p);
171 tree allowed = EH_SPEC_RAISES (*stmt_p);
172 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
173
174 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
175 TREE_NO_WARNING (*stmt_p) = true;
176 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
177 }
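/* For example (an approximate sketch of the resulting GENERIC, not
   compiler output), the body of

     void f () throw (A) { body }

   is wrapped as roughly

     TRY_CATCH_EXPR <body,
                     EH_FILTER_EXPR <allowed-types = {A},
                                     failure = call-unexpected-handler>>

   so that an exception escaping BODY that is not an A reaches the
   filter's failure action.  */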
178
179 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
180
181 static void
182 genericize_if_stmt (tree *stmt_p)
183 {
184 tree stmt, cond, then_, else_;
185 location_t locus = EXPR_LOCATION (*stmt_p);
186
187 stmt = *stmt_p;
188 cond = IF_COND (stmt);
189 then_ = THEN_CLAUSE (stmt);
190 else_ = ELSE_CLAUSE (stmt);
191
192 if (!then_)
193 then_ = build_empty_stmt (locus);
194 if (!else_)
195 else_ = build_empty_stmt (locus);
196
197 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
198 stmt = then_;
199 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
200 stmt = else_;
201 else
202 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
203 if (!EXPR_HAS_LOCATION (stmt))
204 protected_set_expr_location (stmt, locus);
205 *stmt_p = stmt;
206 }
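/* Illustrative sketches (hand-written, not compiler output):

     if (x) f (); else g ();   =>  COND_EXPR <x, f ();, g ();>
     if (1) x = 1;             =>  x = 1;   // empty else arm dropped
     if (0) ; else x = 2;      =>  x = 2;   // empty then arm dropped

   A constant condition is folded away only when the discarded arm has
   no side effects; otherwise the COND_EXPR is kept as is.  */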
207
208 /* Build a generic representation of one of the C loop forms. COND is the
209 loop condition or NULL_TREE. BODY is the (possibly compound) statement
210 controlled by the loop. INCR is the increment expression of a for-loop,
211 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
212 evaluated before the loop body as in while and for loops, or after the
213 loop body as in do-while loops. */
214
215 static void
216 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
217 tree incr, bool cond_is_first, int *walk_subtrees,
218 void *data)
219 {
220 tree blab, clab;
221 tree exit = NULL;
222 tree stmt_list = NULL;
223
224 blab = begin_bc_block (bc_break, start_locus);
225 clab = begin_bc_block (bc_continue, start_locus);
226
227 protected_set_expr_location (incr, start_locus);
228
229 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
230 cp_walk_tree (&body, cp_genericize_r, data, NULL);
231 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
232 *walk_subtrees = 0;
233
234 if (cond && TREE_CODE (cond) != INTEGER_CST)
235 {
236 /* If COND is constant, don't bother building an exit. If it's false,
237 we won't build a loop. If it's true, any exits are in the body. */
238 location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
239 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
240 get_bc_label (bc_break));
241 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
242 build_empty_stmt (cloc), exit);
243 }
244
245 if (exit && cond_is_first)
246 append_to_statement_list (exit, &stmt_list);
247 append_to_statement_list (body, &stmt_list);
248 finish_bc_block (&stmt_list, bc_continue, clab);
249 append_to_statement_list (incr, &stmt_list);
250 if (exit && !cond_is_first)
251 append_to_statement_list (exit, &stmt_list);
252
253 if (!stmt_list)
254 stmt_list = build_empty_stmt (start_locus);
255
256 tree loop;
257 if (cond && integer_zerop (cond))
258 {
259 if (cond_is_first)
260 loop = fold_build3_loc (start_locus, COND_EXPR,
261 void_type_node, cond, stmt_list,
262 build_empty_stmt (start_locus));
263 else
264 loop = stmt_list;
265 }
266 else
267 {
268 location_t loc = start_locus;
269 if (!cond || integer_nonzerop (cond))
270 loc = EXPR_LOCATION (expr_first (body));
271 if (loc == UNKNOWN_LOCATION)
272 loc = start_locus;
273 loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
274 }
275
276 stmt_list = NULL;
277 append_to_statement_list (loop, &stmt_list);
278 finish_bc_block (&stmt_list, bc_break, blab);
279 if (!stmt_list)
280 stmt_list = build_empty_stmt (start_locus);
281
282 *stmt_p = stmt_list;
283 }
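/* An approximate sketch (not compiler output) of the lowering of

     while (cond) body;

   The function above produces roughly

     LOOP_EXPR <{
       if (cond) ; else goto blab;   // the "exit"; cond_is_first
       body;                         // breaks/continues already lowered
       clab:;                        // only if a "continue" was seen
     }>
     blab:;                          // only if a "break" was seen

   For a do-while loop the exit test is appended after the body
   instead, and for "while (1)" no exit is built at all.  */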
284
285 /* Genericize a FOR_STMT node *STMT_P. */
286
287 static void
288 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
289 {
290 tree stmt = *stmt_p;
291 tree expr = NULL;
292 tree loop;
293 tree init = FOR_INIT_STMT (stmt);
294
295 if (init)
296 {
297 cp_walk_tree (&init, cp_genericize_r, data, NULL);
298 append_to_statement_list (init, &expr);
299 }
300
301 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
302 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
303 append_to_statement_list (loop, &expr);
304 if (expr == NULL_TREE)
305 expr = loop;
306 *stmt_p = expr;
307 }
308
309 /* Genericize a WHILE_STMT node *STMT_P. */
310
311 static void
312 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
313 {
314 tree stmt = *stmt_p;
315 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
316 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
317 }
318
319 /* Genericize a DO_STMT node *STMT_P. */
320
321 static void
322 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
323 {
324 tree stmt = *stmt_p;
325 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
326 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
327 }
328
329 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
330
331 static void
332 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
333 {
334 tree stmt = *stmt_p;
335 tree break_block, body, cond, type;
336 location_t stmt_locus = EXPR_LOCATION (stmt);
337
338 break_block = begin_bc_block (bc_break, stmt_locus);
339
340 body = SWITCH_STMT_BODY (stmt);
341 if (!body)
342 body = build_empty_stmt (stmt_locus);
343 cond = SWITCH_STMT_COND (stmt);
344 type = SWITCH_STMT_TYPE (stmt);
345
346 cp_walk_tree (&body, cp_genericize_r, data, NULL);
347 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
348 cp_walk_tree (&type, cp_genericize_r, data, NULL);
349 *walk_subtrees = 0;
350
351 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
352 finish_bc_block (stmt_p, bc_break, break_block);
353 }
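/* For instance (a sketch, not compiler output),

     switch (c) { case 1: f (); break; default: g (); }

   becomes

     SWITCH_EXPR <c, { case 1: f (); goto blab;  default: g (); }>
     blab:;

   with the break label emitted after the SWITCH_EXPR only if some
   "break" in the body actually used it.  */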
354
355 /* Genericize a CONTINUE_STMT node *STMT_P. */
356
357 static void
358 genericize_continue_stmt (tree *stmt_p)
359 {
360 tree stmt_list = NULL;
361 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
362 tree label = get_bc_label (bc_continue);
363 location_t location = EXPR_LOCATION (*stmt_p);
364 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
365 append_to_statement_list (pred, &stmt_list);
366 append_to_statement_list (jump, &stmt_list);
367 *stmt_p = stmt_list;
368 }
369
370 /* Genericize a BREAK_STMT node *STMT_P. */
371
372 static void
373 genericize_break_stmt (tree *stmt_p)
374 {
375 tree label = get_bc_label (bc_break);
376 location_t location = EXPR_LOCATION (*stmt_p);
377 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
378 }
379
380 /* Genericize an OMP_FOR node *STMT_P. */
381
382 static void
383 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
384 {
385 tree stmt = *stmt_p;
386 location_t locus = EXPR_LOCATION (stmt);
387 tree clab = begin_bc_block (bc_continue, locus);
388
389 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
390 if (TREE_CODE (stmt) != OMP_TASKLOOP)
391 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
392 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
393 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
394 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
395 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
396 *walk_subtrees = 0;
397
398 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
399 }
400
401 /* Hook into the middle of gimplifying an OMP_FOR node. */
402
403 static enum gimplify_status
404 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
405 {
406 tree for_stmt = *expr_p;
407 gimple_seq seq = NULL;
408
409 /* Protect ourselves from recursion. */
410 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
411 return GS_UNHANDLED;
412 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
413
414 gimplify_and_add (for_stmt, &seq);
415 gimple_seq_add_seq (pre_p, seq);
416
417 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
418
419 return GS_ALL_DONE;
420 }
421
422 /* Gimplify an EXPR_STMT node. */
423
424 static void
425 gimplify_expr_stmt (tree *stmt_p)
426 {
427 tree stmt = EXPR_STMT_EXPR (*stmt_p);
428
429 if (stmt == error_mark_node)
430 stmt = NULL;
431
432 /* Gimplification of a statement expression will nullify the
433 statement if all its side effects are moved to *PRE_P and *POST_P.
434
435 In this case we will not want to emit the gimplified statement.
436 However, we may still want to emit a warning, so we do that before
437 gimplification. */
438 if (stmt && warn_unused_value)
439 {
440 if (!TREE_SIDE_EFFECTS (stmt))
441 {
442 if (!IS_EMPTY_STMT (stmt)
443 && !VOID_TYPE_P (TREE_TYPE (stmt))
444 && !TREE_NO_WARNING (stmt))
445 warning (OPT_Wunused_value, "statement with no effect");
446 }
447 else
448 warn_if_unused_value (stmt, input_location);
449 }
450
451 if (stmt == NULL_TREE)
452 stmt = alloc_stmt_list ();
453
454 *stmt_p = stmt;
455 }
456
457 /* Gimplify initialization from an AGGR_INIT_EXPR. */
458
459 static void
460 cp_gimplify_init_expr (tree *expr_p)
461 {
462 tree from = TREE_OPERAND (*expr_p, 1);
463 tree to = TREE_OPERAND (*expr_p, 0);
464 tree t;
465
466 /* What about code that pulls out the temp and uses it elsewhere? I
467 think that such code never uses the TARGET_EXPR as an initializer. If
468 I'm wrong, we'll abort because the temp won't have any RTL. In that
469 case, I guess we'll need to replace references somehow. */
470 if (TREE_CODE (from) == TARGET_EXPR)
471 from = TARGET_EXPR_INITIAL (from);
472
473 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
474 inside the TARGET_EXPR. */
475 for (t = from; t; )
476 {
477 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
478
479 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
480 replace the slot operand with our target.
481
482 Should we add a target parm to gimplify_expr instead? No, as in this
483 case we want to replace the INIT_EXPR. */
484 if (TREE_CODE (sub) == AGGR_INIT_EXPR
485 || TREE_CODE (sub) == VEC_INIT_EXPR)
486 {
487 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
488 AGGR_INIT_EXPR_SLOT (sub) = to;
489 else
490 VEC_INIT_EXPR_SLOT (sub) = to;
491 *expr_p = from;
492
493 /* The initialization is now a side-effect, so the container can
494 become void. */
495 if (from != sub)
496 TREE_TYPE (from) = void_type_node;
497 }
498
499 if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
500 /* Handle aggregate NSDMI. */
501 replace_placeholders (sub, to);
502
503 if (t == sub)
504 break;
505 else
506 t = TREE_OPERAND (t, 1);
507 }
508
509 }
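/* An illustrative sketch (hand-written; the temporary name D.1234 is
   made up):

     T t = T (args);

   arrives here as roughly

     INIT_EXPR <t, TARGET_EXPR <D.1234, AGGR_INIT_EXPR <T::T, D.1234, args>>>

   and after the slot replacement above it is reduced to

     AGGR_INIT_EXPR <T::T, t, args>

   so that the object is constructed directly into "t" instead of into
   a temporary that would then have to be copied.  */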
510
511 /* Gimplify a MUST_NOT_THROW_EXPR. */
512
513 static enum gimplify_status
514 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
515 {
516 tree stmt = *expr_p;
517 tree temp = voidify_wrapper_expr (stmt, NULL);
518 tree body = TREE_OPERAND (stmt, 0);
519 gimple_seq try_ = NULL;
520 gimple_seq catch_ = NULL;
521 gimple *mnt;
522
523 gimplify_and_add (body, &try_);
524 mnt = gimple_build_eh_must_not_throw (terminate_node);
525 gimple_seq_add_stmt_without_update (&catch_, mnt);
526 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
527
528 gimple_seq_add_stmt_without_update (pre_p, mnt);
529 if (temp)
530 {
531 *expr_p = temp;
532 return GS_OK;
533 }
534
535 *expr_p = NULL;
536 return GS_ALL_DONE;
537 }
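/* As a sketch (not compiler output), MUST_NOT_THROW_EXPR <body> is
   lowered to approximately

     try
       {
         body
       }
     catch
       {
         <eh_must_not_throw (terminate)>
       }

   i.e. a GIMPLE_TRY whose handler calls std::terminate () should
   anything escape BODY.  */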
538
539 /* Return TRUE if an operand (OP) of a given TYPE being copied is
540 really just an empty class copy.
541
542 Check that the operand has a simple form so that TARGET_EXPRs and
543 non-empty CONSTRUCTORs get reduced properly, and we leave the
544 return slot optimization alone because it isn't a copy. */
545
546 static bool
547 simple_empty_class_p (tree type, tree op)
548 {
549 return
550 ((TREE_CODE (op) == COMPOUND_EXPR
551 && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
552 || is_gimple_lvalue (op)
553 || INDIRECT_REF_P (op)
554 || (TREE_CODE (op) == CONSTRUCTOR
555 && CONSTRUCTOR_NELTS (op) == 0
556 && !TREE_CLOBBER_P (op))
557 || (TREE_CODE (op) == CALL_EXPR
558 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
559 && is_really_empty_class (type);
560 }
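/* For example (an illustrative sketch), given

     struct E {};   // is_really_empty_class (E) is true
     E a, b;
     a = b;

   the MODIFY_EXPR case below treats the copy as a no-op: the LHS is
   still gimplified for its side effects, but the store itself is
   dropped, since an empty class has no data to copy.  */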
561
562 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
563
564 int
565 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
566 {
567 int saved_stmts_are_full_exprs_p = 0;
568 enum tree_code code = TREE_CODE (*expr_p);
569 enum gimplify_status ret;
570
571 if (STATEMENT_CODE_P (code))
572 {
573 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
574 current_stmt_tree ()->stmts_are_full_exprs_p
575 = STMT_IS_FULL_EXPR_P (*expr_p);
576 }
577
578 switch (code)
579 {
580 case AGGR_INIT_EXPR:
581 simplify_aggr_init_expr (expr_p);
582 ret = GS_OK;
583 break;
584
585 case VEC_INIT_EXPR:
586 {
587 location_t loc = input_location;
588 tree init = VEC_INIT_EXPR_INIT (*expr_p);
589 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
590 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
591 input_location = EXPR_LOCATION (*expr_p);
592 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
593 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
594 from_array,
595 tf_warning_or_error);
596 cp_genericize_tree (expr_p);
597 ret = GS_OK;
598 input_location = loc;
599 }
600 break;
601
602 case THROW_EXPR:
603 /* FIXME communicate throw type to back end, probably by moving
604 THROW_EXPR into ../tree.def. */
605 *expr_p = TREE_OPERAND (*expr_p, 0);
606 ret = GS_OK;
607 break;
608
609 case MUST_NOT_THROW_EXPR:
610 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
611 break;
612
613 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
614 LHS of an assignment might also be involved in the RHS, as in bug
615 25979. */
616 case INIT_EXPR:
617 if (fn_contains_cilk_spawn_p (cfun))
618 {
619 if (cilk_cp_detect_spawn_and_unwrap (expr_p))
620 {
621 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p,
622 pre_p, post_p);
623 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
624 }
625 if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
626 return GS_ERROR;
627 }
628
629 cp_gimplify_init_expr (expr_p);
630 if (TREE_CODE (*expr_p) != INIT_EXPR)
631 return GS_OK;
632 /* Otherwise fall through. */
633 case MODIFY_EXPR:
634 modify_expr_case:
635 {
636 if (fn_contains_cilk_spawn_p (cfun)
637 && cilk_cp_detect_spawn_and_unwrap (expr_p)
638 && !seen_error ())
639 {
640 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
641 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
642 }
643 /* If the back end isn't clever enough to know that the lhs and rhs
644 types are the same, add an explicit conversion. */
645 tree op0 = TREE_OPERAND (*expr_p, 0);
646 tree op1 = TREE_OPERAND (*expr_p, 1);
647
648 if (!error_operand_p (op0)
649 && !error_operand_p (op1)
650 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
651 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
652 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
653 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
654 TREE_TYPE (op0), op1);
655
656 else if (simple_empty_class_p (TREE_TYPE (op0), op1))
657 {
658 /* Remove any copies of empty classes. Also drop volatile
659 variables on the RHS to avoid infinite recursion from
660 gimplify_expr trying to load the value. */
661 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
662 is_gimple_lvalue, fb_lvalue);
663 if (TREE_SIDE_EFFECTS (op1))
664 {
665 if (TREE_THIS_VOLATILE (op1)
666 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
667 op1 = build_fold_addr_expr (op1);
668
669 gimplify_and_add (op1, pre_p);
670 }
671 *expr_p = TREE_OPERAND (*expr_p, 0);
672 }
673 }
674 ret = GS_OK;
675 break;
676
677 case EMPTY_CLASS_EXPR:
678 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
679 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
680 ret = GS_OK;
681 break;
682
683 case BASELINK:
684 *expr_p = BASELINK_FUNCTIONS (*expr_p);
685 ret = GS_OK;
686 break;
687
688 case TRY_BLOCK:
689 genericize_try_block (expr_p);
690 ret = GS_OK;
691 break;
692
693 case HANDLER:
694 genericize_catch_block (expr_p);
695 ret = GS_OK;
696 break;
697
698 case EH_SPEC_BLOCK:
699 genericize_eh_spec_block (expr_p);
700 ret = GS_OK;
701 break;
702
703 case USING_STMT:
704 gcc_unreachable ();
705
706 case FOR_STMT:
707 case WHILE_STMT:
708 case DO_STMT:
709 case SWITCH_STMT:
710 case CONTINUE_STMT:
711 case BREAK_STMT:
712 gcc_unreachable ();
713
714 case OMP_FOR:
715 case OMP_SIMD:
716 case OMP_DISTRIBUTE:
717 case OMP_TASKLOOP:
718 ret = cp_gimplify_omp_for (expr_p, pre_p);
719 break;
720
721 case EXPR_STMT:
722 gimplify_expr_stmt (expr_p);
723 ret = GS_OK;
724 break;
725
726 case UNARY_PLUS_EXPR:
727 {
728 tree arg = TREE_OPERAND (*expr_p, 0);
729 tree type = TREE_TYPE (*expr_p);
730 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
731 : arg;
732 ret = GS_OK;
733 }
734 break;
735
736 case CILK_SPAWN_STMT:
737 gcc_assert (fn_contains_cilk_spawn_p (cfun)
738 && cilk_cp_detect_spawn_and_unwrap (expr_p));
739
740 if (!seen_error ())
741 {
742 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
743 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
744 }
745 return GS_ERROR;
746
747 case CALL_EXPR:
748 if (fn_contains_cilk_spawn_p (cfun)
749 && cilk_cp_detect_spawn_and_unwrap (expr_p)
750 && !seen_error ())
751 {
752 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
753 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
754 }
755 /* DR 1030 says that we need to evaluate the elements of an
756 initializer-list in forward order even when it's used as arguments to
757 a constructor. So if the target wants to evaluate them in reverse
758 order and there's more than one argument other than 'this', gimplify
759 them in order. */
760 ret = GS_OK;
761 if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
762 && call_expr_nargs (*expr_p) > 2)
763 {
764 int nargs = call_expr_nargs (*expr_p);
765 location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
766 for (int i = 1; i < nargs; ++i)
767 {
768 enum gimplify_status t
769 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
770 if (t == GS_ERROR)
771 ret = GS_ERROR;
772 }
773 }
774 break;
775
776 case RETURN_EXPR:
777 if (TREE_OPERAND (*expr_p, 0)
778 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
779 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
780 {
781 expr_p = &TREE_OPERAND (*expr_p, 0);
782 code = TREE_CODE (*expr_p);
783 /* Avoid going through the INIT_EXPR case, which can
784 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
785 goto modify_expr_case;
786 }
787 /* Fall through. */
788
789 default:
790 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
791 break;
792 }
793
794 /* Restore saved state. */
795 if (STATEMENT_CODE_P (code))
796 current_stmt_tree ()->stmts_are_full_exprs_p
797 = saved_stmts_are_full_exprs_p;
798
799 return ret;
800 }
801
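/* Return true if T is a PARM_DECL or RESULT_DECL that is passed (or
   returned) by invisible reference, i.e. whose address is what is
   actually passed or returned.  */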
802 static inline bool
803 is_invisiref_parm (const_tree t)
804 {
805 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
806 && DECL_BY_REFERENCE (t));
807 }
808
809 /* Return true if the UIDs in both int tree maps are equal. */
810
811 bool
812 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
813 {
814 return (a->uid == b->uid);
815 }
816
817 /* Hash a UID in a cxx_int_tree_map. */
818
819 unsigned int
820 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
821 {
822 return item->uid;
823 }
824
825 /* A stable comparison routine for use with splay trees and DECLs. */
826
827 static int
828 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
829 {
830 tree a = (tree) xa;
831 tree b = (tree) xb;
832
833 return DECL_UID (a) - DECL_UID (b);
834 }
835
836 /* OpenMP context during genericization. */
837
838 struct cp_genericize_omp_taskreg
839 {
840 bool is_parallel;
841 bool default_shared;
842 struct cp_genericize_omp_taskreg *outer;
843 splay_tree variables;
844 };
845
846 /* Return true if genericization should try to determine if
847 DECL is firstprivate or shared within task regions. */
848
849 static bool
850 omp_var_to_track (tree decl)
851 {
852 tree type = TREE_TYPE (decl);
853 if (is_invisiref_parm (decl))
854 type = TREE_TYPE (type);
855 while (TREE_CODE (type) == ARRAY_TYPE)
856 type = TREE_TYPE (type);
857 if (type == error_mark_node || !CLASS_TYPE_P (type))
858 return false;
859 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
860 return false;
861 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
862 return false;
863 return true;
864 }
865
866 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
867
868 static void
869 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
870 {
871 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
872 (splay_tree_key) decl);
873 if (n == NULL)
874 {
875 int flags = OMP_CLAUSE_DEFAULT_SHARED;
876 if (omp_ctx->outer)
877 omp_cxx_notice_variable (omp_ctx->outer, decl);
878 if (!omp_ctx->default_shared)
879 {
880 struct cp_genericize_omp_taskreg *octx;
881
882 for (octx = omp_ctx->outer; octx; octx = octx->outer)
883 {
884 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
885 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
886 {
887 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
888 break;
889 }
890 if (octx->is_parallel)
891 break;
892 }
893 if (octx == NULL
894 && (TREE_CODE (decl) == PARM_DECL
895 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
896 && DECL_CONTEXT (decl) == current_function_decl)))
897 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
898 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
899 {
900 /* DECL is implicitly determined firstprivate in
901 the current task construct. Ensure copy ctor and
902 dtor are instantiated, because during gimplification
903 it will already be too late. */
904 tree type = TREE_TYPE (decl);
905 if (is_invisiref_parm (decl))
906 type = TREE_TYPE (type);
907 while (TREE_CODE (type) == ARRAY_TYPE)
908 type = TREE_TYPE (type);
909 get_copy_ctor (type, tf_none);
910 get_dtor (type, tf_none);
911 }
912 }
913 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
914 }
915 }
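/* An illustrative sketch (hand-written) of the implicit determination
   handled above:

     void f (S s)         // S is some class type
     {
       #pragma omp task   // no default clause: not default_shared
         use (s);
     }

   Because "s" would be implicitly firstprivate in the task region,
   its copy constructor and destructor are instantiated here, while
   instantiation is still possible.  */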
916
917 /* Genericization context. */
918
919 struct cp_genericize_data
920 {
921 hash_set<tree> *p_set;
922 vec<tree> bind_expr_stack;
923 struct cp_genericize_omp_taskreg *omp_ctx;
924 tree try_block;
925 bool no_sanitize_p;
926 };
927
928 /* Perform any pre-gimplification folding of C++ front end trees to
929 GENERIC.
930 Note: The folding of non-OMP cases is something to move into
931 the middle end. Since for now we have most foldings only on GENERIC
932 in fold-const, we need to perform this before the transformation to
933 GIMPLE form. */
934
935 static tree
936 cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
937 {
938 tree stmt;
939 enum tree_code code;
940
941 *stmt_p = stmt = cp_fold (*stmt_p);
942
943 code = TREE_CODE (stmt);
944 if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
945 || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
946 || code == OACC_LOOP)
947 {
948 tree x;
949 int i, n;
950
951 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
952 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
953 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
954 x = OMP_FOR_COND (stmt);
955 if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
956 {
957 cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
958 cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
959 }
960 else if (x && TREE_CODE (x) == TREE_VEC)
961 {
962 n = TREE_VEC_LENGTH (x);
963 for (i = 0; i < n; i++)
964 {
965 tree o = TREE_VEC_ELT (x, i);
966 if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
967 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
968 }
969 }
970 x = OMP_FOR_INCR (stmt);
971 if (x && TREE_CODE (x) == TREE_VEC)
972 {
973 n = TREE_VEC_LENGTH (x);
974 for (i = 0; i < n; i++)
975 {
976 tree o = TREE_VEC_ELT (x, i);
977 if (o && TREE_CODE (o) == MODIFY_EXPR)
978 o = TREE_OPERAND (o, 1);
979 if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
980 || TREE_CODE (o) == POINTER_PLUS_EXPR))
981 {
982 cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
983 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
984 }
985 }
986 }
987 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
988 *walk_subtrees = 0;
989 }
990
991 return NULL;
992 }
993
994 /* Fold ALL the trees! FIXME we should be able to remove this, but
995 apparently that still causes optimization regressions. */
996
997 void
998 cp_fold_function (tree fndecl)
999 {
1000 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, NULL, NULL);
1001 }
1002
1003 /* Perform any pre-gimplification lowering of C++ front end trees to
1004 GENERIC. */
1005
1006 static tree
1007 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1008 {
1009 tree stmt = *stmt_p;
1010 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1011 hash_set<tree> *p_set = wtd->p_set;
1012
1013 /* If in an OpenMP context, note var uses. */
1014 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1015 && (VAR_P (stmt)
1016 || TREE_CODE (stmt) == PARM_DECL
1017 || TREE_CODE (stmt) == RESULT_DECL)
1018 && omp_var_to_track (stmt))
1019 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1020
1021 /* Don't dereference parms in a thunk, pass the references through. */
1022 if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
1023 || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1024 {
1025 *walk_subtrees = 0;
1026 return NULL;
1027 }
1028
1029 /* Otherwise, do dereference invisible reference parms. */
1030 if (is_invisiref_parm (stmt))
1031 {
1032 *stmt_p = convert_from_reference (stmt);
1033 *walk_subtrees = 0;
1034 return NULL;
1035 }
1036
1037 /* Map block scope extern declarations to visible declarations with the
1038 same name and type in outer scopes if any. */
1039 if (cp_function_chain->extern_decl_map
1040 && VAR_OR_FUNCTION_DECL_P (stmt)
1041 && DECL_EXTERNAL (stmt))
1042 {
1043 struct cxx_int_tree_map *h, in;
1044 in.uid = DECL_UID (stmt);
1045 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
1046 if (h)
1047 {
1048 *stmt_p = h->to;
1049 *walk_subtrees = 0;
1050 return NULL;
1051 }
1052 }
1053
1054 /* Other than invisiref parms, don't walk the same tree twice. */
1055 if (p_set->contains (stmt))
1056 {
1057 *walk_subtrees = 0;
1058 return NULL_TREE;
1059 }
1060
1061 if (TREE_CODE (stmt) == ADDR_EXPR
1062 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1063 {
1064 /* If in an OpenMP context, note var uses. */
1065 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1066 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1067 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1068 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1069 *walk_subtrees = 0;
1070 }
1071 else if (TREE_CODE (stmt) == RETURN_EXPR
1072 && TREE_OPERAND (stmt, 0)
1073 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1074 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1075 *walk_subtrees = 0;
1076 else if (TREE_CODE (stmt) == OMP_CLAUSE)
1077 switch (OMP_CLAUSE_CODE (stmt))
1078 {
1079 case OMP_CLAUSE_LASTPRIVATE:
1080 /* Don't dereference an invisiref in OpenMP clauses. */
1081 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1082 {
1083 *walk_subtrees = 0;
1084 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1085 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1086 cp_genericize_r, data, NULL);
1087 }
1088 break;
1089 case OMP_CLAUSE_PRIVATE:
1090 /* Don't dereference an invisiref in OpenMP clauses. */
1091 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1092 *walk_subtrees = 0;
1093 else if (wtd->omp_ctx != NULL)
1094 {
1095 /* A private clause doesn't cause any references to the
1096 var in outer contexts, so avoid calling
1097 omp_cxx_notice_variable for it. */
1098 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1099 wtd->omp_ctx = NULL;
1100 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1101 data, NULL);
1102 wtd->omp_ctx = old;
1103 *walk_subtrees = 0;
1104 }
1105 break;
1106 case OMP_CLAUSE_SHARED:
1107 case OMP_CLAUSE_FIRSTPRIVATE:
1108 case OMP_CLAUSE_COPYIN:
1109 case OMP_CLAUSE_COPYPRIVATE:
1110 /* Don't dereference an invisiref in OpenMP clauses. */
1111 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1112 *walk_subtrees = 0;
1113 break;
1114 case OMP_CLAUSE_REDUCTION:
1115 /* Don't dereference an invisiref in reduction clause's
1116 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1117 still needs to be genericized. */
1118 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1119 {
1120 *walk_subtrees = 0;
1121 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1122 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1123 cp_genericize_r, data, NULL);
1124 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1125 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1126 cp_genericize_r, data, NULL);
1127 }
1128 break;
1129 default:
1130 break;
1131 }
1132 else if (IS_TYPE_OR_DECL_P (stmt))
1133 *walk_subtrees = 0;
1134
1135 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1136 to lower this construct before scanning it, so we need to lower these
1137 before doing anything else. */
1138 else if (TREE_CODE (stmt) == CLEANUP_STMT)
1139 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1140 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1141 : TRY_FINALLY_EXPR,
1142 void_type_node,
1143 CLEANUP_BODY (stmt),
1144 CLEANUP_EXPR (stmt));
1145
1146 else if (TREE_CODE (stmt) == IF_STMT)
1147 {
1148 genericize_if_stmt (stmt_p);
1149 /* *stmt_p has changed, tail recurse to handle it again. */
1150 return cp_genericize_r (stmt_p, walk_subtrees, data);
1151 }
1152
1153 /* COND_EXPR might have incompatible types in branches if one or both
1154 arms are bitfields. Fix it up now. */
1155 else if (TREE_CODE (stmt) == COND_EXPR)
1156 {
1157 tree type_left
1158 = (TREE_OPERAND (stmt, 1)
1159 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1160 : NULL_TREE);
1161 tree type_right
1162 = (TREE_OPERAND (stmt, 2)
1163 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1164 : NULL_TREE);
1165 if (type_left
1166 && !useless_type_conversion_p (TREE_TYPE (stmt),
1167 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1168 {
1169 TREE_OPERAND (stmt, 1)
1170 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1171 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1172 type_left));
1173 }
1174 if (type_right
1175 && !useless_type_conversion_p (TREE_TYPE (stmt),
1176 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1177 {
1178 TREE_OPERAND (stmt, 2)
1179 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1180 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1181 type_right));
1182 }
1183 }
1184
1185 else if (TREE_CODE (stmt) == BIND_EXPR)
1186 {
1187 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1188 {
1189 tree decl;
1190 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1191 if (VAR_P (decl)
1192 && !DECL_EXTERNAL (decl)
1193 && omp_var_to_track (decl))
1194 {
1195 splay_tree_node n
1196 = splay_tree_lookup (wtd->omp_ctx->variables,
1197 (splay_tree_key) decl);
1198 if (n == NULL)
1199 splay_tree_insert (wtd->omp_ctx->variables,
1200 (splay_tree_key) decl,
1201 TREE_STATIC (decl)
1202 ? OMP_CLAUSE_DEFAULT_SHARED
1203 : OMP_CLAUSE_DEFAULT_PRIVATE);
1204 }
1205 }
1206 if (flag_sanitize
1207 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1208 {
1209 /* The point here is to not sanitize static initializers. */
1210 bool no_sanitize_p = wtd->no_sanitize_p;
1211 wtd->no_sanitize_p = true;
1212 for (tree decl = BIND_EXPR_VARS (stmt);
1213 decl;
1214 decl = DECL_CHAIN (decl))
1215 if (VAR_P (decl)
1216 && TREE_STATIC (decl)
1217 && DECL_INITIAL (decl))
1218 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1219 wtd->no_sanitize_p = no_sanitize_p;
1220 }
1221 wtd->bind_expr_stack.safe_push (stmt);
1222 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1223 cp_genericize_r, data, NULL);
1224 wtd->bind_expr_stack.pop ();
1225 }
1226
1227 else if (TREE_CODE (stmt) == USING_STMT)
1228 {
1229 tree block = NULL_TREE;
1230
1231 /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
1232 BLOCK, and append an IMPORTED_DECL to its
1233 BLOCK_VARS chained list. */
1234 if (wtd->bind_expr_stack.exists ())
1235 {
1236 int i;
1237 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1238 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1239 break;
1240 }
1241 if (block)
1242 {
1243 tree using_directive;
1244 gcc_assert (TREE_OPERAND (stmt, 0));
1245
1246 using_directive = make_node (IMPORTED_DECL);
1247 TREE_TYPE (using_directive) = void_type_node;
1248
1249 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1250 = TREE_OPERAND (stmt, 0);
1251 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1252 BLOCK_VARS (block) = using_directive;
1253 }
1254 /* The USING_STMT won't appear in GENERIC. */
1255 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1256 *walk_subtrees = 0;
1257 }
1258
1259 else if (TREE_CODE (stmt) == DECL_EXPR
1260 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1261 {
1262 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1263 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1264 *walk_subtrees = 0;
1265 }
1266 else if (TREE_CODE (stmt) == DECL_EXPR)
1267 {
1268 tree d = DECL_EXPR_DECL (stmt);
1269 if (TREE_CODE (d) == VAR_DECL)
1270 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1271 }
1272 else if (TREE_CODE (stmt) == OMP_PARALLEL
1273 || TREE_CODE (stmt) == OMP_TASK
1274 || TREE_CODE (stmt) == OMP_TASKLOOP)
1275 {
1276 struct cp_genericize_omp_taskreg omp_ctx;
1277 tree c, decl;
1278 splay_tree_node n;
1279
1280 *walk_subtrees = 0;
1281 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1282 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1283 omp_ctx.default_shared = omp_ctx.is_parallel;
1284 omp_ctx.outer = wtd->omp_ctx;
1285 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1286 wtd->omp_ctx = &omp_ctx;
1287 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1288 switch (OMP_CLAUSE_CODE (c))
1289 {
1290 case OMP_CLAUSE_SHARED:
1291 case OMP_CLAUSE_PRIVATE:
1292 case OMP_CLAUSE_FIRSTPRIVATE:
1293 case OMP_CLAUSE_LASTPRIVATE:
1294 decl = OMP_CLAUSE_DECL (c);
1295 if (decl == error_mark_node || !omp_var_to_track (decl))
1296 break;
1297 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1298 if (n != NULL)
1299 break;
1300 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1301 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1302 ? OMP_CLAUSE_DEFAULT_SHARED
1303 : OMP_CLAUSE_DEFAULT_PRIVATE);
1304 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1305 && omp_ctx.outer)
1306 omp_cxx_notice_variable (omp_ctx.outer, decl);
1307 break;
1308 case OMP_CLAUSE_DEFAULT:
1309 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1310 omp_ctx.default_shared = true;
1311 default:
1312 break;
1313 }
1314 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1315 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1316 else
1317 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1318 wtd->omp_ctx = omp_ctx.outer;
1319 splay_tree_delete (omp_ctx.variables);
1320 }
1321 else if (TREE_CODE (stmt) == TRY_BLOCK)
1322 {
1323 *walk_subtrees = 0;
1324 tree try_block = wtd->try_block;
1325 wtd->try_block = stmt;
1326 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1327 wtd->try_block = try_block;
1328 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1329 }
1330 else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
1331 {
1332 /* MUST_NOT_THROW_COND might be something else with TM. */
1333 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1334 {
1335 *walk_subtrees = 0;
1336 tree try_block = wtd->try_block;
1337 wtd->try_block = stmt;
1338 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1339 wtd->try_block = try_block;
1340 }
1341 }
1342 else if (TREE_CODE (stmt) == THROW_EXPR)
1343 {
1344 location_t loc = location_of (stmt);
1345 if (TREE_NO_WARNING (stmt))
1346 /* Never mind. */;
1347 else if (wtd->try_block)
1348 {
1349 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
1350 && warning_at (loc, OPT_Wterminate,
1351 "throw will always call terminate()")
1352 && cxx_dialect >= cxx11
1353 && DECL_DESTRUCTOR_P (current_function_decl))
1354 inform (loc, "in C++11 destructors default to noexcept");
1355 }
1356 else
1357 {
1358 if (warn_cxx11_compat && cxx_dialect < cxx11
1359 && DECL_DESTRUCTOR_P (current_function_decl)
1360 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1361 == NULL_TREE)
1362 && (get_defaulted_eh_spec (current_function_decl)
1363 == empty_except_spec))
1364 warning_at (loc, OPT_Wc__11_compat,
1365 "in C++11 this throw will terminate because "
1366 "destructors default to noexcept");
1367 }
1368 }
1369 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1370 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1371 else if (TREE_CODE (stmt) == FOR_STMT)
1372 genericize_for_stmt (stmt_p, walk_subtrees, data);
1373 else if (TREE_CODE (stmt) == WHILE_STMT)
1374 genericize_while_stmt (stmt_p, walk_subtrees, data);
1375 else if (TREE_CODE (stmt) == DO_STMT)
1376 genericize_do_stmt (stmt_p, walk_subtrees, data);
1377 else if (TREE_CODE (stmt) == SWITCH_STMT)
1378 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1379 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1380 genericize_continue_stmt (stmt_p);
1381 else if (TREE_CODE (stmt) == BREAK_STMT)
1382 genericize_break_stmt (stmt_p);
1383 else if (TREE_CODE (stmt) == OMP_FOR
1384 || TREE_CODE (stmt) == OMP_SIMD
1385 || TREE_CODE (stmt) == OMP_DISTRIBUTE)
1386 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1387 else if (TREE_CODE (stmt) == PTRMEM_CST)
1388 {
1389 /* By the time we get here we're handing off to the back end, so we don't
1390 need or want to preserve PTRMEM_CST anymore. */
1391 *stmt_p = cplus_expand_constant (stmt);
1392 *walk_subtrees = 0;
1393 }
1394 else if ((flag_sanitize
1395 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1396 && !wtd->no_sanitize_p)
1397 {
1398 if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1399 && TREE_CODE (stmt) == NOP_EXPR
1400 && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1401 ubsan_maybe_instrument_reference (stmt);
1402 else if (TREE_CODE (stmt) == CALL_EXPR)
1403 {
1404 tree fn = CALL_EXPR_FN (stmt);
1405 if (fn != NULL_TREE
1406 && !error_operand_p (fn)
1407 && POINTER_TYPE_P (TREE_TYPE (fn))
1408 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1409 {
1410 bool is_ctor
1411 = TREE_CODE (fn) == ADDR_EXPR
1412 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1413 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1414 if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1415 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1416 if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
1417 cp_ubsan_maybe_instrument_member_call (stmt);
1418 }
1419 }
1420 }
1421
1422 p_set->add (*stmt_p);
1423
1424 return NULL;
1425 }
1426
1427 /* Lower C++ front end trees to GENERIC in T_P. */
1428
1429 static void
1430 cp_genericize_tree (tree* t_p)
1431 {
1432 struct cp_genericize_data wtd;
1433
1434 wtd.p_set = new hash_set<tree>;
1435 wtd.bind_expr_stack.create (0);
1436 wtd.omp_ctx = NULL;
1437 wtd.try_block = NULL_TREE;
1438 wtd.no_sanitize_p = false;
1439 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1440 delete wtd.p_set;
1441 wtd.bind_expr_stack.release ();
1442 if (flag_sanitize & SANITIZE_VPTR)
1443 cp_ubsan_instrument_member_accesses (t_p);
1444 }
1445
1446 /* If a non-void function doesn't obviously end with a return
1447 statement, add ubsan instrumentation code to verify at
1448 runtime that it does return. */
1449
1450 static void
1451 cp_ubsan_maybe_instrument_return (tree fndecl)
1452 {
1453 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1454 || DECL_CONSTRUCTOR_P (fndecl)
1455 || DECL_DESTRUCTOR_P (fndecl)
1456 || !targetm.warn_func_return (fndecl))
1457 return;
1458
1459 tree t = DECL_SAVED_TREE (fndecl);
1460 while (t)
1461 {
1462 switch (TREE_CODE (t))
1463 {
1464 case BIND_EXPR:
1465 t = BIND_EXPR_BODY (t);
1466 continue;
1467 case TRY_FINALLY_EXPR:
1468 t = TREE_OPERAND (t, 0);
1469 continue;
1470 case STATEMENT_LIST:
1471 {
1472 tree_stmt_iterator i = tsi_last (t);
1473 if (!tsi_end_p (i))
1474 {
1475 t = tsi_stmt (i);
1476 continue;
1477 }
1478 }
1479 break;
1480 case RETURN_EXPR:
1481 return;
1482 default:
1483 break;
1484 }
1485 break;
1486 }
1487 if (t == NULL_TREE)
1488 return;
1489 t = DECL_SAVED_TREE (fndecl);
1490 if (TREE_CODE (t) == BIND_EXPR
1491 && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
1492 {
1493 tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
1494 t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
1495 tsi_link_after (&i, t, TSI_NEW_STMT);
1496 }
1497 }
1498
1499 void
1500 cp_genericize (tree fndecl)
1501 {
1502 tree t;
1503
1504 /* Fix up the types of parms passed by invisible reference. */
1505 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1506 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1507 {
1508 /* If a function's arguments are copied to create a thunk,
1509 then DECL_BY_REFERENCE will be set -- but the type of the
1510 argument will be a pointer type, so we will never get
1511 here. */
1512 gcc_assert (!DECL_BY_REFERENCE (t));
1513 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1514 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1515 DECL_BY_REFERENCE (t) = 1;
1516 TREE_ADDRESSABLE (t) = 0;
1517 relayout_decl (t);
1518 }
1519
1520 /* Do the same for the return value. */
1521 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1522 {
1523 t = DECL_RESULT (fndecl);
1524 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1525 DECL_BY_REFERENCE (t) = 1;
1526 TREE_ADDRESSABLE (t) = 0;
1527 relayout_decl (t);
1528 if (DECL_NAME (t))
1529 {
1530 /* Adjust DECL_VALUE_EXPR of the original var. */
1531 tree outer = outer_curly_brace_block (current_function_decl);
1532 tree var;
1533
1534 if (outer)
1535 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1536 if (DECL_NAME (t) == DECL_NAME (var)
1537 && DECL_HAS_VALUE_EXPR_P (var)
1538 && DECL_VALUE_EXPR (var) == t)
1539 {
1540 tree val = convert_from_reference (t);
1541 SET_DECL_VALUE_EXPR (var, val);
1542 break;
1543 }
1544 }
1545 }
1546
1547 /* If we're a clone, the body is already GIMPLE. */
1548 if (DECL_CLONED_FUNCTION_P (fndecl))
1549 return;
1550
1551 /* Expand all the array notations here. */
1552 if (flag_cilkplus
1553 && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
1554 DECL_SAVED_TREE (fndecl) =
1555 expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));
1556
1557 /* We do want to see every occurrence of the parms, so we can't just use
1558 walk_tree's hash functionality. */
1559 cp_genericize_tree (&DECL_SAVED_TREE (fndecl));
1560
1561 if (flag_sanitize & SANITIZE_RETURN
1562 && do_ubsan_in_current_function ())
1563 cp_ubsan_maybe_instrument_return (fndecl);
1564
1565 /* Do everything else. */
1566 c_genericize (fndecl);
1567
1568 gcc_assert (bc_label[bc_break] == NULL);
1569 gcc_assert (bc_label[bc_continue] == NULL);
1570 }
1571 \f
1572 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1573 NULL if there is in fact nothing to do. ARG2 may be null if FN
1574 actually only takes one argument. */
1575
1576 static tree
1577 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1578 {
1579 tree defparm, parm, t;
1580 int i = 0;
1581 int nargs;
1582 tree *argarray;
1583
1584 if (fn == NULL)
1585 return NULL;
1586
1587 nargs = list_length (DECL_ARGUMENTS (fn));
1588 argarray = XALLOCAVEC (tree, nargs);
1589
1590 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1591 if (arg2)
1592 defparm = TREE_CHAIN (defparm);
1593
1594 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1595 {
1596 tree inner_type = TREE_TYPE (arg1);
1597 tree start1, end1, p1;
1598 tree start2 = NULL, p2 = NULL;
1599 tree ret = NULL, lab;
1600
1601 start1 = arg1;
1602 start2 = arg2;
1603 do
1604 {
1605 inner_type = TREE_TYPE (inner_type);
1606 start1 = build4 (ARRAY_REF, inner_type, start1,
1607 size_zero_node, NULL, NULL);
1608 if (arg2)
1609 start2 = build4 (ARRAY_REF, inner_type, start2,
1610 size_zero_node, NULL, NULL);
1611 }
1612 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1613 start1 = build_fold_addr_expr_loc (input_location, start1);
1614 if (arg2)
1615 start2 = build_fold_addr_expr_loc (input_location, start2);
1616
1617 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1618 end1 = fold_build_pointer_plus (start1, end1);
1619
1620 p1 = create_tmp_var (TREE_TYPE (start1));
1621 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1622 append_to_statement_list (t, &ret);
1623
1624 if (arg2)
1625 {
1626 p2 = create_tmp_var (TREE_TYPE (start2));
1627 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1628 append_to_statement_list (t, &ret);
1629 }
1630
1631 lab = create_artificial_label (input_location);
1632 t = build1 (LABEL_EXPR, void_type_node, lab);
1633 append_to_statement_list (t, &ret);
1634
1635 argarray[i++] = p1;
1636 if (arg2)
1637 argarray[i++] = p2;
1638 /* Handle default arguments. */
1639 for (parm = defparm; parm && parm != void_list_node;
1640 parm = TREE_CHAIN (parm), i++)
1641 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1642 TREE_PURPOSE (parm), fn, i,
1643 tf_warning_or_error);
1644 t = build_call_a (fn, i, argarray);
1645 t = fold_convert (void_type_node, t);
1646 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1647 append_to_statement_list (t, &ret);
1648
1649 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1650 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1651 append_to_statement_list (t, &ret);
1652
1653 if (arg2)
1654 {
1655 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1656 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1657 append_to_statement_list (t, &ret);
1658 }
1659
1660 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1661 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1662 append_to_statement_list (t, &ret);
1663
1664 return ret;
1665 }
1666 else
1667 {
1668 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1669 if (arg2)
1670 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1671 /* Handle default arguments. */
1672 for (parm = defparm; parm && parm != void_list_node;
1673 parm = TREE_CHAIN (parm), i++)
1674 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1675 TREE_PURPOSE (parm),
1676 fn, i, tf_warning_or_error);
1677 t = build_call_a (fn, i, argarray);
1678 t = fold_convert (void_type_node, t);
1679 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1680 }
1681 }
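/* A sketch of the loop built above for array operands (illustrative
   only; p1, p2, end1 and lab name the temporaries created above):

     p1 = &arg1[0]...[0];  p2 = &arg2[0]...[0];
     end1 = p1 + sizeof (arg1);
     lab:;
     fn (p1, p2, <converted default args>);
     p1 += sizeof (element);  p2 += sizeof (element);
     if (p1 != end1) goto lab;

   i.e. FN is applied to each element of the flattened array.  */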
1682
1683 /* Return code to initialize DECL with its default constructor, or
1684 NULL if there's nothing to do. */
1685
1686 tree
1687 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1688 {
1689 tree info = CP_OMP_CLAUSE_INFO (clause);
1690 tree ret = NULL;
1691
1692 if (info)
1693 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1694
1695 return ret;
1696 }
1697
1698 /* Return code to initialize DST with a copy constructor from SRC. */
1699
1700 tree
1701 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1702 {
1703 tree info = CP_OMP_CLAUSE_INFO (clause);
1704 tree ret = NULL;
1705
1706 if (info)
1707 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1708 if (ret == NULL)
1709 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1710
1711 return ret;
1712 }
1713
1714 /* Similarly, except use an assignment operator instead. */
1715
1716 tree
1717 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1718 {
1719 tree info = CP_OMP_CLAUSE_INFO (clause);
1720 tree ret = NULL;
1721
1722 if (info)
1723 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1724 if (ret == NULL)
1725 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1726
1727 return ret;
1728 }
1729
1730 /* Return code to destroy DECL. */
1731
1732 tree
1733 cxx_omp_clause_dtor (tree clause, tree decl)
1734 {
1735 tree info = CP_OMP_CLAUSE_INFO (clause);
1736 tree ret = NULL;
1737
1738 if (info)
1739 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1740
1741 return ret;
1742 }
1743
1744 /* True if OpenMP should privatize what this DECL points to rather
1745 than the DECL itself. */
1746
1747 bool
1748 cxx_omp_privatize_by_reference (const_tree decl)
1749 {
1750 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1751 || is_invisiref_parm (decl));
1752 }
1753
1754 /* Return true if DECL is a const-qualified variable with no mutable members. */
1755 bool
1756 cxx_omp_const_qual_no_mutable (tree decl)
1757 {
1758 tree type = TREE_TYPE (decl);
1759 if (TREE_CODE (type) == REFERENCE_TYPE)
1760 {
1761 if (!is_invisiref_parm (decl))
1762 return false;
1763 type = TREE_TYPE (type);
1764
1765 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1766 {
1767 /* NVR doesn't preserve const qualification of the
1768 variable's type. */
1769 tree outer = outer_curly_brace_block (current_function_decl);
1770 tree var;
1771
1772 if (outer)
1773 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1774 if (DECL_NAME (decl) == DECL_NAME (var)
1775 && (TYPE_MAIN_VARIANT (type)
1776 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1777 {
1778 if (TYPE_READONLY (TREE_TYPE (var)))
1779 type = TREE_TYPE (var);
1780 break;
1781 }
1782 }
1783 }
1784
1785 if (type == error_mark_node)
1786 return false;
1787
1788 /* Variables with const-qualified type having no mutable member
1789 are predetermined shared. */
1790 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1791 return true;
1792
1793 return false;
1794 }
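/* For example (illustrative only):

     struct A { int i; };
     struct B { mutable int i; };
     const A a = { 0 };   // const, no mutable member
     const B b = { 0 };   // has a mutable member

   Only "a" satisfies cxx_omp_const_qual_no_mutable; the mutable
   member of "b" means its contents can change even though the
   variable is const.  */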
1795
1796 /* True if OpenMP sharing attribute of DECL is predetermined. */
1797
1798 enum omp_clause_default_kind
1799 cxx_omp_predetermined_sharing (tree decl)
1800 {
1801 /* Static data members are predetermined shared. */
1802 if (TREE_STATIC (decl))
1803 {
1804 tree ctx = CP_DECL_CONTEXT (decl);
1805 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1806 return OMP_CLAUSE_DEFAULT_SHARED;
1807 }
1808
1809 /* Const qualified vars having no mutable member are predetermined
1810 shared. */
1811 if (cxx_omp_const_qual_no_mutable (decl))
1812 return OMP_CLAUSE_DEFAULT_SHARED;
1813
1814 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1815 }
1816
1817 /* Finalize an implicitly determined clause. */
1818
1819 void
1820 cxx_omp_finish_clause (tree c, gimple_seq *)
1821 {
1822 tree decl, inner_type;
1823 bool make_shared = false;
1824
1825 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1826 return;
1827
1828 decl = OMP_CLAUSE_DECL (c);
1829 decl = require_complete_type (decl);
1830 inner_type = TREE_TYPE (decl);
1831 if (decl == error_mark_node)
1832 make_shared = true;
1833 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1834 inner_type = TREE_TYPE (inner_type);
1835
1836 /* We're interested in the base element, not arrays. */
1837 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1838 inner_type = TREE_TYPE (inner_type);
1839
1840 /* Check for special function availability by building a call to one.
1841 Save the results, because later we won't be in the right context
1842 for making these queries. */
1843 if (!make_shared
1844 && CLASS_TYPE_P (inner_type)
1845 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
1846 make_shared = true;
1847
1848 if (make_shared)
1849 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
1850 }
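/* For example, with a hypothetical non-trivially-copyable class

     struct S { S (); S (const S &); ~S (); };
     S s;
     #pragma omp task   // implicitly firstprivate (s)
     use (s);

   (use being a placeholder), the copy constructor and destructor are
   looked up here, while the enclosing context is still available; if
   building those calls fails, the clause is downgraded to shared
   instead. */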
1851
1852 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1853 disregarded in OpenMP construct, because it is going to be
1854 remapped during OpenMP lowering. SHARED is true if DECL
1855 is going to be shared, false if it is going to be privatized. */
1856
1857 bool
1858 cxx_omp_disregard_value_expr (tree decl, bool shared)
1859 {
1860 return !shared
1861 && VAR_P (decl)
1862 && DECL_HAS_VALUE_EXPR_P (decl)
1863 && DECL_ARTIFICIAL (decl)
1864 && DECL_LANG_SPECIFIC (decl)
1865 && DECL_OMP_PRIVATIZED_MEMBER (decl);
1866 }
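/* This matches the artificial variables the front end creates for
   OpenMP-privatized non-static data members, as in a hypothetical

     struct S {
       int m;
       void f () {
       #pragma omp parallel private (m)
	 m = 0;
       }
     };

   where the proxy VAR_DECL for M carries a DECL_VALUE_EXPR of
   this->m; since the lowering remaps the proxy itself, the value
   expression must be ignored when M is privatized. */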
1867
1868 /* Perform folding on expression X. */
1869
1870 tree
1871 cp_fully_fold (tree x)
1872 {
1873 return cp_fold (x);
1874 }
1875
1876 /* Fold expression X which is used as an rvalue if RVAL is true. */
1877
1878 static tree
1879 cp_fold_maybe_rvalue (tree x, bool rval)
1880 {
1881 if (rval && DECL_P (x))
1882 {
1883 tree v = decl_constant_value (x);
1884 if (v != error_mark_node)
1885 x = v;
1886 }
1887 return cp_fold (x);
1888 }
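/* E.g., for a hypothetical

     const int n = 4;
     int x = n + 1;

   folding N as an rvalue goes through decl_constant_value and yields
   the INTEGER_CST 4, whereas N in an lvalue context (say &n) is left
   untouched. */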
1889
1890 /* Fold expression X which is used as an rvalue. */
1891
1892 static tree
1893 cp_fold_rvalue (tree x)
1894 {
1895 return cp_fold_maybe_rvalue (x, true);
1896 }
1897
1898 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
1899 and certain changes should be made to the folding done, though currently
1900 they are not (FIXME). We never touch MAYBE_CONST, as it is only used for
1901 the C front end's C_MAYBE_CONST_EXPR. */
1902
1903 tree
1904 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
1905 {
1906 /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
1907 INTEGER_CST. */
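  /* For instance, a CONST_DECL for a hypothetical enumerator
     enum E { A = 4 } must fold to the INTEGER_CST 4. */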
1908 return cp_fold_rvalue (x);
1909 }
1910
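/* Cache mapping expressions to their cp_fold results, so that folding
   a shared subtree repeatedly stays cheap. GTY ((deletable)) lets the
   garbage collector discard the whole table at collection time;
   clear_fold_cache flushes it explicitly. */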
1911 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
1912
1913 /* Remove all entries from FOLD_CACHE. */
1914
1915 void
1916 clear_fold_cache (void)
1917 {
1918 if (fold_cache != NULL)
1919 fold_cache->empty ();
1920 }
1921
1922 /* This function tries to fold an expression X.
1923 To avoid combinatorial explosion, folding results are kept in fold_cache.
1924 If we are processing a template or X is invalid, we don't fold at all.
1925 For performance reasons we don't cache expressions representing a
1926 declaration or constant.
1927 Returns X or its folded variant. */
1928
1929 static tree
1930 cp_fold (tree x)
1931 {
1932 tree op0, op1, op2, op3;
1933 tree org_x = x, r = NULL_TREE;
1934 enum tree_code code;
1935 location_t loc;
1936 bool rval_ops = true;
1937
1938 if (!x || x == error_mark_node)
1939 return x;
1940
1941 if (processing_template_decl
1942 || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
1943 return x;
1944
1945 /* Don't bother to cache DECLs or constants. */
1946 if (DECL_P (x) || CONSTANT_CLASS_P (x))
1947 return x;
1948
1949 if (fold_cache == NULL)
1950 fold_cache = hash_map<tree, tree>::create_ggc (101);
1951
1952 if (tree *cached = fold_cache->get (x))
1953 return *cached;
1954
1955 code = TREE_CODE (x);
1956 switch (code)
1957 {
1958 case SIZEOF_EXPR:
1959 x = fold_sizeof_expr (x);
1960 break;
1961
1962 case VIEW_CONVERT_EXPR:
1963 rval_ops = false; /* FALLTHRU */
1964 case CONVERT_EXPR:
1965 case NOP_EXPR:
1966 case NON_LVALUE_EXPR:
1967
1968 if (VOID_TYPE_P (TREE_TYPE (x)))
1969 return x;
1970
1971 loc = EXPR_LOCATION (x);
1972 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
1973
1974 if (code == CONVERT_EXPR
1975 && SCALAR_TYPE_P (TREE_TYPE (x))
1976 && op0 != void_node)
1977 /* During parsing we used convert_to_*_nofold; re-convert now using the
1978 folding variants, since fold() doesn't do those transformations. */
1979 x = fold (convert (TREE_TYPE (x), op0));
1980 else if (op0 != TREE_OPERAND (x, 0))
1981 {
1982 if (op0 == error_mark_node)
1983 x = error_mark_node;
1984 else
1985 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
1986 }
1987 else
1988 x = fold (x);
1989
1990 /* Conversion of an out-of-range value has implementation-defined
1991 behavior; the language considers it different from arithmetic
1992 overflow, which is undefined. */
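      /* E.g., on a target with an 8-bit signed char, folding a
	 hypothetical (signed char) 300 yields the implementation-defined
	 value 44; any TREE_OVERFLOW that fold set while truncating it
	 must therefore be cleared. */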
1993 if (TREE_CODE (op0) == INTEGER_CST
1994 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
1995 TREE_OVERFLOW (x) = false;
1996
1997 break;
1998
1999 case ADDR_EXPR:
2000 case REALPART_EXPR:
2001 case IMAGPART_EXPR:
2002 rval_ops = false; /* FALLTHRU */
2003 case CONJ_EXPR:
2004 case FIX_TRUNC_EXPR:
2005 case FLOAT_EXPR:
2006 case NEGATE_EXPR:
2007 case ABS_EXPR:
2008 case BIT_NOT_EXPR:
2009 case TRUTH_NOT_EXPR:
2010 case FIXED_CONVERT_EXPR:
2011 case INDIRECT_REF:
2012
2013 loc = EXPR_LOCATION (x);
2014 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2015
2016 if (op0 != TREE_OPERAND (x, 0))
2017 {
2018 if (op0 == error_mark_node)
2019 x = error_mark_node;
2020 else
2021 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2022 }
2023 else
2024 x = fold (x);
2025
2026 gcc_assert (TREE_CODE (x) != COND_EXPR
2027 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2028 break;
2029
2030 case UNARY_PLUS_EXPR:
2031 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2032 if (op0 == error_mark_node)
2033 x = error_mark_node;
2034 else
2035 x = fold_convert (TREE_TYPE (x), op0);
2036 break;
2037
2038 case POSTDECREMENT_EXPR:
2039 case POSTINCREMENT_EXPR:
2040 case INIT_EXPR:
2041 case PREDECREMENT_EXPR:
2042 case PREINCREMENT_EXPR:
2043 case COMPOUND_EXPR:
2044 case MODIFY_EXPR:
2045 rval_ops = false; /* FALLTHRU */
2046 case POINTER_PLUS_EXPR:
2047 case PLUS_EXPR:
2048 case MINUS_EXPR:
2049 case MULT_EXPR:
2050 case TRUNC_DIV_EXPR:
2051 case CEIL_DIV_EXPR:
2052 case FLOOR_DIV_EXPR:
2053 case ROUND_DIV_EXPR:
2054 case TRUNC_MOD_EXPR:
2055 case CEIL_MOD_EXPR:
2056 case ROUND_MOD_EXPR:
2057 case RDIV_EXPR:
2058 case EXACT_DIV_EXPR:
2059 case MIN_EXPR:
2060 case MAX_EXPR:
2061 case LSHIFT_EXPR:
2062 case RSHIFT_EXPR:
2063 case LROTATE_EXPR:
2064 case RROTATE_EXPR:
2065 case BIT_AND_EXPR:
2066 case BIT_IOR_EXPR:
2067 case BIT_XOR_EXPR:
2068 case TRUTH_AND_EXPR:
2069 case TRUTH_ANDIF_EXPR:
2070 case TRUTH_OR_EXPR:
2071 case TRUTH_ORIF_EXPR:
2072 case TRUTH_XOR_EXPR:
2073 case LT_EXPR: case LE_EXPR:
2074 case GT_EXPR: case GE_EXPR:
2075 case EQ_EXPR: case NE_EXPR:
2076 case UNORDERED_EXPR: case ORDERED_EXPR:
2077 case UNLT_EXPR: case UNLE_EXPR:
2078 case UNGT_EXPR: case UNGE_EXPR:
2079 case UNEQ_EXPR: case LTGT_EXPR:
2080 case RANGE_EXPR: case COMPLEX_EXPR:
2081
2082 loc = EXPR_LOCATION (x);
2083 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2084 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2085
2086 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2087 {
2088 if (op0 == error_mark_node || op1 == error_mark_node)
2089 x = error_mark_node;
2090 else
2091 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2092 }
2093 else
2094 x = fold (x);
2095
2096 if (TREE_NO_WARNING (org_x)
2097 && warn_nonnull_compare
2098 && COMPARISON_CLASS_P (org_x))
2099 {
2100 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2101 ;
2102 else if (COMPARISON_CLASS_P (x))
2103 TREE_NO_WARNING (x) = 1;
2104 /* Otherwise give up on optimizing these here and let the GIMPLE
2105 folders optimize them later on. */
2106 else if (op0 != TREE_OPERAND (org_x, 0)
2107 || op1 != TREE_OPERAND (org_x, 1))
2108 {
2109 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2110 TREE_NO_WARNING (x) = 1;
2111 }
2112 else
2113 x = org_x;
2114 }
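	  /* One source of such pre-set flags is the nonnull check the
	     front end itself emits, e.g. for a hypothetical "delete p"
	     where P is a parameter declared nonnull; dropping
	     TREE_NO_WARNING there would make -Wnonnull-compare warn
	     about compiler-generated code. */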
2115 break;
2116
2117 case VEC_COND_EXPR:
2118 case COND_EXPR:
2119
2120 /* Don't bother folding a void condition, since it can't produce a
2121 constant value. Also, some statement-level uses of COND_EXPR leave
2122 one of the branches NULL, so folding would crash. */
2123 if (VOID_TYPE_P (TREE_TYPE (x)))
2124 return x;
2125
2126 loc = EXPR_LOCATION (x);
2127 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2128 op1 = cp_fold (TREE_OPERAND (x, 1));
2129 op2 = cp_fold (TREE_OPERAND (x, 2));
2130
2131 if (op0 != TREE_OPERAND (x, 0)
2132 || op1 != TREE_OPERAND (x, 1)
2133 || op2 != TREE_OPERAND (x, 2))
2134 {
2135 if (op0 == error_mark_node
2136 || op1 == error_mark_node
2137 || op2 == error_mark_node)
2138 x = error_mark_node;
2139 else
2140 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2141 }
2142 else
2143 x = fold (x);
2144
2145 /* A COND_EXPR might have incompatible types in branches if one or both
2146 arms are bitfields. If folding exposed such a branch, fix it up. */
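      /* E.g., a hypothetical "cond ? s.b : s.b" with B declared as
	 "int b : 3" can fold to plain s.b, whose lowered type no longer
	 matches the COND_EXPR's declared type. */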
2147 if (TREE_CODE (x) != code)
2148 if (tree type = is_bitfield_expr_with_lowered_type (x))
2149 x = fold_convert (type, x);
2150
2151 break;
2152
2153 case CALL_EXPR:
2154 {
2155 int i, m, sv = optimize, nw = sv, changed = 0;
2156 tree callee = get_callee_fndecl (x);
2157
2158 /* Some built-in function calls will be evaluated at compile-time in
2159 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2160 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
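	/* E.g., at -O0 a hypothetical

	     constexpr bool f (int i) { return __builtin_constant_p (i); }

	   must not have the built-in folded to 0 here, as constexpr
	   evaluation may still see a constant argument later. */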
2161 if (callee && DECL_BUILT_IN (callee) && !optimize
2162 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2163 && current_function_decl
2164 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2165 nw = 1;
2166
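	/* Fold into a copy: the loop below writes folded arguments back
	   through CALL_EXPR_ARG, and ORG_X may be shared, so it must not
	   be modified in place. */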
2167 x = copy_node (x);
2168
2169 m = call_expr_nargs (x);
2170 for (i = 0; i < m; i++)
2171 {
2172 r = cp_fold (CALL_EXPR_ARG (x, i));
2173 if (r != CALL_EXPR_ARG (x, i))
2174 {
2175 if (r == error_mark_node)
2176 {
2177 x = error_mark_node;
2178 break;
2179 }
2180 changed = 1;
2181 }
2182 CALL_EXPR_ARG (x, i) = r;
2183 }
2184 if (x == error_mark_node)
2185 break;
2186
2187 optimize = nw;
2188 r = fold (x);
2189 optimize = sv;
2190
2191 if (TREE_CODE (r) != CALL_EXPR)
2192 {
2193 x = cp_fold (r);
2194 break;
2195 }
2196
2197 optimize = nw;
2198
2199 /* Invoke maybe_constant_value for functions declared
2200 constexpr and not called with AGGR_INIT_EXPRs.
2201 TODO:
2202 Do constexpr expansion of expressions where the call itself is not
2203 constant, but the call followed by an INDIRECT_REF is. */
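	/* E.g., with a hypothetical

	     constexpr int sq (int i) { return i * i; }

	   a plain use such as "int x = sq (3);" is reduced to 9 here
	   even though fold itself cannot evaluate the call. */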
2204 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2205 && !flag_no_inline)
2206 r = maybe_constant_value (x);
2207 optimize = sv;
2208
2209 if (TREE_CODE (r) != CALL_EXPR)
2210 {
2211 x = r;
2212 break;
2213 }
2214
2215 if (!changed)
2216 x = org_x;
2217 break;
2218 }
2219
2220 case CONSTRUCTOR:
2221 {
2222 unsigned i;
2223 constructor_elt *p;
2224 bool changed = false;
2225 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2226 vec<constructor_elt, va_gc> *nelts = NULL;
2227 vec_safe_reserve (nelts, vec_safe_length (elts));
2228 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2229 {
2230 tree op = cp_fold (p->value);
2231 constructor_elt e = { p->index, op };
2232 nelts->quick_push (e);
2233 if (op != p->value)
2234 {
2235 if (op == error_mark_node)
2236 {
2237 x = error_mark_node;
2238 changed = false;
2239 break;
2240 }
2241 changed = true;
2242 }
2243 }
2244 if (changed)
2245 x = build_constructor (TREE_TYPE (x), nelts);
2246 else
2247 vec_free (nelts);
2248 break;
2249 }
2250 case TREE_VEC:
2251 {
2252 bool changed = false;
2253 vec<tree, va_gc> *vec = make_tree_vector ();
2254 int i, n = TREE_VEC_LENGTH (x);
2255 vec_safe_reserve (vec, n);
2256
2257 for (i = 0; i < n; i++)
2258 {
2259 tree op = cp_fold (TREE_VEC_ELT (x, i));
2260 vec->quick_push (op);
2261 if (op != TREE_VEC_ELT (x, i))
2262 changed = true;
2263 }
2264
2265 if (changed)
2266 {
2267 r = copy_node (x);
2268 for (i = 0; i < n; i++)
2269 TREE_VEC_ELT (r, i) = (*vec)[i];
2270 x = r;
2271 }
2272
2273 release_tree_vector (vec);
2274 }
2275
2276 break;
2277
2278 case ARRAY_REF:
2279 case ARRAY_RANGE_REF:
2280
2281 loc = EXPR_LOCATION (x);
2282 op0 = cp_fold (TREE_OPERAND (x, 0));
2283 op1 = cp_fold (TREE_OPERAND (x, 1));
2284 op2 = cp_fold (TREE_OPERAND (x, 2));
2285 op3 = cp_fold (TREE_OPERAND (x, 3));
2286
2287 if (op0 != TREE_OPERAND (x, 0)
2288 || op1 != TREE_OPERAND (x, 1)
2289 || op2 != TREE_OPERAND (x, 2)
2290 || op3 != TREE_OPERAND (x, 3))
2291 {
2292 if (op0 == error_mark_node
2293 || op1 == error_mark_node
2294 || op2 == error_mark_node
2295 || op3 == error_mark_node)
2296 x = error_mark_node;
2297 else
2298 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2299 }
2300
2301 x = fold (x);
2302 break;
2303
2304 default:
2305 return org_x;
2306 }
2307
2308 fold_cache->put (org_x, x);
2309 /* Prevent us from trying to fold an already folded result again. */
2310 if (x != org_x)
2311 fold_cache->put (x, x);
2312
2313 return x;
2314 }
2315
2316 #include "gt-cp-cp-gimplify.h"