1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2016 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
26 #include "basic-block.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
33 #include "c-family/c-ubsan.h"
35 #include "cp-cilkplus.h"
37 /* Forward declarations. */
39 static tree
cp_genericize_r (tree
*, int *, void *);
40 static tree
cp_fold_r (tree
*, int *, void *);
41 static void cp_genericize_tree (tree
*);
42 static tree
cp_fold (tree
);
44 /* Local declarations. */
/* Discriminator for the two kinds of loop-exit jump targets; used to
   index bc_label below.  */
46 enum bc_t
{ bc_break
= 0, bc_continue
= 1 };
48 /* Stack of labels which are targets for "break" or "continue",
49 linked through TREE_CHAIN. */
50 static tree bc_label
[2];
52 /* Begin a scope which can be exited by a break or continue statement. BC
55 Just creates a label with location LOCATION and pushes it into the current
59 begin_bc_block (enum bc_t bc
, location_t location
)
/* NOTE(review): this extraction elided several original lines here
   (braces, the push onto bc_label and the return of LABEL, per the
   jumps in the embedded line numbers) -- confirm against upstream.  */
61 tree label
= create_artificial_label (location
);
/* Chain the new label onto the current stack entry for this kind.  */
62 DECL_CHAIN (label
) = bc_label
[bc
];
/* Mark the label as a break or continue target; the guarding if/else
   (original lines 64/66) was elided by the extraction.  */
65 LABEL_DECL_BREAK (label
) = true;
67 LABEL_DECL_CONTINUE (label
) = true;
71 /* Finish a scope which can be exited by a break or continue statement.
72 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
73 an expression for the contents of the scope.
75 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
76 BLOCK. Otherwise, just forget the label. */
79 finish_bc_block (tree
*block
, enum bc_t bc
, tree label
)
/* The label being finished must be the innermost one of this kind.  */
81 gcc_assert (label
== bc_label
[bc
]);
/* Emit the label only if a break/continue actually targeted it
   (TREE_USED is set by get_bc_label).  */
83 if (TREE_USED (label
))
84 append_to_statement_list (build1 (LABEL_EXPR
, void_type_node
, label
),
/* Pop this label off the bc_label stack and detach it.  */
87 bc_label
[bc
] = DECL_CHAIN (label
);
88 DECL_CHAIN (label
) = NULL_TREE
;
91 /* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
92 *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
93 TARGET_EXPR. *PRE_P and *POST_P are gimple sequences from the caller
94 of gimplify_cilk_spawn. */
97 cilk_cp_gimplify_call_params_in_spawned_fn (tree
*expr_p
, gimple_seq
*pre_p
,
/* NOTE(review): the trailing post_p parameter and the declaration of
   `ii' were on elided original lines.  */
102 cilk_gimplify_call_params_in_spawned_fn (expr_p
, pre_p
);
/* For an AGGR_INIT_EXPR, additionally gimplify each constructor
   argument to an rvalue register.  */
103 if (TREE_CODE (*expr_p
) == AGGR_INIT_EXPR
)
104 for (ii
= 0; ii
< aggr_init_expr_nargs (*expr_p
); ii
++)
105 gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p
, ii
), pre_p
, post_p
,
106 is_gimple_reg
, fb_rvalue
);
110 /* Get the LABEL_EXPR to represent a break or continue statement
111 in the current block scope. BC indicates which. */
114 get_bc_label (enum bc_t bc
)
116 tree label
= bc_label
[bc
];
118 /* Mark the label used for finish_bc_block. */
119 TREE_USED (label
) = 1;
/* NOTE(review): the return of LABEL was on an elided original line.  */
123 /* Genericize a TRY_BLOCK. */
126 genericize_try_block (tree
*stmt_p
)
128 tree body
= TRY_STMTS (*stmt_p
);
129 tree cleanup
= TRY_HANDLERS (*stmt_p
);
/* Replace the C++ TRY_BLOCK with the GENERIC TRY_CATCH_EXPR form.  */
131 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
134 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
137 genericize_catch_block (tree
*stmt_p
)
139 tree type
= HANDLER_TYPE (*stmt_p
);
140 tree body
= HANDLER_BODY (*stmt_p
);
142 /* FIXME should the caught type go in TREE_TYPE? */
/* CATCH_EXPR operands are the caught type and the handler body.  */
143 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
146 /* A terser interface for building a representation of an exception
/* NOTE(review): the rest of this header comment, the declaration of `t'
   and the final return were on elided original lines.  */
150 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
/* Build the EH_FILTER_EXPR: the list of ALLOWED types plus the FAILURE
   action to run when the filter is violated.  */
154 /* FIXME should the allowed types go in TREE_TYPE? */
155 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
156 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
/* Wrap BODY in a TRY_CATCH_EXPR whose handler is the filter.  */
158 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
159 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
164 /* Genericize an EH_SPEC_BLOCK by converting it to a
165 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
168 genericize_eh_spec_block (tree
*stmt_p
)
170 tree body
= EH_SPEC_STMTS (*stmt_p
);
171 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
/* On violation of the exception specification, call unexpected() with
   the current exception pointer.  */
172 tree failure
= build_call_n (call_unexpected_node
, 1, build_exc_ptr ());
174 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
/* Suppress warnings on the synthesized wrapper and its filter operand.  */
175 TREE_NO_WARNING (*stmt_p
) = true;
176 TREE_NO_WARNING (TREE_OPERAND (*stmt_p
, 1)) = true;
179 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
182 genericize_if_stmt (tree
*stmt_p
)
184 tree stmt
, cond
, then_
, else_
;
185 location_t locus
= EXPR_LOCATION (*stmt_p
);
/* NOTE(review): the binding of `stmt' from *stmt_p (original line ~187)
   was elided by the extraction.  */
188 cond
= IF_COND (stmt
);
189 then_
= THEN_CLAUSE (stmt
);
190 else_
= ELSE_CLAUSE (stmt
);
/* Missing arms become empty statements; the guarding NULL checks
   (original lines 192/194) were elided.  */
193 then_
= build_empty_stmt (locus
);
195 else_
= build_empty_stmt (locus
);
/* For a constant condition keep only the live arm when the dead arm has
   no side effects; the kept-arm assignments (lines 198/200) were
   elided.  */
197 if (integer_nonzerop (cond
) && !TREE_SIDE_EFFECTS (else_
))
199 else if (integer_zerop (cond
) && !TREE_SIDE_EFFECTS (then_
))
202 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
203 if (!EXPR_HAS_LOCATION (stmt
))
204 protected_set_expr_location (stmt
, locus
);
/* NOTE(review): the final store of STMT back through *stmt_p was on an
   elided line.  */
208 /* Build a generic representation of one of the C loop forms. COND is the
209 loop condition or NULL_TREE. BODY is the (possibly compound) statement
210 controlled by the loop. INCR is the increment expression of a for-loop,
211 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
212 evaluated before the loop body as in while and for loops, or after the
213 loop body as in do-while loops. */
216 genericize_cp_loop (tree
*stmt_p
, location_t start_locus
, tree cond
, tree body
,
217 tree incr
, bool cond_is_first
, int *walk_subtrees
,
/* NOTE(review): the trailing `void *data' parameter and the local
   declarations of blab/clab/exit/loop were on elided original lines.  */
222 tree stmt_list
= NULL
;
/* Open the break and continue scopes for this loop.  */
224 blab
= begin_bc_block (bc_break
, start_locus
);
225 clab
= begin_bc_block (bc_continue
, start_locus
);
227 protected_set_expr_location (incr
, start_locus
);
/* Genericize the pieces before assembling the loop.  */
229 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
230 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
231 cp_walk_tree (&incr
, cp_genericize_r
, data
, NULL
);
234 if (cond
&& TREE_CODE (cond
) != INTEGER_CST
)
236 /* If COND is constant, don't bother building an exit. If it's false,
237 we won't build a loop. If it's true, any exits are in the body. */
238 location_t cloc
= EXPR_LOC_OR_LOC (cond
, start_locus
);
/* exit is: if (cond) ; else goto break-label;  */
239 exit
= build1_loc (cloc
, GOTO_EXPR
, void_type_node
,
240 get_bc_label (bc_break
));
241 exit
= fold_build3_loc (cloc
, COND_EXPR
, void_type_node
, cond
,
242 build_empty_stmt (cloc
), exit
);
/* Assemble the body list: [exit] body continue-label incr [exit].  */
245 if (exit
&& cond_is_first
)
246 append_to_statement_list (exit
, &stmt_list
);
247 append_to_statement_list (body
, &stmt_list
);
248 finish_bc_block (&stmt_list
, bc_continue
, clab
);
249 append_to_statement_list (incr
, &stmt_list
);
250 if (exit
&& !cond_is_first
)
251 append_to_statement_list (exit
, &stmt_list
);
254 stmt_list
= build_empty_stmt (start_locus
);
/* Constant-false condition: the "loop" reduces to a COND_EXPR that
   runs the body list at most once (do-while semantics).  */
257 if (cond
&& integer_zerop (cond
))
260 loop
= fold_build3_loc (start_locus
, COND_EXPR
,
261 void_type_node
, cond
, stmt_list
,
262 build_empty_stmt (start_locus
));
/* Otherwise emit a LOOP_EXPR; prefer the body's own location when the
   loop cannot exit through the condition.  */
268 location_t loc
= start_locus
;
269 if (!cond
|| integer_nonzerop (cond
))
270 loc
= EXPR_LOCATION (expr_first (body
));
271 if (loc
== UNKNOWN_LOCATION
)
273 loop
= build1_loc (loc
, LOOP_EXPR
, void_type_node
, stmt_list
);
/* Emit the loop followed by the break label.  */
277 append_to_statement_list (loop
, &stmt_list
);
278 finish_bc_block (&stmt_list
, bc_break
, blab
);
280 stmt_list
= build_empty_stmt (start_locus
);
/* NOTE(review): the final `*stmt_p = stmt_list;' was on an elided
   line.  */
285 /* Genericize a FOR_STMT node *STMT_P. */
288 genericize_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
/* NOTE(review): the local bindings of `stmt', `expr' and `loop' were on
   elided original lines.  */
293 tree init
= FOR_INIT_STMT (stmt
);
/* Genericize the init statement and emit it ahead of the loop.  */
297 cp_walk_tree (&init
, cp_genericize_r
, data
, NULL
);
298 append_to_statement_list (init
, &expr
);
/* cond_is_first == 1: a for-loop tests before the body.  */
301 genericize_cp_loop (&loop
, EXPR_LOCATION (stmt
), FOR_COND (stmt
),
302 FOR_BODY (stmt
), FOR_EXPR (stmt
), 1, walk_subtrees
, data
);
303 append_to_statement_list (loop
, &expr
);
304 if (expr
== NULL_TREE
)
309 /* Genericize a WHILE_STMT node *STMT_P. */
312 genericize_while_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
/* Delegate to genericize_cp_loop with cond_is_first == 1 (test before
   body) and no increment expression.  The binding of `stmt' was on an
   elided original line.  */
315 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), WHILE_COND (stmt
),
316 WHILE_BODY (stmt
), NULL_TREE
, 1, walk_subtrees
, data
);
319 /* Genericize a DO_STMT node *STMT_P. */
322 genericize_do_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
/* Delegate to genericize_cp_loop with cond_is_first == 0 (test after
   body, do-while semantics).  The binding of `stmt' was on an elided
   original line.  */
325 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), DO_COND (stmt
),
326 DO_BODY (stmt
), NULL_TREE
, 0, walk_subtrees
, data
);
329 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
332 genericize_switch_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
335 tree break_block
, body
, cond
, type
;
336 location_t stmt_locus
= EXPR_LOCATION (stmt
);
/* Open the break scope covering the switch body.  */
338 break_block
= begin_bc_block (bc_break
, stmt_locus
);
340 body
= SWITCH_STMT_BODY (stmt
);
/* Substitute an empty statement for a missing body; the guarding NULL
   check (original line 341) was elided.  */
342 body
= build_empty_stmt (stmt_locus
);
343 cond
= SWITCH_STMT_COND (stmt
);
344 type
= SWITCH_STMT_TYPE (stmt
);
/* Genericize the pieces before building the SWITCH_EXPR.  */
346 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
347 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
348 cp_walk_tree (&type
, cp_genericize_r
, data
, NULL
);
351 *stmt_p
= build3_loc (stmt_locus
, SWITCH_EXPR
, type
, cond
, body
, NULL_TREE
);
352 finish_bc_block (stmt_p
, bc_break
, break_block
);
/* NOTE(review): the binding of `stmt' was on an elided original
   line.  */
355 /* Genericize a CONTINUE_STMT node *STMT_P. */
358 genericize_continue_stmt (tree
*stmt_p
)
360 tree stmt_list
= NULL
;
/* Hint to the branch predictor that the continue edge is not taken.  */
361 tree pred
= build_predict_expr (PRED_CONTINUE
, NOT_TAKEN
);
362 tree label
= get_bc_label (bc_continue
);
363 location_t location
= EXPR_LOCATION (*stmt_p
);
/* The continue becomes a goto to the innermost continue label.  */
364 tree jump
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
365 append_to_statement_list_force (pred
, &stmt_list
);
366 append_to_statement_list (jump
, &stmt_list
);
/* NOTE(review): the final `*stmt_p = stmt_list;' was on an elided
   line.  */
370 /* Genericize a BREAK_STMT node *STMT_P. */
373 genericize_break_stmt (tree
*stmt_p
)
375 tree label
= get_bc_label (bc_break
);
376 location_t location
= EXPR_LOCATION (*stmt_p
);
/* The break becomes a goto to the innermost break label.  */
377 *stmt_p
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
380 /* Genericize a OMP_FOR node *STMT_P. */
383 genericize_omp_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
386 location_t locus
= EXPR_LOCATION (stmt
);
/* Only a continue scope is opened here -- no break scope for the OpenMP
   loop itself.  The binding of `stmt' was on an elided line.  */
387 tree clab
= begin_bc_block (bc_continue
, locus
);
389 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_genericize_r
, data
, NULL
);
/* Taskloop clauses are skipped here -- presumably walked elsewhere;
   confirm against the caller of this function.  */
390 if (TREE_CODE (stmt
) != OMP_TASKLOOP
)
391 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
392 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_genericize_r
, data
, NULL
);
393 cp_walk_tree (&OMP_FOR_COND (stmt
), cp_genericize_r
, data
, NULL
);
394 cp_walk_tree (&OMP_FOR_INCR (stmt
), cp_genericize_r
, data
, NULL
);
395 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_genericize_r
, data
, NULL
);
/* Attach the continue label at the end of the loop body.  */
398 finish_bc_block (&OMP_FOR_BODY (stmt
), bc_continue
, clab
);
401 /* Hook into the middle of gimplifying an OMP_FOR node. */
403 static enum gimplify_status
404 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
406 tree for_stmt
= *expr_p
;
407 gimple_seq seq
= NULL
;
409 /* Protect ourselves from recursion. */
410 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
/* Flag the node while it is being gimplified so a re-entry bails out
   via the check above; the return statements were on elided lines.  */
412 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
414 gimplify_and_add (for_stmt
, &seq
);
415 gimple_seq_add_seq (pre_p
, seq
);
417 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
422 /* Gimplify an EXPR_STMT node. */
425 gimplify_expr_stmt (tree
*stmt_p
)
427 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
429 if (stmt
== error_mark_node
)
432 /* Gimplification of a statement expression will nullify the
433 statement if all its side effects are moved to *PRE_P and *POST_P.
435 In this case we will not want to emit the gimplified statement.
436 However, we may still want to emit a warning, so we do that before
438 if (stmt
&& warn_unused_value
)
440 if (!TREE_SIDE_EFFECTS (stmt
))
442 if (!IS_EMPTY_STMT (stmt
)
443 && !VOID_TYPE_P (TREE_TYPE (stmt
))
444 && !TREE_NO_WARNING (stmt
))
445 warning (OPT_Wunused_value
, "statement with no effect");
/* Value-producing statement: warn if the value goes unused.  */
448 warn_if_unused_value (stmt
, input_location
);
/* A nullified statement is replaced by an empty statement list.  */
451 if (stmt
== NULL_TREE
)
452 stmt
= alloc_stmt_list ();
/* NOTE(review): the error-path handling, the else branch, and the final
   store `*stmt_p = stmt;' were on elided original lines.  */
457 /* Gimplify initialization from an AGGR_INIT_EXPR. */
460 cp_gimplify_init_expr (tree
*expr_p
)
462 tree from
= TREE_OPERAND (*expr_p
, 1);
463 tree to
= TREE_OPERAND (*expr_p
, 0);
466 /* What about code that pulls out the temp and uses it elsewhere? I
467 think that such code never uses the TARGET_EXPR as an initializer. If
468 I'm wrong, we'll abort because the temp won't have any RTL. In that
469 case, I guess we'll need to replace references somehow. */
470 if (TREE_CODE (from
) == TARGET_EXPR
)
471 from
= TARGET_EXPR_INITIAL (from
);
473 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
474 inside the TARGET_EXPR. */
/* NOTE(review): the loop header binding `t' (iterating from FROM) was
   on elided original lines.  */
477 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
479 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
480 replace the slot operand with our target.
482 Should we add a target parm to gimplify_expr instead? No, as in this
483 case we want to replace the INIT_EXPR. */
484 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
485 || TREE_CODE (sub
) == VEC_INIT_EXPR
)
487 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
488 AGGR_INIT_EXPR_SLOT (sub
) = to
;
490 VEC_INIT_EXPR_SLOT (sub
) = to
;
493 /* The initialization is now a side-effect, so the container can
496 TREE_TYPE (from
) = void_type_node
;
/* C++14 aggregate NSDMI: resolve placeholders against the target.  */
499 if (cxx_dialect
>= cxx14
&& TREE_CODE (sub
) == CONSTRUCTOR
)
500 /* Handle aggregate NSDMI. */
501 replace_placeholders (sub
, to
);
/* Advance to the next expression in the COMPOUND_EXPR chain.  */
506 t
= TREE_OPERAND (t
, 1);
511 /* Gimplify a MUST_NOT_THROW_EXPR. */
513 static enum gimplify_status
514 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
/* NOTE(review): the bindings of `stmt' (= *expr_p) and `mnt' were on
   elided original lines.  */
517 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
518 tree body
= TREE_OPERAND (stmt
, 0);
519 gimple_seq try_
= NULL
;
520 gimple_seq catch_
= NULL
;
/* Wrap BODY in a GIMPLE_TRY whose handler calls terminate(), so any
   exception escaping BODY terminates the program.  */
523 gimplify_and_add (body
, &try_
);
524 mnt
= gimple_build_eh_must_not_throw (terminate_node
);
525 gimple_seq_add_stmt_without_update (&catch_
, mnt
);
526 mnt
= gimple_build_try (try_
, catch_
, GIMPLE_TRY_CATCH
);
528 gimple_seq_add_stmt_without_update (pre_p
, mnt
);
539 /* Return TRUE if an operand (OP) of a given TYPE being copied is
540 really just an empty class copy.
542 Check that the operand has a simple form so that TARGET_EXPRs and
543 non-empty CONSTRUCTORs get reduced properly, and we leave the
544 return slot optimization alone because it isn't a copy. */
547 simple_empty_class_p (tree type
, tree op
)
/* NOTE(review): the leading `return' keyword of this expression was on
   an elided original line.  */
550 ((TREE_CODE (op
) == COMPOUND_EXPR
551 && simple_empty_class_p (type
, TREE_OPERAND (op
, 1)))
552 || is_gimple_lvalue (op
)
553 || INDIRECT_REF_P (op
)
554 || (TREE_CODE (op
) == CONSTRUCTOR
555 && CONSTRUCTOR_NELTS (op
) == 0
556 && !TREE_CLOBBER_P (op
))
557 || (TREE_CODE (op
) == CALL_EXPR
558 && !CALL_EXPR_RETURN_SLOT_OPT (op
)))
559 && is_really_empty_class (type
);
562 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
565 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
567 int saved_stmts_are_full_exprs_p
= 0;
568 enum tree_code code
= TREE_CODE (*expr_p
);
569 enum gimplify_status ret
;
571 if (STATEMENT_CODE_P (code
))
573 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
574 current_stmt_tree ()->stmts_are_full_exprs_p
575 = STMT_IS_FULL_EXPR_P (*expr_p
);
581 simplify_aggr_init_expr (expr_p
);
587 location_t loc
= input_location
;
588 tree init
= VEC_INIT_EXPR_INIT (*expr_p
);
589 int from_array
= (init
&& TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
590 gcc_assert (EXPR_HAS_LOCATION (*expr_p
));
591 input_location
= EXPR_LOCATION (*expr_p
);
592 *expr_p
= build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p
), NULL_TREE
,
593 init
, VEC_INIT_EXPR_VALUE_INIT (*expr_p
),
595 tf_warning_or_error
);
596 cp_genericize_tree (expr_p
);
598 input_location
= loc
;
603 /* FIXME communicate throw type to back end, probably by moving
604 THROW_EXPR into ../tree.def. */
605 *expr_p
= TREE_OPERAND (*expr_p
, 0);
609 case MUST_NOT_THROW_EXPR
:
610 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
613 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
614 LHS of an assignment might also be involved in the RHS, as in bug
617 if (fn_contains_cilk_spawn_p (cfun
))
619 if (cilk_cp_detect_spawn_and_unwrap (expr_p
))
621 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p
,
623 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
625 if (seen_error () && contains_cilk_spawn_stmt (*expr_p
))
629 cp_gimplify_init_expr (expr_p
);
630 if (TREE_CODE (*expr_p
) != INIT_EXPR
)
632 /* Otherwise fall through. */
636 if (fn_contains_cilk_spawn_p (cfun
)
637 && cilk_cp_detect_spawn_and_unwrap (expr_p
)
640 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p
, pre_p
, post_p
);
641 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
643 /* If the back end isn't clever enough to know that the lhs and rhs
644 types are the same, add an explicit conversion. */
645 tree op0
= TREE_OPERAND (*expr_p
, 0);
646 tree op1
= TREE_OPERAND (*expr_p
, 1);
648 if (!error_operand_p (op0
)
649 && !error_operand_p (op1
)
650 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0
))
651 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1
)))
652 && !useless_type_conversion_p (TREE_TYPE (op1
), TREE_TYPE (op0
)))
653 TREE_OPERAND (*expr_p
, 1) = build1 (VIEW_CONVERT_EXPR
,
654 TREE_TYPE (op0
), op1
);
656 else if (simple_empty_class_p (TREE_TYPE (op0
), op1
))
658 /* Remove any copies of empty classes. Also drop volatile
659 variables on the RHS to avoid infinite recursion from
660 gimplify_expr trying to load the value. */
661 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
662 is_gimple_lvalue
, fb_lvalue
);
663 if (TREE_SIDE_EFFECTS (op1
))
665 if (TREE_THIS_VOLATILE (op1
)
666 && (REFERENCE_CLASS_P (op1
) || DECL_P (op1
)))
667 op1
= build_fold_addr_expr (op1
);
669 gimplify_and_add (op1
, pre_p
);
671 *expr_p
= TREE_OPERAND (*expr_p
, 0);
677 case EMPTY_CLASS_EXPR
:
678 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
679 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
684 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
689 genericize_try_block (expr_p
);
694 genericize_catch_block (expr_p
);
699 genericize_eh_spec_block (expr_p
);
718 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
722 gimplify_expr_stmt (expr_p
);
726 case UNARY_PLUS_EXPR
:
728 tree arg
= TREE_OPERAND (*expr_p
, 0);
729 tree type
= TREE_TYPE (*expr_p
);
730 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
736 case CILK_SPAWN_STMT
:
737 gcc_assert(fn_contains_cilk_spawn_p (cfun
)
738 && cilk_cp_detect_spawn_and_unwrap (expr_p
));
742 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p
, pre_p
, post_p
);
743 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
748 if (fn_contains_cilk_spawn_p (cfun
)
749 && cilk_cp_detect_spawn_and_unwrap (expr_p
)
752 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p
, pre_p
, post_p
);
753 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
755 /* DR 1030 says that we need to evaluate the elements of an
756 initializer-list in forward order even when it's used as arguments to
757 a constructor. So if the target wants to evaluate them in reverse
758 order and there's more than one argument other than 'this', gimplify
761 if (PUSH_ARGS_REVERSED
&& CALL_EXPR_LIST_INIT_P (*expr_p
)
762 && call_expr_nargs (*expr_p
) > 2)
764 int nargs
= call_expr_nargs (*expr_p
);
765 location_t loc
= EXPR_LOC_OR_LOC (*expr_p
, input_location
);
766 for (int i
= 1; i
< nargs
; ++i
)
768 enum gimplify_status t
769 = gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
, loc
);
777 if (TREE_OPERAND (*expr_p
, 0)
778 && (TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == INIT_EXPR
779 || TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == MODIFY_EXPR
))
781 expr_p
= &TREE_OPERAND (*expr_p
, 0);
782 code
= TREE_CODE (*expr_p
);
783 /* Avoid going through the INIT_EXPR case, which can
784 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
785 goto modify_expr_case
;
790 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
, post_p
);
794 /* Restore saved state. */
795 if (STATEMENT_CODE_P (code
))
796 current_stmt_tree ()->stmts_are_full_exprs_p
797 = saved_stmts_are_full_exprs_p
;
/* Return true iff T is a parameter or return value that is passed by
   invisible reference (DECL_BY_REFERENCE is set on it).  */
803 is_invisiref_parm (const_tree t
)
805 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
806 && DECL_BY_REFERENCE (t
));
809 /* Return true if the UIDs of the two int-tree map entries are equal. */
812 cxx_int_tree_map_hasher::equal (cxx_int_tree_map
*a
, cxx_int_tree_map
*b
)
814 return (a
->uid
== b
->uid
);
817 /* Hash a UID in a cxx_int_tree_map. */
820 cxx_int_tree_map_hasher::hash (cxx_int_tree_map
*item
)
/* NOTE(review): the body (presumably `return item->uid;') was elided by
   the extraction -- confirm against upstream cp-gimplify.c.  */
825 /* A stable comparison routine for use with splay trees and DECLs. */
828 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
/* NOTE(review): the conversions of XA/XB into decls `a'/`b' were on
   elided original lines.  */
833 return DECL_UID (a
) - DECL_UID (b
);
836 /* OpenMP context during genericization. */
838 struct cp_genericize_omp_taskreg
/* Enclosing task/parallel region, or NULL at the outermost level.  */
842 struct cp_genericize_omp_taskreg
*outer
;
/* Map of DECLs noticed in this region, keyed by splay_tree_key.  */
843 splay_tree variables
;
/* NOTE(review): the is_parallel and default_shared members referenced
   elsewhere in this file were on elided original lines.  */
846 /* Return true if genericization should try to determine if
847 DECL is firstprivate or shared within task regions. */
850 omp_var_to_track (tree decl
)
852 tree type
= TREE_TYPE (decl
);
/* Invisible-reference parms are judged by their referenced type.  */
853 if (is_invisiref_parm (decl
))
854 type
= TREE_TYPE (type
);
/* Strip array dimensions down to the element type.  */
855 while (TREE_CODE (type
) == ARRAY_TYPE
)
856 type
= TREE_TYPE (type
);
857 if (type
== error_mark_node
|| !CLASS_TYPE_P (type
))
859 if (VAR_P (decl
) && CP_DECL_THREAD_LOCAL_P (decl
))
861 if (cxx_omp_predetermined_sharing (decl
) != OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
/* NOTE(review): the early `return false' statements paired with the
   conditions above and the final `return true' were on elided lines.  */
866 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
869 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg
*omp_ctx
, tree decl
)
871 splay_tree_node n
= splay_tree_lookup (omp_ctx
->variables
,
872 (splay_tree_key
) decl
);
875 int flags
= OMP_CLAUSE_DEFAULT_SHARED
;
877 omp_cxx_notice_variable (omp_ctx
->outer
, decl
);
878 if (!omp_ctx
->default_shared
)
880 struct cp_genericize_omp_taskreg
*octx
;
882 for (octx
= omp_ctx
->outer
; octx
; octx
= octx
->outer
)
884 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
885 if (n
&& n
->value
!= OMP_CLAUSE_DEFAULT_SHARED
)
887 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
890 if (octx
->is_parallel
)
894 && (TREE_CODE (decl
) == PARM_DECL
895 || (!(TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
896 && DECL_CONTEXT (decl
) == current_function_decl
)))
897 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
898 if (flags
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
900 /* DECL is implicitly determined firstprivate in
901 the current task construct. Ensure copy ctor and
902 dtor are instantiated, because during gimplification
903 it will be already too late. */
904 tree type
= TREE_TYPE (decl
);
905 if (is_invisiref_parm (decl
))
906 type
= TREE_TYPE (type
);
907 while (TREE_CODE (type
) == ARRAY_TYPE
)
908 type
= TREE_TYPE (type
);
909 get_copy_ctor (type
, tf_none
);
910 get_dtor (type
, tf_none
);
913 splay_tree_insert (omp_ctx
->variables
, (splay_tree_key
) decl
, flags
);
917 /* Genericization context. */
919 struct cp_genericize_data
/* Set of trees already visited by cp_genericize_r (walk-once guard).  */
921 hash_set
<tree
> *p_set
;
/* Stack of enclosing BIND_EXPRs, innermost last.  */
922 vec
<tree
> bind_expr_stack
;
/* Innermost OpenMP task/parallel region, or NULL.  */
923 struct cp_genericize_omp_taskreg
*omp_ctx
;
/* NOTE(review): the no_sanitize_p member referenced later in this file
   was on an elided original line.  */
928 /* Perform any pre-gimplification folding of C++ front end trees to
930 Note: The folding of none-omp cases is something to move into
931 the middle-end. As for now we have most foldings only on GENERIC
932 in fold-const, we need to perform this before transformation to
936 cp_fold_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
941 *stmt_p
= stmt
= cp_fold (*stmt_p
);
943 if (((hash_set
<tree
> *) data
)->add (stmt
))
945 /* Don't walk subtrees of stmts we've already walked once, otherwise
946 we can have exponential complexity with e.g. lots of nested
947 SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will return
948 always the same tree, which the first time cp_fold_r has been
949 called on it had the subtrees walked. */
954 code
= TREE_CODE (stmt
);
955 if (code
== OMP_FOR
|| code
== OMP_SIMD
|| code
== OMP_DISTRIBUTE
956 || code
== OMP_TASKLOOP
|| code
== CILK_FOR
|| code
== CILK_SIMD
957 || code
== OACC_LOOP
)
962 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_fold_r
, data
, NULL
);
963 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_fold_r
, data
, NULL
);
964 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_fold_r
, data
, NULL
);
965 x
= OMP_FOR_COND (stmt
);
966 if (x
&& TREE_CODE_CLASS (TREE_CODE (x
)) == tcc_comparison
)
968 cp_walk_tree (&TREE_OPERAND (x
, 0), cp_fold_r
, data
, NULL
);
969 cp_walk_tree (&TREE_OPERAND (x
, 1), cp_fold_r
, data
, NULL
);
971 else if (x
&& TREE_CODE (x
) == TREE_VEC
)
973 n
= TREE_VEC_LENGTH (x
);
974 for (i
= 0; i
< n
; i
++)
976 tree o
= TREE_VEC_ELT (x
, i
);
977 if (o
&& TREE_CODE_CLASS (TREE_CODE (o
)) == tcc_comparison
)
978 cp_walk_tree (&TREE_OPERAND (o
, 1), cp_fold_r
, data
, NULL
);
981 x
= OMP_FOR_INCR (stmt
);
982 if (x
&& TREE_CODE (x
) == TREE_VEC
)
984 n
= TREE_VEC_LENGTH (x
);
985 for (i
= 0; i
< n
; i
++)
987 tree o
= TREE_VEC_ELT (x
, i
);
988 if (o
&& TREE_CODE (o
) == MODIFY_EXPR
)
989 o
= TREE_OPERAND (o
, 1);
990 if (o
&& (TREE_CODE (o
) == PLUS_EXPR
|| TREE_CODE (o
) == MINUS_EXPR
991 || TREE_CODE (o
) == POINTER_PLUS_EXPR
))
993 cp_walk_tree (&TREE_OPERAND (o
, 0), cp_fold_r
, data
, NULL
);
994 cp_walk_tree (&TREE_OPERAND (o
, 1), cp_fold_r
, data
, NULL
);
998 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_fold_r
, data
, NULL
);
1005 /* Fold ALL the trees! FIXME we should be able to remove this, but
1006 apparently that still causes optimization regressions. */
1009 cp_fold_function (tree fndecl
)
/* PSET prevents cp_fold_r from walking shared subtrees more than once,
   avoiding exponential behavior on nested SAVE_EXPRs/TARGET_EXPRs.  */
1011 hash_set
<tree
> pset
;
1012 cp_walk_tree (&DECL_SAVED_TREE (fndecl
), cp_fold_r
, &pset
, NULL
);
1015 /* Perform any pre-gimplification lowering of C++ front end trees to
1019 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
1021 tree stmt
= *stmt_p
;
1022 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
1023 hash_set
<tree
> *p_set
= wtd
->p_set
;
1025 /* If in an OpenMP context, note var uses. */
1026 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
1028 || TREE_CODE (stmt
) == PARM_DECL
1029 || TREE_CODE (stmt
) == RESULT_DECL
)
1030 && omp_var_to_track (stmt
))
1031 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
1033 /* Don't dereference parms in a thunk, pass the references through. */
1034 if ((TREE_CODE (stmt
) == CALL_EXPR
&& CALL_FROM_THUNK_P (stmt
))
1035 || (TREE_CODE (stmt
) == AGGR_INIT_EXPR
&& AGGR_INIT_FROM_THUNK_P (stmt
)))
1041 /* Otherwise, do dereference invisible reference parms. */
1042 if (is_invisiref_parm (stmt
))
1044 *stmt_p
= convert_from_reference (stmt
);
1049 /* Map block scope extern declarations to visible declarations with the
1050 same name and type in outer scopes if any. */
1051 if (cp_function_chain
->extern_decl_map
1052 && VAR_OR_FUNCTION_DECL_P (stmt
)
1053 && DECL_EXTERNAL (stmt
))
1055 struct cxx_int_tree_map
*h
, in
;
1056 in
.uid
= DECL_UID (stmt
);
1057 h
= cp_function_chain
->extern_decl_map
->find_with_hash (&in
, in
.uid
);
1066 /* Other than invisiref parms, don't walk the same tree twice. */
1067 if (p_set
->contains (stmt
))
1073 if (TREE_CODE (stmt
) == ADDR_EXPR
1074 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1076 /* If in an OpenMP context, note var uses. */
1077 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
1078 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
1079 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
1080 *stmt_p
= fold_convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
1083 else if (TREE_CODE (stmt
) == RETURN_EXPR
1084 && TREE_OPERAND (stmt
, 0)
1085 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1086 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1088 else if (TREE_CODE (stmt
) == OMP_CLAUSE
)
1089 switch (OMP_CLAUSE_CODE (stmt
))
1091 case OMP_CLAUSE_LASTPRIVATE
:
1092 /* Don't dereference an invisiref in OpenMP clauses. */
1093 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1096 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
1097 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
1098 cp_genericize_r
, data
, NULL
);
1101 case OMP_CLAUSE_PRIVATE
:
1102 /* Don't dereference an invisiref in OpenMP clauses. */
1103 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1105 else if (wtd
->omp_ctx
!= NULL
)
1107 /* Private clause doesn't cause any references to the
1108 var in outer contexts, avoid calling
1109 omp_cxx_notice_variable for it. */
1110 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
1111 wtd
->omp_ctx
= NULL
;
1112 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
1118 case OMP_CLAUSE_SHARED
:
1119 case OMP_CLAUSE_FIRSTPRIVATE
:
1120 case OMP_CLAUSE_COPYIN
:
1121 case OMP_CLAUSE_COPYPRIVATE
:
1122 /* Don't dereference an invisiref in OpenMP clauses. */
1123 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1126 case OMP_CLAUSE_REDUCTION
:
1127 /* Don't dereference an invisiref in reduction clause's
1128 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1129 still needs to be genericized. */
1130 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1133 if (OMP_CLAUSE_REDUCTION_INIT (stmt
))
1134 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt
),
1135 cp_genericize_r
, data
, NULL
);
1136 if (OMP_CLAUSE_REDUCTION_MERGE (stmt
))
1137 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt
),
1138 cp_genericize_r
, data
, NULL
);
1144 else if (IS_TYPE_OR_DECL_P (stmt
))
1147 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1148 to lower this construct before scanning it, so we need to lower these
1149 before doing anything else. */
1150 else if (TREE_CODE (stmt
) == CLEANUP_STMT
)
1151 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
1152 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
1155 CLEANUP_BODY (stmt
),
1156 CLEANUP_EXPR (stmt
));
1158 else if (TREE_CODE (stmt
) == IF_STMT
)
1160 genericize_if_stmt (stmt_p
);
1161 /* *stmt_p has changed, tail recurse to handle it again. */
1162 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1165 /* COND_EXPR might have incompatible types in branches if one or both
1166 arms are bitfields. Fix it up now. */
1167 else if (TREE_CODE (stmt
) == COND_EXPR
)
1170 = (TREE_OPERAND (stmt
, 1)
1171 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
1174 = (TREE_OPERAND (stmt
, 2)
1175 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
1178 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1179 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
1181 TREE_OPERAND (stmt
, 1)
1182 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
1183 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1187 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1188 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
1190 TREE_OPERAND (stmt
, 2)
1191 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
1192 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1197 else if (TREE_CODE (stmt
) == BIND_EXPR
)
1199 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0))
1202 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1204 && !DECL_EXTERNAL (decl
)
1205 && omp_var_to_track (decl
))
1208 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1209 (splay_tree_key
) decl
);
1211 splay_tree_insert (wtd
->omp_ctx
->variables
,
1212 (splay_tree_key
) decl
,
1214 ? OMP_CLAUSE_DEFAULT_SHARED
1215 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1219 & (SANITIZE_NULL
| SANITIZE_ALIGNMENT
| SANITIZE_VPTR
))
1221 /* The point here is to not sanitize static initializers. */
1222 bool no_sanitize_p
= wtd
->no_sanitize_p
;
1223 wtd
->no_sanitize_p
= true;
1224 for (tree decl
= BIND_EXPR_VARS (stmt
);
1226 decl
= DECL_CHAIN (decl
))
1228 && TREE_STATIC (decl
)
1229 && DECL_INITIAL (decl
))
1230 cp_walk_tree (&DECL_INITIAL (decl
), cp_genericize_r
, data
, NULL
);
1231 wtd
->no_sanitize_p
= no_sanitize_p
;
1233 wtd
->bind_expr_stack
.safe_push (stmt
);
1234 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1235 cp_genericize_r
, data
, NULL
);
1236 wtd
->bind_expr_stack
.pop ();
1239 else if (TREE_CODE (stmt
) == USING_STMT
)
1241 tree block
= NULL_TREE
;
1243 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1244 BLOCK, and append an IMPORTED_DECL to its
1245 BLOCK_VARS chained list. */
1246 if (wtd
->bind_expr_stack
.exists ())
1249 for (i
= wtd
->bind_expr_stack
.length () - 1; i
>= 0; i
--)
1250 if ((block
= BIND_EXPR_BLOCK (wtd
->bind_expr_stack
[i
])))
1255 tree using_directive
;
1256 gcc_assert (TREE_OPERAND (stmt
, 0));
1258 using_directive
= make_node (IMPORTED_DECL
);
1259 TREE_TYPE (using_directive
) = void_type_node
;
1261 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
)
1262 = TREE_OPERAND (stmt
, 0);
1263 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1264 BLOCK_VARS (block
) = using_directive
;
1266 /* The USING_STMT won't appear in GENERIC. */
1267 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1271 else if (TREE_CODE (stmt
) == DECL_EXPR
1272 && TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1274 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1275 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1278 else if (TREE_CODE (stmt
) == DECL_EXPR
)
1280 tree d
= DECL_EXPR_DECL (stmt
);
1281 if (TREE_CODE (d
) == VAR_DECL
)
1282 gcc_assert (CP_DECL_THREAD_LOCAL_P (d
) == DECL_THREAD_LOCAL_P (d
));
1284 else if (TREE_CODE (stmt
) == OMP_PARALLEL
1285 || TREE_CODE (stmt
) == OMP_TASK
1286 || TREE_CODE (stmt
) == OMP_TASKLOOP
)
1288 struct cp_genericize_omp_taskreg omp_ctx
;
1293 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1294 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1295 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1296 omp_ctx
.outer
= wtd
->omp_ctx
;
1297 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1298 wtd
->omp_ctx
= &omp_ctx
;
1299 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1300 switch (OMP_CLAUSE_CODE (c
))
1302 case OMP_CLAUSE_SHARED
:
1303 case OMP_CLAUSE_PRIVATE
:
1304 case OMP_CLAUSE_FIRSTPRIVATE
:
1305 case OMP_CLAUSE_LASTPRIVATE
:
1306 decl
= OMP_CLAUSE_DECL (c
);
1307 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1309 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1312 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1313 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1314 ? OMP_CLAUSE_DEFAULT_SHARED
1315 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1316 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
1318 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1320 case OMP_CLAUSE_DEFAULT
:
1321 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1322 omp_ctx
.default_shared
= true;
1326 if (TREE_CODE (stmt
) == OMP_TASKLOOP
)
1327 genericize_omp_for_stmt (stmt_p
, walk_subtrees
, data
);
1329 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1330 wtd
->omp_ctx
= omp_ctx
.outer
;
1331 splay_tree_delete (omp_ctx
.variables
);
1333 else if (TREE_CODE (stmt
) == TRY_BLOCK
)
1336 tree try_block
= wtd
->try_block
;
1337 wtd
->try_block
= stmt
;
1338 cp_walk_tree (&TRY_STMTS (stmt
), cp_genericize_r
, data
, NULL
);
1339 wtd
->try_block
= try_block
;
1340 cp_walk_tree (&TRY_HANDLERS (stmt
), cp_genericize_r
, data
, NULL
);
1342 else if (TREE_CODE (stmt
) == MUST_NOT_THROW_EXPR
)
1344 /* MUST_NOT_THROW_COND might be something else with TM. */
1345 if (MUST_NOT_THROW_COND (stmt
) == NULL_TREE
)
1348 tree try_block
= wtd
->try_block
;
1349 wtd
->try_block
= stmt
;
1350 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
1351 wtd
->try_block
= try_block
;
1354 else if (TREE_CODE (stmt
) == THROW_EXPR
)
1356 location_t loc
= location_of (stmt
);
1357 if (TREE_NO_WARNING (stmt
))
1359 else if (wtd
->try_block
)
1361 if (TREE_CODE (wtd
->try_block
) == MUST_NOT_THROW_EXPR
1362 && warning_at (loc
, OPT_Wterminate
,
1363 "throw will always call terminate()")
1364 && cxx_dialect
>= cxx11
1365 && DECL_DESTRUCTOR_P (current_function_decl
))
1366 inform (loc
, "in C++11 destructors default to noexcept");
1370 if (warn_cxx11_compat
&& cxx_dialect
< cxx11
1371 && DECL_DESTRUCTOR_P (current_function_decl
)
1372 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl
))
1374 && (get_defaulted_eh_spec (current_function_decl
)
1375 == empty_except_spec
))
1376 warning_at (loc
, OPT_Wc__11_compat
,
1377 "in C++11 this throw will terminate because "
1378 "destructors default to noexcept");
1381 else if (TREE_CODE (stmt
) == CONVERT_EXPR
)
1382 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
1383 else if (TREE_CODE (stmt
) == FOR_STMT
)
1384 genericize_for_stmt (stmt_p
, walk_subtrees
, data
);
1385 else if (TREE_CODE (stmt
) == WHILE_STMT
)
1386 genericize_while_stmt (stmt_p
, walk_subtrees
, data
);
1387 else if (TREE_CODE (stmt
) == DO_STMT
)
1388 genericize_do_stmt (stmt_p
, walk_subtrees
, data
);
1389 else if (TREE_CODE (stmt
) == SWITCH_STMT
)
1390 genericize_switch_stmt (stmt_p
, walk_subtrees
, data
);
1391 else if (TREE_CODE (stmt
) == CONTINUE_STMT
)
1392 genericize_continue_stmt (stmt_p
);
1393 else if (TREE_CODE (stmt
) == BREAK_STMT
)
1394 genericize_break_stmt (stmt_p
);
1395 else if (TREE_CODE (stmt
) == OMP_FOR
1396 || TREE_CODE (stmt
) == OMP_SIMD
1397 || TREE_CODE (stmt
) == OMP_DISTRIBUTE
)
1398 genericize_omp_for_stmt (stmt_p
, walk_subtrees
, data
);
1399 else if (TREE_CODE (stmt
) == PTRMEM_CST
)
1401 /* By the time we get here we're handing off to the back end, so we don't
1402 need or want to preserve PTRMEM_CST anymore. */
1403 *stmt_p
= cplus_expand_constant (stmt
);
1406 else if ((flag_sanitize
1407 & (SANITIZE_NULL
| SANITIZE_ALIGNMENT
| SANITIZE_VPTR
))
1408 && !wtd
->no_sanitize_p
)
1410 if ((flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1411 && TREE_CODE (stmt
) == NOP_EXPR
1412 && TREE_CODE (TREE_TYPE (stmt
)) == REFERENCE_TYPE
)
1413 ubsan_maybe_instrument_reference (stmt
);
1414 else if (TREE_CODE (stmt
) == CALL_EXPR
)
1416 tree fn
= CALL_EXPR_FN (stmt
);
1418 && !error_operand_p (fn
)
1419 && POINTER_TYPE_P (TREE_TYPE (fn
))
1420 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) == METHOD_TYPE
)
1423 = TREE_CODE (fn
) == ADDR_EXPR
1424 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
1425 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn
, 0));
1426 if (flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1427 ubsan_maybe_instrument_member_call (stmt
, is_ctor
);
1428 if ((flag_sanitize
& SANITIZE_VPTR
) && !is_ctor
)
1429 cp_ubsan_maybe_instrument_member_call (stmt
);
1434 p_set
->add (*stmt_p
);
1439 /* Lower C++ front end trees to GENERIC in T_P. */
1442 cp_genericize_tree (tree
* t_p
)
1444 struct cp_genericize_data wtd
;
1446 wtd
.p_set
= new hash_set
<tree
>;
1447 wtd
.bind_expr_stack
.create (0);
1449 wtd
.try_block
= NULL_TREE
;
1450 wtd
.no_sanitize_p
= false;
1451 cp_walk_tree (t_p
, cp_genericize_r
, &wtd
, NULL
);
1453 wtd
.bind_expr_stack
.release ();
1454 if (flag_sanitize
& SANITIZE_VPTR
)
1455 cp_ubsan_instrument_member_accesses (t_p
);
1458 /* If a function that should end with a return in non-void
1459 function doesn't obviously end with return, add ubsan
1460 instrumentation code to verify it at runtime. */
1463 cp_ubsan_maybe_instrument_return (tree fndecl
)
1465 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
1466 || DECL_CONSTRUCTOR_P (fndecl
)
1467 || DECL_DESTRUCTOR_P (fndecl
)
1468 || !targetm
.warn_func_return (fndecl
))
1471 tree t
= DECL_SAVED_TREE (fndecl
);
1474 switch (TREE_CODE (t
))
1477 t
= BIND_EXPR_BODY (t
);
1479 case TRY_FINALLY_EXPR
:
1480 t
= TREE_OPERAND (t
, 0);
1482 case STATEMENT_LIST
:
1484 tree_stmt_iterator i
= tsi_last (t
);
1501 t
= DECL_SAVED_TREE (fndecl
);
1502 if (TREE_CODE (t
) == BIND_EXPR
1503 && TREE_CODE (BIND_EXPR_BODY (t
)) == STATEMENT_LIST
)
1505 tree_stmt_iterator i
= tsi_last (BIND_EXPR_BODY (t
));
1506 t
= ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl
));
1507 tsi_link_after (&i
, t
, TSI_NEW_STMT
);
1512 cp_genericize (tree fndecl
)
1516 /* Fix up the types of parms passed by invisible reference. */
1517 for (t
= DECL_ARGUMENTS (fndecl
); t
; t
= DECL_CHAIN (t
))
1518 if (TREE_ADDRESSABLE (TREE_TYPE (t
)))
1520 /* If a function's arguments are copied to create a thunk,
1521 then DECL_BY_REFERENCE will be set -- but the type of the
1522 argument will be a pointer type, so we will never get
1524 gcc_assert (!DECL_BY_REFERENCE (t
));
1525 gcc_assert (DECL_ARG_TYPE (t
) != TREE_TYPE (t
));
1526 TREE_TYPE (t
) = DECL_ARG_TYPE (t
);
1527 DECL_BY_REFERENCE (t
) = 1;
1528 TREE_ADDRESSABLE (t
) = 0;
1532 /* Do the same for the return value. */
1533 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl
))))
1535 t
= DECL_RESULT (fndecl
);
1536 TREE_TYPE (t
) = build_reference_type (TREE_TYPE (t
));
1537 DECL_BY_REFERENCE (t
) = 1;
1538 TREE_ADDRESSABLE (t
) = 0;
1542 /* Adjust DECL_VALUE_EXPR of the original var. */
1543 tree outer
= outer_curly_brace_block (current_function_decl
);
1547 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
1548 if (DECL_NAME (t
) == DECL_NAME (var
)
1549 && DECL_HAS_VALUE_EXPR_P (var
)
1550 && DECL_VALUE_EXPR (var
) == t
)
1552 tree val
= convert_from_reference (t
);
1553 SET_DECL_VALUE_EXPR (var
, val
);
1559 /* If we're a clone, the body is already GIMPLE. */
1560 if (DECL_CLONED_FUNCTION_P (fndecl
))
1563 /* Expand all the array notations here. */
1565 && contains_array_notation_expr (DECL_SAVED_TREE (fndecl
)))
1566 DECL_SAVED_TREE (fndecl
) =
1567 expand_array_notation_exprs (DECL_SAVED_TREE (fndecl
));
1569 /* We do want to see every occurrence of the parms, so we can't just use
1570 walk_tree's hash functionality. */
1571 cp_genericize_tree (&DECL_SAVED_TREE (fndecl
));
1573 if (flag_sanitize
& SANITIZE_RETURN
1574 && do_ubsan_in_current_function ())
1575 cp_ubsan_maybe_instrument_return (fndecl
);
1577 /* Do everything else. */
1578 c_genericize (fndecl
);
1580 gcc_assert (bc_label
[bc_break
] == NULL
);
1581 gcc_assert (bc_label
[bc_continue
] == NULL
);
1584 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1585 NULL if there is in fact nothing to do. ARG2 may be null if FN
1586 actually only takes one argument. */
1589 cxx_omp_clause_apply_fn (tree fn
, tree arg1
, tree arg2
)
1591 tree defparm
, parm
, t
;
1599 nargs
= list_length (DECL_ARGUMENTS (fn
));
1600 argarray
= XALLOCAVEC (tree
, nargs
);
1602 defparm
= TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn
)));
1604 defparm
= TREE_CHAIN (defparm
);
1606 if (TREE_CODE (TREE_TYPE (arg1
)) == ARRAY_TYPE
)
1608 tree inner_type
= TREE_TYPE (arg1
);
1609 tree start1
, end1
, p1
;
1610 tree start2
= NULL
, p2
= NULL
;
1611 tree ret
= NULL
, lab
;
1617 inner_type
= TREE_TYPE (inner_type
);
1618 start1
= build4 (ARRAY_REF
, inner_type
, start1
,
1619 size_zero_node
, NULL
, NULL
);
1621 start2
= build4 (ARRAY_REF
, inner_type
, start2
,
1622 size_zero_node
, NULL
, NULL
);
1624 while (TREE_CODE (inner_type
) == ARRAY_TYPE
);
1625 start1
= build_fold_addr_expr_loc (input_location
, start1
);
1627 start2
= build_fold_addr_expr_loc (input_location
, start2
);
1629 end1
= TYPE_SIZE_UNIT (TREE_TYPE (arg1
));
1630 end1
= fold_build_pointer_plus (start1
, end1
);
1632 p1
= create_tmp_var (TREE_TYPE (start1
));
1633 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, start1
);
1634 append_to_statement_list (t
, &ret
);
1638 p2
= create_tmp_var (TREE_TYPE (start2
));
1639 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, start2
);
1640 append_to_statement_list (t
, &ret
);
1643 lab
= create_artificial_label (input_location
);
1644 t
= build1 (LABEL_EXPR
, void_type_node
, lab
);
1645 append_to_statement_list (t
, &ret
);
1650 /* Handle default arguments. */
1651 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
1652 parm
= TREE_CHAIN (parm
), i
++)
1653 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
1654 TREE_PURPOSE (parm
), fn
, i
,
1655 tf_warning_or_error
);
1656 t
= build_call_a (fn
, i
, argarray
);
1657 t
= fold_convert (void_type_node
, t
);
1658 t
= fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1659 append_to_statement_list (t
, &ret
);
1661 t
= fold_build_pointer_plus (p1
, TYPE_SIZE_UNIT (inner_type
));
1662 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, t
);
1663 append_to_statement_list (t
, &ret
);
1667 t
= fold_build_pointer_plus (p2
, TYPE_SIZE_UNIT (inner_type
));
1668 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, t
);
1669 append_to_statement_list (t
, &ret
);
1672 t
= build2 (NE_EXPR
, boolean_type_node
, p1
, end1
);
1673 t
= build3 (COND_EXPR
, void_type_node
, t
, build_and_jump (&lab
), NULL
);
1674 append_to_statement_list (t
, &ret
);
1680 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg1
);
1682 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg2
);
1683 /* Handle default arguments. */
1684 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
1685 parm
= TREE_CHAIN (parm
), i
++)
1686 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
1687 TREE_PURPOSE (parm
),
1688 fn
, i
, tf_warning_or_error
);
1689 t
= build_call_a (fn
, i
, argarray
);
1690 t
= fold_convert (void_type_node
, t
);
1691 return fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1695 /* Return code to initialize DECL with its default constructor, or
1696 NULL if there's nothing to do. */
1699 cxx_omp_clause_default_ctor (tree clause
, tree decl
, tree
/*outer*/)
1701 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1705 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), decl
, NULL
);
1710 /* Return code to initialize DST with a copy constructor from SRC. */
1713 cxx_omp_clause_copy_ctor (tree clause
, tree dst
, tree src
)
1715 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1719 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), dst
, src
);
1721 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1726 /* Similarly, except use an assignment operator instead. */
1729 cxx_omp_clause_assign_op (tree clause
, tree dst
, tree src
)
1731 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1735 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 2), dst
, src
);
1737 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1742 /* Return code to destroy DECL. */
1745 cxx_omp_clause_dtor (tree clause
, tree decl
)
1747 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1751 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 1), decl
, NULL
);
1756 /* True if OpenMP should privatize what this DECL points to rather
1757 than the DECL itself. */
1760 cxx_omp_privatize_by_reference (const_tree decl
)
1762 return (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
1763 || is_invisiref_parm (decl
));
1766 /* Return true if DECL is const qualified var having no mutable member. */
1768 cxx_omp_const_qual_no_mutable (tree decl
)
1770 tree type
= TREE_TYPE (decl
);
1771 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1773 if (!is_invisiref_parm (decl
))
1775 type
= TREE_TYPE (type
);
1777 if (TREE_CODE (decl
) == RESULT_DECL
&& DECL_NAME (decl
))
1779 /* NVR doesn't preserve const qualification of the
1781 tree outer
= outer_curly_brace_block (current_function_decl
);
1785 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
1786 if (DECL_NAME (decl
) == DECL_NAME (var
)
1787 && (TYPE_MAIN_VARIANT (type
)
1788 == TYPE_MAIN_VARIANT (TREE_TYPE (var
))))
1790 if (TYPE_READONLY (TREE_TYPE (var
)))
1791 type
= TREE_TYPE (var
);
1797 if (type
== error_mark_node
)
1800 /* Variables with const-qualified type having no mutable member
1801 are predetermined shared. */
1802 if (TYPE_READONLY (type
) && !cp_has_mutable_p (type
))
1808 /* True if OpenMP sharing attribute of DECL is predetermined. */
1810 enum omp_clause_default_kind
1811 cxx_omp_predetermined_sharing (tree decl
)
1813 /* Static data members are predetermined shared. */
1814 if (TREE_STATIC (decl
))
1816 tree ctx
= CP_DECL_CONTEXT (decl
);
1817 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
1818 return OMP_CLAUSE_DEFAULT_SHARED
;
1821 /* Const qualified vars having no mutable member are predetermined
1823 if (cxx_omp_const_qual_no_mutable (decl
))
1824 return OMP_CLAUSE_DEFAULT_SHARED
;
1826 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
1829 /* Finalize an implicitly determined clause. */
1832 cxx_omp_finish_clause (tree c
, gimple_seq
*)
1834 tree decl
, inner_type
;
1835 bool make_shared
= false;
1837 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
1840 decl
= OMP_CLAUSE_DECL (c
);
1841 decl
= require_complete_type (decl
);
1842 inner_type
= TREE_TYPE (decl
);
1843 if (decl
== error_mark_node
)
1845 else if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1846 inner_type
= TREE_TYPE (inner_type
);
1848 /* We're interested in the base element, not arrays. */
1849 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
1850 inner_type
= TREE_TYPE (inner_type
);
1852 /* Check for special function availability by building a call to one.
1853 Save the results, because later we won't be in the right context
1854 for making these queries. */
1856 && CLASS_TYPE_P (inner_type
)
1857 && cxx_omp_create_clause_info (c
, inner_type
, false, true, false, true))
1861 OMP_CLAUSE_CODE (c
) = OMP_CLAUSE_SHARED
;
1864 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1865 disregarded in OpenMP construct, because it is going to be
1866 remapped during OpenMP lowering. SHARED is true if DECL
1867 is going to be shared, false if it is going to be privatized. */
1870 cxx_omp_disregard_value_expr (tree decl
, bool shared
)
1874 && DECL_HAS_VALUE_EXPR_P (decl
)
1875 && DECL_ARTIFICIAL (decl
)
1876 && DECL_LANG_SPECIFIC (decl
)
1877 && DECL_OMP_PRIVATIZED_MEMBER (decl
);
1880 /* Perform folding on expression X. */
1883 cp_fully_fold (tree x
)
1888 /* Fold expression X which is used as an rvalue if RVAL is true. */
1891 cp_fold_maybe_rvalue (tree x
, bool rval
)
1896 if (rval
&& DECL_P (x
))
1898 tree v
= decl_constant_value (x
);
1899 if (v
!= x
&& v
!= error_mark_node
)
1910 /* Fold expression X which is used as an rvalue. */
1913 cp_fold_rvalue (tree x
)
1915 return cp_fold_maybe_rvalue (x
, true);
1918 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
1919 and certain changes are made to the folding done. Or should be (FIXME). We
1920 never touch maybe_const, as it is only used for the C front-end
1921 C_MAYBE_CONST_EXPR. */
1924 c_fully_fold (tree x
, bool /*in_init*/, bool */
*maybe_const*/
)
1926 /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
1928 return cp_fold_rvalue (x
);
1931 static GTY((deletable
)) hash_map
<tree
, tree
> *fold_cache
;
1933 /* Dispose of the whole FOLD_CACHE. */
1936 clear_fold_cache (void)
1938 if (fold_cache
!= NULL
)
1939 fold_cache
->empty ();
1942 /* This function tries to fold an expression X.
1943 To avoid combinatorial explosion, folding results are kept in fold_cache.
1944 If we are processing a template or X is invalid, we don't fold at all.
1945 For performance reasons we don't cache expressions representing a
1946 declaration or constant.
1947 Function returns X or its folded variant. */
1952 tree op0
, op1
, op2
, op3
;
1953 tree org_x
= x
, r
= NULL_TREE
;
1954 enum tree_code code
;
1956 bool rval_ops
= true;
1958 if (!x
|| x
== error_mark_node
)
1961 if (processing_template_decl
1962 || (EXPR_P (x
) && (!TREE_TYPE (x
) || TREE_TYPE (x
) == error_mark_node
)))
1965 /* Don't bother to cache DECLs or constants. */
1966 if (DECL_P (x
) || CONSTANT_CLASS_P (x
))
1969 if (fold_cache
== NULL
)
1970 fold_cache
= hash_map
<tree
, tree
>::create_ggc (101);
1972 if (tree
*cached
= fold_cache
->get (x
))
1975 code
= TREE_CODE (x
);
1979 x
= fold_sizeof_expr (x
);
1982 case VIEW_CONVERT_EXPR
:
1986 case NON_LVALUE_EXPR
:
1988 if (VOID_TYPE_P (TREE_TYPE (x
)))
1991 loc
= EXPR_LOCATION (x
);
1992 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
);
1994 if (code
== CONVERT_EXPR
1995 && SCALAR_TYPE_P (TREE_TYPE (x
))
1996 && op0
!= void_node
)
1997 /* During parsing we used convert_to_*_nofold; re-convert now using the
1998 folding variants, since fold() doesn't do those transformations. */
1999 x
= fold (convert (TREE_TYPE (x
), op0
));
2000 else if (op0
!= TREE_OPERAND (x
, 0))
2002 if (op0
== error_mark_node
)
2003 x
= error_mark_node
;
2005 x
= fold_build1_loc (loc
, code
, TREE_TYPE (x
), op0
);
2010 /* Conversion of an out-of-range value has implementation-defined
2011 behavior; the language considers it different from arithmetic
2012 overflow, which is undefined. */
2013 if (TREE_CODE (op0
) == INTEGER_CST
2014 && TREE_OVERFLOW_P (x
) && !TREE_OVERFLOW_P (op0
))
2015 TREE_OVERFLOW (x
) = false;
2020 /* We don't need the decltype(auto) obfuscation anymore. */
2021 if (REF_PARENTHESIZED_P (x
))
2023 tree p
= maybe_undo_parenthesized_ref (x
);
2033 case FIX_TRUNC_EXPR
:
2038 case TRUTH_NOT_EXPR
:
2039 case FIXED_CONVERT_EXPR
:
2042 loc
= EXPR_LOCATION (x
);
2043 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
);
2045 if (op0
!= TREE_OPERAND (x
, 0))
2047 if (op0
== error_mark_node
)
2048 x
= error_mark_node
;
2051 x
= fold_build1_loc (loc
, code
, TREE_TYPE (x
), op0
);
2052 if (code
== INDIRECT_REF
2053 && (INDIRECT_REF_P (x
) || TREE_CODE (x
) == MEM_REF
))
2055 TREE_READONLY (x
) = TREE_READONLY (org_x
);
2056 TREE_SIDE_EFFECTS (x
) = TREE_SIDE_EFFECTS (org_x
);
2057 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
2064 gcc_assert (TREE_CODE (x
) != COND_EXPR
2065 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x
, 0))));
2068 case UNARY_PLUS_EXPR
:
2069 op0
= cp_fold_rvalue (TREE_OPERAND (x
, 0));
2070 if (op0
== error_mark_node
)
2071 x
= error_mark_node
;
2073 x
= fold_convert (TREE_TYPE (x
), op0
);
2076 case POSTDECREMENT_EXPR
:
2077 case POSTINCREMENT_EXPR
:
2079 case PREDECREMENT_EXPR
:
2080 case PREINCREMENT_EXPR
:
2084 case POINTER_PLUS_EXPR
:
2088 case TRUNC_DIV_EXPR
:
2090 case FLOOR_DIV_EXPR
:
2091 case ROUND_DIV_EXPR
:
2092 case TRUNC_MOD_EXPR
:
2094 case ROUND_MOD_EXPR
:
2096 case EXACT_DIV_EXPR
:
2106 case TRUTH_AND_EXPR
:
2107 case TRUTH_ANDIF_EXPR
:
2109 case TRUTH_ORIF_EXPR
:
2110 case TRUTH_XOR_EXPR
:
2111 case LT_EXPR
: case LE_EXPR
:
2112 case GT_EXPR
: case GE_EXPR
:
2113 case EQ_EXPR
: case NE_EXPR
:
2114 case UNORDERED_EXPR
: case ORDERED_EXPR
:
2115 case UNLT_EXPR
: case UNLE_EXPR
:
2116 case UNGT_EXPR
: case UNGE_EXPR
:
2117 case UNEQ_EXPR
: case LTGT_EXPR
:
2118 case RANGE_EXPR
: case COMPLEX_EXPR
:
2120 loc
= EXPR_LOCATION (x
);
2121 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
);
2122 op1
= cp_fold_rvalue (TREE_OPERAND (x
, 1));
2124 if (op0
!= TREE_OPERAND (x
, 0) || op1
!= TREE_OPERAND (x
, 1))
2126 if (op0
== error_mark_node
|| op1
== error_mark_node
)
2127 x
= error_mark_node
;
2129 x
= fold_build2_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
);
2134 if (TREE_NO_WARNING (org_x
)
2135 && warn_nonnull_compare
2136 && COMPARISON_CLASS_P (org_x
))
2138 if (x
== error_mark_node
|| TREE_CODE (x
) == INTEGER_CST
)
2140 else if (COMPARISON_CLASS_P (x
))
2141 TREE_NO_WARNING (x
) = 1;
2142 /* Otherwise give up on optimizing these, let GIMPLE folders
2143 optimize those later on. */
2144 else if (op0
!= TREE_OPERAND (org_x
, 0)
2145 || op1
!= TREE_OPERAND (org_x
, 1))
2147 x
= build2_loc (loc
, code
, TREE_TYPE (org_x
), op0
, op1
);
2148 TREE_NO_WARNING (x
) = 1;
2158 /* Don't bother folding a void condition, since it can't produce a
2159 constant value. Also, some statement-level uses of COND_EXPR leave
2160 one of the branches NULL, so folding would crash. */
2161 if (VOID_TYPE_P (TREE_TYPE (x
)))
2164 loc
= EXPR_LOCATION (x
);
2165 op0
= cp_fold_rvalue (TREE_OPERAND (x
, 0));
2166 op1
= cp_fold (TREE_OPERAND (x
, 1));
2167 op2
= cp_fold (TREE_OPERAND (x
, 2));
2169 if (op0
!= TREE_OPERAND (x
, 0)
2170 || op1
!= TREE_OPERAND (x
, 1)
2171 || op2
!= TREE_OPERAND (x
, 2))
2173 if (op0
== error_mark_node
2174 || op1
== error_mark_node
2175 || op2
== error_mark_node
)
2176 x
= error_mark_node
;
2178 x
= fold_build3_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
);
2183 /* A COND_EXPR might have incompatible types in branches if one or both
2184 arms are bitfields. If folding exposed such a branch, fix it up. */
2185 if (TREE_CODE (x
) != code
)
2186 if (tree type
= is_bitfield_expr_with_lowered_type (x
))
2187 x
= fold_convert (type
, x
);
2193 int i
, m
, sv
= optimize
, nw
= sv
, changed
= 0;
2194 tree callee
= get_callee_fndecl (x
);
2196 /* Some built-in function calls will be evaluated at compile-time in
2197 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2198 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2199 if (callee
&& DECL_BUILT_IN (callee
) && !optimize
2200 && DECL_IS_BUILTIN_CONSTANT_P (callee
)
2201 && current_function_decl
2202 && DECL_DECLARED_CONSTEXPR_P (current_function_decl
))
2207 m
= call_expr_nargs (x
);
2208 for (i
= 0; i
< m
; i
++)
2210 r
= cp_fold (CALL_EXPR_ARG (x
, i
));
2211 if (r
!= CALL_EXPR_ARG (x
, i
))
2213 if (r
== error_mark_node
)
2215 x
= error_mark_node
;
2220 CALL_EXPR_ARG (x
, i
) = r
;
2222 if (x
== error_mark_node
)
2229 if (TREE_CODE (r
) != CALL_EXPR
)
2237 /* Invoke maybe_constant_value for functions declared
2238 constexpr and not called with AGGR_INIT_EXPRs.
2240 Do constexpr expansion of expressions where the call itself is not
2241 constant, but the call followed by an INDIRECT_REF is. */
2242 if (callee
&& DECL_DECLARED_CONSTEXPR_P (callee
)
2244 r
= maybe_constant_value (x
);
2247 if (TREE_CODE (r
) != CALL_EXPR
)
2262 bool changed
= false;
2263 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (x
);
2264 vec
<constructor_elt
, va_gc
> *nelts
= NULL
;
2265 vec_safe_reserve (nelts
, vec_safe_length (elts
));
2266 FOR_EACH_VEC_SAFE_ELT (elts
, i
, p
)
2268 tree op
= cp_fold (p
->value
);
2269 constructor_elt e
= { p
->index
, op
};
2270 nelts
->quick_push (e
);
2273 if (op
== error_mark_node
)
2275 x
= error_mark_node
;
2283 x
= build_constructor (TREE_TYPE (x
), nelts
);
2290 bool changed
= false;
2291 vec
<tree
, va_gc
> *vec
= make_tree_vector ();
2292 int i
, n
= TREE_VEC_LENGTH (x
);
2293 vec_safe_reserve (vec
, n
);
2295 for (i
= 0; i
< n
; i
++)
2297 tree op
= cp_fold (TREE_VEC_ELT (x
, i
));
2298 vec
->quick_push (op
);
2299 if (op
!= TREE_VEC_ELT (x
, i
))
2306 for (i
= 0; i
< n
; i
++)
2307 TREE_VEC_ELT (r
, i
) = (*vec
)[i
];
2311 release_tree_vector (vec
);
2317 case ARRAY_RANGE_REF
:
2319 loc
= EXPR_LOCATION (x
);
2320 op0
= cp_fold (TREE_OPERAND (x
, 0));
2321 op1
= cp_fold (TREE_OPERAND (x
, 1));
2322 op2
= cp_fold (TREE_OPERAND (x
, 2));
2323 op3
= cp_fold (TREE_OPERAND (x
, 3));
2325 if (op0
!= TREE_OPERAND (x
, 0)
2326 || op1
!= TREE_OPERAND (x
, 1)
2327 || op2
!= TREE_OPERAND (x
, 2)
2328 || op3
!= TREE_OPERAND (x
, 3))
2330 if (op0
== error_mark_node
2331 || op1
== error_mark_node
2332 || op2
== error_mark_node
2333 || op3
== error_mark_node
)
2334 x
= error_mark_node
;
2337 x
= build4_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
, op3
);
2338 TREE_READONLY (x
) = TREE_READONLY (org_x
);
2339 TREE_SIDE_EFFECTS (x
) = TREE_SIDE_EFFECTS (org_x
);
2340 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
2351 fold_cache
->put (org_x
, x
);
2352 /* Prevent that we try to fold an already folded result again. */
2354 fold_cache
->put (x
, x
);
2359 #include "gt-cp-cp-gimplify.h"