1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2020 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
40 #include "fold-const.h"
45 #include "gimple-fold.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
56 #include "omp-general.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "stringpool.h"
68 #include "omp-offload.h"
70 #include "tree-nested.h"
72 /* Hash set of poisoned variables in a bind expr. */
73 static hash_set
<tree
> *asan_poisoned_variables
= NULL
;
/* Data-sharing classification flags for variables seen inside OpenMP/OpenACC
   regions.  The low bits are mutually-exclusive sharing classes; the high
   bits are modifier flags combined with them.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x00080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
     fields.  NOTE: the mangled original had the decimal value 8388608
     (== 0x800000) here, which collides with GOVD_LASTPRIVATE_CONDITIONAL;
     the distinct bit 0x4000000 is the correct value.  */
  GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of OpenMP/OpenACC region currently being gimplified.  Base kinds
   occupy distinct bits so combined constructs can OR in small variant
   offsets (| 1, | 2, ...).  */

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
177 /* Gimplify hashtable helper. */
179 struct gimplify_hasher
: free_ptr_hash
<elt_t
>
181 static inline hashval_t
hash (const elt_t
*);
182 static inline bool equal (const elt_t
*, const elt_t
*);
187 struct gimplify_ctx
*prev_context
;
189 vec
<gbind
*> bind_expr_stack
;
191 gimple_seq conditional_cleanups
;
195 vec
<tree
> case_labels
;
196 hash_set
<tree
> *live_switch_vars
;
197 /* The formal temporary table. Should this be persistent? */
198 hash_table
<gimplify_hasher
> *temp_htab
;
201 unsigned into_ssa
: 1;
202 unsigned allow_rhs_cond_expr
: 1;
203 unsigned in_cleanup_point_expr
: 1;
204 unsigned keep_stack
: 1;
205 unsigned save_stack
: 1;
206 unsigned in_switch_expr
: 1;
/* Categories of variables for the OpenMP `defaultmap' clause; indexes the
   gimplify_omp_ctx::defaultmap array.  */
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
217 struct gimplify_omp_ctx
219 struct gimplify_omp_ctx
*outer_context
;
220 splay_tree variables
;
221 hash_set
<tree
> *privatized_types
;
223 /* Iteration variables in an OMP_FOR. */
224 vec
<tree
> loop_iter_var
;
226 enum omp_clause_default_kind default_kind
;
227 enum omp_region_type region_type
;
231 bool target_firstprivatize_array_bases
;
233 bool order_concurrent
;
237 static struct gimplify_ctx
*gimplify_ctxp
;
238 static struct gimplify_omp_ctx
*gimplify_omp_ctxp
;
239 static bool in_omp_construct
;
241 /* Forward declaration. */
242 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
243 static hash_map
<tree
, tree
> *oacc_declare_returns
;
244 static enum gimplify_status
gimplify_expr (tree
*, gimple_seq
*, gimple_seq
*,
245 bool (*) (tree
), fallback_t
, bool);
247 /* Shorter alias name for the above function for use in gimplify.c
251 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple
*gs
)
253 gimple_seq_add_stmt_without_update (seq_p
, gs
);
256 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
257 NULL, a new sequence is allocated. This function is
258 similar to gimple_seq_add_seq, but does not scan the operands.
259 During gimplification, we need to manipulate statement sequences
260 before the def/use vectors have been constructed. */
263 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
265 gimple_stmt_iterator si
;
270 si
= gsi_last (*dst_p
);
271 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
275 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
276 and popping gimplify contexts. */
278 static struct gimplify_ctx
*ctx_pool
= NULL
;
280 /* Return a gimplify context struct from the pool. */
282 static inline struct gimplify_ctx
*
285 struct gimplify_ctx
* c
= ctx_pool
;
288 ctx_pool
= c
->prev_context
;
290 c
= XNEW (struct gimplify_ctx
);
292 memset (c
, '\0', sizeof (*c
));
296 /* Put gimplify context C back into the pool. */
299 ctx_free (struct gimplify_ctx
*c
)
301 c
->prev_context
= ctx_pool
;
305 /* Free allocated ctx stack memory. */
308 free_gimplify_stack (void)
310 struct gimplify_ctx
*c
;
312 while ((c
= ctx_pool
))
314 ctx_pool
= c
->prev_context
;
320 /* Set up a context for the gimplifier. */
323 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
325 struct gimplify_ctx
*c
= ctx_alloc ();
327 c
->prev_context
= gimplify_ctxp
;
329 gimplify_ctxp
->into_ssa
= in_ssa
;
330 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
333 /* Tear down a context for the gimplifier. If BODY is non-null, then
334 put the temporaries into the outer BIND_EXPR. Otherwise, put them
337 BODY is not a sequence, but the first tuple in a sequence. */
340 pop_gimplify_context (gimple
*body
)
342 struct gimplify_ctx
*c
= gimplify_ctxp
;
345 && (!c
->bind_expr_stack
.exists ()
346 || c
->bind_expr_stack
.is_empty ()));
347 c
->bind_expr_stack
.release ();
348 gimplify_ctxp
= c
->prev_context
;
351 declare_vars (c
->temps
, body
, false);
353 record_vars (c
->temps
);
360 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
363 gimple_push_bind_expr (gbind
*bind_stmt
)
365 gimplify_ctxp
->bind_expr_stack
.reserve (8);
366 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
369 /* Pop the first element off the stack of bindings. */
372 gimple_pop_bind_expr (void)
374 gimplify_ctxp
->bind_expr_stack
.pop ();
377 /* Return the first element of the stack of bindings. */
380 gimple_current_bind_expr (void)
382 return gimplify_ctxp
->bind_expr_stack
.last ();
385 /* Return the stack of bindings created during gimplification. */
388 gimple_bind_expr_stack (void)
390 return gimplify_ctxp
->bind_expr_stack
;
393 /* Return true iff there is a COND_EXPR between us and the innermost
394 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
397 gimple_conditional_context (void)
399 return gimplify_ctxp
->conditions
> 0;
402 /* Note that we've entered a COND_EXPR. */
405 gimple_push_condition (void)
407 #ifdef ENABLE_GIMPLE_CHECKING
408 if (gimplify_ctxp
->conditions
== 0)
409 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
411 ++(gimplify_ctxp
->conditions
);
414 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
415 now, add any conditional cleanups we've seen to the prequeue. */
418 gimple_pop_condition (gimple_seq
*pre_p
)
420 int conds
= --(gimplify_ctxp
->conditions
);
422 gcc_assert (conds
>= 0);
425 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
426 gimplify_ctxp
->conditional_cleanups
= NULL
;
430 /* A stable comparison routine for use with splay trees and DECLs. */
433 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
438 return DECL_UID (a
) - DECL_UID (b
);
441 /* Create a new omp construct that deals with variable remapping. */
443 static struct gimplify_omp_ctx
*
444 new_omp_context (enum omp_region_type region_type
)
446 struct gimplify_omp_ctx
*c
;
448 c
= XCNEW (struct gimplify_omp_ctx
);
449 c
->outer_context
= gimplify_omp_ctxp
;
450 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
451 c
->privatized_types
= new hash_set
<tree
>;
452 c
->location
= input_location
;
453 c
->region_type
= region_type
;
454 if ((region_type
& ORT_TASK
) == 0)
455 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
457 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
458 c
->defaultmap
[GDMK_SCALAR
] = GOVD_MAP
;
459 c
->defaultmap
[GDMK_AGGREGATE
] = GOVD_MAP
;
460 c
->defaultmap
[GDMK_ALLOCATABLE
] = GOVD_MAP
;
461 c
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
;
466 /* Destroy an omp construct that deals with variable remapping. */
469 delete_omp_context (struct gimplify_omp_ctx
*c
)
471 splay_tree_delete (c
->variables
);
472 delete c
->privatized_types
;
473 c
->loop_iter_var
.release ();
477 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
478 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
480 /* Both gimplify the statement T and append it to *SEQ_P. This function
481 behaves exactly as gimplify_stmt, but you don't have to pass T as a
485 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
487 gimplify_stmt (&t
, seq_p
);
490 /* Gimplify statement T into sequence *SEQ_P, and return the first
491 tuple in the sequence of generated tuples for this statement.
492 Return NULL if gimplifying T produced no tuples. */
495 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
497 gimple_stmt_iterator last
= gsi_last (*seq_p
);
499 gimplify_and_add (t
, seq_p
);
501 if (!gsi_end_p (last
))
504 return gsi_stmt (last
);
507 return gimple_seq_first_stmt (*seq_p
);
510 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
511 LHS, or for a call argument. */
514 is_gimple_mem_rhs (tree t
)
516 /* If we're dealing with a renamable type, either source or dest must be
517 a renamed variable. */
518 if (is_gimple_reg_type (TREE_TYPE (t
)))
519 return is_gimple_val (t
);
521 return is_gimple_val (t
) || is_gimple_lvalue (t
);
524 /* Return true if T is a CALL_EXPR or an expression that can be
525 assigned to a temporary. Note that this predicate should only be
526 used during gimplification. See the rationale for this in
527 gimplify_modify_expr. */
530 is_gimple_reg_rhs_or_call (tree t
)
532 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
533 || TREE_CODE (t
) == CALL_EXPR
);
536 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
537 this predicate should only be used during gimplification. See the
538 rationale for this in gimplify_modify_expr. */
541 is_gimple_mem_rhs_or_call (tree t
)
543 /* If we're dealing with a renamable type, either source or dest must be
544 a renamed variable. */
545 if (is_gimple_reg_type (TREE_TYPE (t
)))
546 return is_gimple_val (t
);
548 return (is_gimple_val (t
)
549 || is_gimple_lvalue (t
)
550 || TREE_CLOBBER_P (t
)
551 || TREE_CODE (t
) == CALL_EXPR
);
554 /* Create a temporary with a name derived from VAL. Subroutine of
555 lookup_tmp_var; nobody else should call this function. */
558 create_tmp_from_val (tree val
)
560 /* Drop all qualifiers and address-space information from the value type. */
561 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
562 tree var
= create_tmp_var (type
, get_name (val
));
566 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
567 an existing expression temporary. */
570 lookup_tmp_var (tree val
, bool is_formal
)
574 /* If not optimizing, never really reuse a temporary. local-alloc
575 won't allocate any variable that is used in more than one basic
576 block, which means it will go into memory, causing much extra
577 work in reload and final and poorer code generation, outweighing
578 the extra memory allocation here. */
579 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
580 ret
= create_tmp_from_val (val
);
587 if (!gimplify_ctxp
->temp_htab
)
588 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
589 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
592 elt_p
= XNEW (elt_t
);
594 elt_p
->temp
= ret
= create_tmp_from_val (val
);
607 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
610 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
611 bool is_formal
, bool allow_ssa
)
615 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
616 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
617 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
621 && gimplify_ctxp
->into_ssa
622 && is_gimple_reg_type (TREE_TYPE (val
)))
624 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
625 if (! gimple_in_ssa_p (cfun
))
627 const char *name
= get_name (val
);
629 SET_SSA_NAME_VAR_OR_IDENTIFIER (t
, create_tmp_var_name (name
));
633 t
= lookup_tmp_var (val
, is_formal
);
635 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
637 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
639 /* gimplify_modify_expr might want to reduce this further. */
640 gimplify_and_add (mod
, pre_p
);
646 /* Return a formal temporary variable initialized with VAL. PRE_P is as
647 in gimplify_expr. Only use this function if:
649 1) The value of the unfactored expression represented by VAL will not
650 change between the initialization and use of the temporary, and
651 2) The temporary will not be otherwise modified.
653 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
654 and #2 means it is inappropriate for && temps.
656 For other cases, use get_initialized_tmp_var instead. */
659 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
661 return internal_get_tmp_var (val
, pre_p
, NULL
, true, true);
664 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
665 are as in gimplify_expr. */
668 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
,
669 gimple_seq
*post_p
/* = NULL */,
670 bool allow_ssa
/* = true */)
672 return internal_get_tmp_var (val
, pre_p
, post_p
, false, allow_ssa
);
675 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
676 generate debug info for them; otherwise don't. */
679 declare_vars (tree vars
, gimple
*gs
, bool debug_info
)
686 gbind
*scope
= as_a
<gbind
*> (gs
);
688 temps
= nreverse (last
);
690 block
= gimple_bind_block (scope
);
691 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
692 if (!block
|| !debug_info
)
694 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
695 gimple_bind_set_vars (scope
, temps
);
699 /* We need to attach the nodes both to the BIND_EXPR and to its
700 associated BLOCK for debugging purposes. The key point here
701 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
702 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
703 if (BLOCK_VARS (block
))
704 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
707 gimple_bind_set_vars (scope
,
708 chainon (gimple_bind_vars (scope
), temps
));
709 BLOCK_VARS (block
) = temps
;
715 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
716 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
717 no such upper bound can be obtained. */
720 force_constant_size (tree var
)
722 /* The only attempt we make is by querying the maximum size of objects
723 of the variable's type. */
725 HOST_WIDE_INT max_size
;
727 gcc_assert (VAR_P (var
));
729 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
731 gcc_assert (max_size
>= 0);
734 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
736 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
739 /* Push the temporary variable TMP into the current binding. */
742 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
744 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
746 /* Later processing assumes that the object size is constant, which might
747 not be true at this point. Force the use of a constant upper bound in
749 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
750 force_constant_size (tmp
);
752 DECL_CONTEXT (tmp
) = fn
->decl
;
753 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
755 record_vars_into (tmp
, fn
->decl
);
758 /* Push the temporary variable TMP into the current binding. */
761 gimple_add_tmp_var (tree tmp
)
763 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
765 /* Later processing assumes that the object size is constant, which might
766 not be true at this point. Force the use of a constant upper bound in
768 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
769 force_constant_size (tmp
);
771 DECL_CONTEXT (tmp
) = current_function_decl
;
772 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
776 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
777 gimplify_ctxp
->temps
= tmp
;
779 /* Mark temporaries local within the nearest enclosing parallel. */
780 if (gimplify_omp_ctxp
)
782 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
783 int flag
= GOVD_LOCAL
;
785 && (ctx
->region_type
== ORT_WORKSHARE
786 || ctx
->region_type
== ORT_TASKGROUP
787 || ctx
->region_type
== ORT_SIMD
788 || ctx
->region_type
== ORT_ACC
))
790 if (ctx
->region_type
== ORT_SIMD
791 && TREE_ADDRESSABLE (tmp
)
792 && !TREE_STATIC (tmp
))
794 if (TREE_CODE (DECL_SIZE_UNIT (tmp
)) != INTEGER_CST
)
795 ctx
->add_safelen1
= true;
800 ctx
= ctx
->outer_context
;
803 omp_add_variable (ctx
, tmp
, flag
| GOVD_SEEN
);
812 /* This case is for nested functions. We need to expose the locals
814 body_seq
= gimple_body (current_function_decl
);
815 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
821 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
822 nodes that are referenced more than once in GENERIC functions. This is
823 necessary because gimplification (translation into GIMPLE) is performed
824 by modifying tree nodes in-place, so gimplication of a shared node in a
825 first context could generate an invalid GIMPLE form in a second context.
827 This is achieved with a simple mark/copy/unmark algorithm that walks the
828 GENERIC representation top-down, marks nodes with TREE_VISITED the first
829 time it encounters them, duplicates them if they already have TREE_VISITED
830 set, and finally removes the TREE_VISITED marks it has set.
832 The algorithm works only at the function level, i.e. it generates a GENERIC
833 representation of a function with no nodes shared within the function when
834 passed a GENERIC function (except for nodes that are allowed to be shared).
836 At the global level, it is also necessary to unshare tree nodes that are
837 referenced in more than one function, for the same aforementioned reason.
838 This requires some cooperation from the front-end. There are 2 strategies:
840 1. Manual unsharing. The front-end needs to call unshare_expr on every
841 expression that might end up being shared across functions.
843 2. Deep unsharing. This is an extension of regular unsharing. Instead
844 of calling unshare_expr on expressions that might be shared across
845 functions, the front-end pre-marks them with TREE_VISITED. This will
846 ensure that they are unshared on the first reference within functions
847 when the regular unsharing algorithm runs. The counterpart is that
848 this algorithm must look deeper than for manual unsharing, which is
849 specified by LANG_HOOKS_DEEP_UNSHARING.
851 If there are only few specific cases of node sharing across functions, it is
852 probably easier for a front-end to unshare the expressions manually. On the
853 contrary, if the expressions generated at the global level are as widespread
854 as expressions generated within functions, deep unsharing is very likely the
857 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
858 These nodes model computations that must be done once. If we were to
859 unshare something like SAVE_EXPR(i++), the gimplification process would
860 create wrong code. However, if DATA is non-null, it must hold a pointer
861 set that is used to unshare the subtrees of these nodes. */
864 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
867 enum tree_code code
= TREE_CODE (t
);
869 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
870 copy their subtrees if we can make sure to do it only once. */
871 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
873 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
879 /* Stop at types, decls, constants like copy_tree_r. */
880 else if (TREE_CODE_CLASS (code
) == tcc_type
881 || TREE_CODE_CLASS (code
) == tcc_declaration
882 || TREE_CODE_CLASS (code
) == tcc_constant
)
885 /* Cope with the statement expression extension. */
886 else if (code
== STATEMENT_LIST
)
889 /* Leave the bulk of the work to copy_tree_r itself. */
891 copy_tree_r (tp
, walk_subtrees
, NULL
);
896 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
897 If *TP has been visited already, then *TP is deeply copied by calling
898 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
901 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
904 enum tree_code code
= TREE_CODE (t
);
906 /* Skip types, decls, and constants. But we do want to look at their
907 types and the bounds of types. Mark them as visited so we properly
908 unmark their subtrees on the unmark pass. If we've already seen them,
909 don't look down further. */
910 if (TREE_CODE_CLASS (code
) == tcc_type
911 || TREE_CODE_CLASS (code
) == tcc_declaration
912 || TREE_CODE_CLASS (code
) == tcc_constant
)
914 if (TREE_VISITED (t
))
917 TREE_VISITED (t
) = 1;
920 /* If this node has been visited already, unshare it and don't look
922 else if (TREE_VISITED (t
))
924 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
928 /* Otherwise, mark the node as visited and keep looking. */
930 TREE_VISITED (t
) = 1;
935 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
936 copy_if_shared_r callback unmodified. */
939 copy_if_shared (tree
*tp
, void *data
)
941 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
944 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
945 any nested functions. */
948 unshare_body (tree fndecl
)
950 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
951 /* If the language requires deep unsharing, we need a pointer set to make
952 sure we don't repeatedly unshare subtrees of unshareable nodes. */
953 hash_set
<tree
> *visited
954 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
956 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
957 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
958 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
963 for (cgn
= first_nested_function (cgn
); cgn
;
964 cgn
= next_nested_function (cgn
))
965 unshare_body (cgn
->decl
);
968 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
969 Subtrees are walked until the first unvisited node is encountered. */
972 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
976 /* If this node has been visited, unmark it and keep looking. */
977 if (TREE_VISITED (t
))
978 TREE_VISITED (t
) = 0;
980 /* Otherwise, don't look any deeper. */
987 /* Unmark the visited trees rooted at *TP. */
990 unmark_visited (tree
*tp
)
992 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
995 /* Likewise, but mark all trees as not visited. */
998 unvisit_body (tree fndecl
)
1000 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
1002 unmark_visited (&DECL_SAVED_TREE (fndecl
));
1003 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
1004 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
1007 for (cgn
= first_nested_function (cgn
);
1008 cgn
; cgn
= next_nested_function (cgn
))
1009 unvisit_body (cgn
->decl
);
1012 /* Unconditionally make an unshared copy of EXPR. This is used when using
1013 stored expressions which span multiple functions, such as BINFO_VTABLE,
1014 as the normal unsharing process can't tell that they're shared. */
1017 unshare_expr (tree expr
)
1019 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1023 /* Worker for unshare_expr_without_location. */
1026 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
1029 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
1035 /* Similar to unshare_expr but also prune all expression locations
1039 unshare_expr_without_location (tree expr
)
1041 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1043 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
1047 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1048 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1049 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1050 EXPR is the location of the EXPR. */
1053 rexpr_location (tree expr
, location_t or_else
= UNKNOWN_LOCATION
)
1058 if (EXPR_HAS_LOCATION (expr
))
1059 return EXPR_LOCATION (expr
);
1061 if (TREE_CODE (expr
) != STATEMENT_LIST
)
1064 tree_stmt_iterator i
= tsi_start (expr
);
1067 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
1073 if (!found
|| !tsi_one_before_end_p (i
))
1076 return rexpr_location (tsi_stmt (i
), or_else
);
1079 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1080 rexpr_location for the potential recursion. */
1083 rexpr_has_location (tree expr
)
1085 return rexpr_location (expr
) != UNKNOWN_LOCATION
;
1089 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1090 contain statements and have a value. Assign its value to a temporary
1091 and give it void_type_node. Return the temporary, or NULL_TREE if
1092 WRAPPER was already void. */
1095 voidify_wrapper_expr (tree wrapper
, tree temp
)
1097 tree type
= TREE_TYPE (wrapper
);
1098 if (type
&& !VOID_TYPE_P (type
))
1102 /* Set p to point to the body of the wrapper. Loop until we find
1103 something that isn't a wrapper. */
1104 for (p
= &wrapper
; p
&& *p
; )
1106 switch (TREE_CODE (*p
))
1109 TREE_SIDE_EFFECTS (*p
) = 1;
1110 TREE_TYPE (*p
) = void_type_node
;
1111 /* For a BIND_EXPR, the body is operand 1. */
1112 p
= &BIND_EXPR_BODY (*p
);
1115 case CLEANUP_POINT_EXPR
:
1116 case TRY_FINALLY_EXPR
:
1117 case TRY_CATCH_EXPR
:
1118 TREE_SIDE_EFFECTS (*p
) = 1;
1119 TREE_TYPE (*p
) = void_type_node
;
1120 p
= &TREE_OPERAND (*p
, 0);
1123 case STATEMENT_LIST
:
1125 tree_stmt_iterator i
= tsi_last (*p
);
1126 TREE_SIDE_EFFECTS (*p
) = 1;
1127 TREE_TYPE (*p
) = void_type_node
;
1128 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1133 /* Advance to the last statement. Set all container types to
1135 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1137 TREE_SIDE_EFFECTS (*p
) = 1;
1138 TREE_TYPE (*p
) = void_type_node
;
1142 case TRANSACTION_EXPR
:
1143 TREE_SIDE_EFFECTS (*p
) = 1;
1144 TREE_TYPE (*p
) = void_type_node
;
1145 p
= &TRANSACTION_EXPR_BODY (*p
);
1149 /* Assume that any tree upon which voidify_wrapper_expr is
1150 directly called is a wrapper, and that its body is op0. */
1153 TREE_SIDE_EFFECTS (*p
) = 1;
1154 TREE_TYPE (*p
) = void_type_node
;
1155 p
= &TREE_OPERAND (*p
, 0);
1163 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1167 /* The wrapper is on the RHS of an assignment that we're pushing
1169 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1170 || TREE_CODE (temp
) == MODIFY_EXPR
);
1171 TREE_OPERAND (temp
, 1) = *p
;
1176 temp
= create_tmp_var (type
, "retval");
1177 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
1186 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1187 a temporary through which they communicate. */
1190 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1194 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1195 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1196 gimple_call_set_lhs (*save
, tmp_var
);
1199 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
1203 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1206 build_asan_poison_call_expr (tree decl
)
1208 /* Do not poison variables that have size equal to zero. */
1209 tree unit_size
= DECL_SIZE_UNIT (decl
);
1210 if (zerop (unit_size
))
1213 tree base
= build_fold_addr_expr (decl
);
1215 return build_call_expr_internal_loc (UNKNOWN_LOCATION
, IFN_ASAN_MARK
,
1217 build_int_cst (integer_type_node
,
1222 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1223 on POISON flag, shadow memory of a DECL variable. The call will be
1224 put on location identified by IT iterator, where BEFORE flag drives
1225 position where the stmt will be put. */
1228 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1231 tree unit_size
= DECL_SIZE_UNIT (decl
);
1232 tree base
= build_fold_addr_expr (decl
);
1234 /* Do not poison variables that have size equal to zero. */
1235 if (zerop (unit_size
))
1238 /* It's necessary to have all stack variables aligned to ASAN granularity
1240 gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
1241 unsigned shadow_granularity
1242 = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE
: ASAN_SHADOW_GRANULARITY
;
1243 if (DECL_ALIGN_UNIT (decl
) <= shadow_granularity
)
1244 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* shadow_granularity
);
1246 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1249 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1250 build_int_cst (integer_type_node
, flags
),
1254 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1256 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
1259 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1260 either poisons or unpoisons a DECL. Created statement is appended
1261 to SEQ_P gimple sequence. */
1264 asan_poison_variable (tree decl
, bool poison
, gimple_seq
*seq_p
)
1266 gimple_stmt_iterator it
= gsi_last (*seq_p
);
1267 bool before
= false;
1272 asan_poison_variable (decl
, poison
, &it
, before
);
1275 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1278 sort_by_decl_uid (const void *a
, const void *b
)
1280 const tree
*t1
= (const tree
*)a
;
1281 const tree
*t2
= (const tree
*)b
;
1283 int uid1
= DECL_UID (*t1
);
1284 int uid2
= DECL_UID (*t2
);
1288 else if (uid1
> uid2
)
1294 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1295 depending on POISON flag. Created statement is appended
1296 to SEQ_P gimple sequence. */
1299 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1301 unsigned c
= variables
->elements ();
1305 auto_vec
<tree
> sorted_variables (c
);
1307 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1308 it
!= variables
->end (); ++it
)
1309 sorted_variables
.safe_push (*it
);
1311 sorted_variables
.qsort (sort_by_decl_uid
);
1315 FOR_EACH_VEC_ELT (sorted_variables
, i
, var
)
1317 asan_poison_variable (var
, poison
, seq_p
);
1319 /* Add use_after_scope_memory attribute for the variable in order
1320 to prevent re-written into SSA. */
1321 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1322 DECL_ATTRIBUTES (var
)))
1323 DECL_ATTRIBUTES (var
)
1324 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
),
1326 DECL_ATTRIBUTES (var
));
1330 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1332 static enum gimplify_status
1333 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1335 tree bind_expr
= *expr_p
;
1336 bool old_keep_stack
= gimplify_ctxp
->keep_stack
;
1337 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1340 gimple_seq body
, cleanup
;
1342 location_t start_locus
= 0, end_locus
= 0;
1343 tree ret_clauses
= NULL
;
1345 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1347 /* Mark variables seen in this bind expr. */
1348 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1352 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1354 /* Mark variable as local. */
1355 if (ctx
&& ctx
->region_type
!= ORT_NONE
&& !DECL_EXTERNAL (t
))
1357 if (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1358 || splay_tree_lookup (ctx
->variables
,
1359 (splay_tree_key
) t
) == NULL
)
1361 int flag
= GOVD_LOCAL
;
1362 if (ctx
->region_type
== ORT_SIMD
1363 && TREE_ADDRESSABLE (t
)
1364 && !TREE_STATIC (t
))
1366 if (TREE_CODE (DECL_SIZE_UNIT (t
)) != INTEGER_CST
)
1367 ctx
->add_safelen1
= true;
1369 flag
= GOVD_PRIVATE
;
1371 omp_add_variable (ctx
, t
, flag
| GOVD_SEEN
);
1373 /* Static locals inside of target construct or offloaded
1374 routines need to be "omp declare target". */
1375 if (TREE_STATIC (t
))
1376 for (; ctx
; ctx
= ctx
->outer_context
)
1377 if ((ctx
->region_type
& ORT_TARGET
) != 0)
1379 if (!lookup_attribute ("omp declare target",
1380 DECL_ATTRIBUTES (t
)))
1382 tree id
= get_identifier ("omp declare target");
1384 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (t
));
1385 varpool_node
*node
= varpool_node::get (t
);
1388 node
->offloadable
= 1;
1389 if (ENABLE_OFFLOADING
&& !DECL_EXTERNAL (t
))
1391 g
->have_offload
= true;
1393 vec_safe_push (offload_vars
, t
);
1401 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1403 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1404 cfun
->has_local_explicit_reg_vars
= true;
1408 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1409 BIND_EXPR_BLOCK (bind_expr
));
1410 gimple_push_bind_expr (bind_stmt
);
1412 gimplify_ctxp
->keep_stack
= false;
1413 gimplify_ctxp
->save_stack
= false;
1415 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1417 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1418 gimple_bind_set_body (bind_stmt
, body
);
1420 /* Source location wise, the cleanup code (stack_restore and clobbers)
1421 belongs to the end of the block, so propagate what we have. The
1422 stack_save operation belongs to the beginning of block, which we can
1423 infer from the bind_expr directly if the block has no explicit
1425 if (BIND_EXPR_BLOCK (bind_expr
))
1427 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1428 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1430 if (start_locus
== 0)
1431 start_locus
= EXPR_LOCATION (bind_expr
);
1436 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1437 the stack space allocated to the VLAs. */
1438 if (gimplify_ctxp
->save_stack
&& !gimplify_ctxp
->keep_stack
)
1440 gcall
*stack_restore
;
1442 /* Save stack on entry and restore it on exit. Add a try_finally
1443 block to achieve this. */
1444 build_stack_save_restore (&stack_save
, &stack_restore
);
1446 gimple_set_location (stack_save
, start_locus
);
1447 gimple_set_location (stack_restore
, end_locus
);
1449 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1452 /* Add clobbers for all variables that go out of scope. */
1453 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1456 && !is_global_var (t
)
1457 && DECL_CONTEXT (t
) == current_function_decl
)
1459 if (!DECL_HARD_REGISTER (t
)
1460 && !TREE_THIS_VOLATILE (t
)
1461 && !DECL_HAS_VALUE_EXPR_P (t
)
1462 /* Only care for variables that have to be in memory. Others
1463 will be rewritten into SSA names, hence moved to the
1465 && !is_gimple_reg (t
)
1466 && flag_stack_reuse
!= SR_NONE
)
1468 tree clobber
= build_clobber (TREE_TYPE (t
));
1469 gimple
*clobber_stmt
;
1470 clobber_stmt
= gimple_build_assign (t
, clobber
);
1471 gimple_set_location (clobber_stmt
, end_locus
);
1472 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1475 if (flag_openacc
&& oacc_declare_returns
!= NULL
)
1478 if (DECL_HAS_VALUE_EXPR_P (key
))
1480 key
= DECL_VALUE_EXPR (key
);
1481 if (TREE_CODE (key
) == INDIRECT_REF
)
1482 key
= TREE_OPERAND (key
, 0);
1484 tree
*c
= oacc_declare_returns
->get (key
);
1488 OMP_CLAUSE_CHAIN (*c
) = ret_clauses
;
1490 ret_clauses
= unshare_expr (*c
);
1492 oacc_declare_returns
->remove (key
);
1494 if (oacc_declare_returns
->is_empty ())
1496 delete oacc_declare_returns
;
1497 oacc_declare_returns
= NULL
;
1503 if (asan_poisoned_variables
!= NULL
1504 && asan_poisoned_variables
->contains (t
))
1506 asan_poisoned_variables
->remove (t
);
1507 asan_poison_variable (t
, true, &cleanup
);
1510 if (gimplify_ctxp
->live_switch_vars
!= NULL
1511 && gimplify_ctxp
->live_switch_vars
->contains (t
))
1512 gimplify_ctxp
->live_switch_vars
->remove (t
);
1518 gimple_stmt_iterator si
= gsi_start (cleanup
);
1520 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
1522 gsi_insert_seq_before_without_update (&si
, stmt
, GSI_NEW_STMT
);
1528 gimple_seq new_body
;
1531 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1532 GIMPLE_TRY_FINALLY
);
1535 gimplify_seq_add_stmt (&new_body
, stack_save
);
1536 gimplify_seq_add_stmt (&new_body
, gs
);
1537 gimple_bind_set_body (bind_stmt
, new_body
);
1540 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1541 if (!gimplify_ctxp
->keep_stack
)
1542 gimplify_ctxp
->keep_stack
= old_keep_stack
;
1543 gimplify_ctxp
->save_stack
= old_save_stack
;
1545 gimple_pop_bind_expr ();
1547 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1555 *expr_p
= NULL_TREE
;
1559 /* Maybe add early return predict statement to PRE_P sequence. */
1562 maybe_add_early_return_predict_stmt (gimple_seq
*pre_p
)
1564 /* If we are not in a conditional context, add PREDICT statement. */
1565 if (gimple_conditional_context ())
1567 gimple
*predict
= gimple_build_predict (PRED_TREE_EARLY_RETURN
,
1569 gimplify_seq_add_stmt (pre_p
, predict
);
1573 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1574 GIMPLE value, it is assigned to a new temporary and the statement is
1575 re-written to return the temporary.
1577 PRE_P points to the sequence where side effects that must happen before
1578 STMT should be stored. */
1580 static enum gimplify_status
1581 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1584 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1585 tree result_decl
, result
;
1587 if (ret_expr
== error_mark_node
)
1591 || TREE_CODE (ret_expr
) == RESULT_DECL
)
1593 maybe_add_early_return_predict_stmt (pre_p
);
1594 greturn
*ret
= gimple_build_return (ret_expr
);
1595 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1596 gimplify_seq_add_stmt (pre_p
, ret
);
1600 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1601 result_decl
= NULL_TREE
;
1602 else if (TREE_CODE (ret_expr
) == COMPOUND_EXPR
)
1604 /* Used in C++ for handling EH cleanup of the return value if a local
1605 cleanup throws. Assume the front-end knows what it's doing. */
1606 result_decl
= DECL_RESULT (current_function_decl
);
1607 /* But crash if we end up trying to modify ret_expr below. */
1608 ret_expr
= NULL_TREE
;
1612 result_decl
= TREE_OPERAND (ret_expr
, 0);
1614 /* See through a return by reference. */
1615 if (TREE_CODE (result_decl
) == INDIRECT_REF
)
1616 result_decl
= TREE_OPERAND (result_decl
, 0);
1618 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1619 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1620 && TREE_CODE (result_decl
) == RESULT_DECL
);
1623 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1624 Recall that aggregate_value_p is FALSE for any aggregate type that is
1625 returned in registers. If we're returning values in registers, then
1626 we don't want to extend the lifetime of the RESULT_DECL, particularly
1627 across another call. In addition, for those aggregates for which
1628 hard_function_value generates a PARALLEL, we'll die during normal
1629 expansion of structure assignments; there's special code in expand_return
1630 to handle this case that does not exist in expand_expr. */
1633 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1635 if (!poly_int_tree_p (DECL_SIZE (result_decl
)))
1637 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1638 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1639 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1640 should be effectively allocated by the caller, i.e. all calls to
1641 this function must be subject to the Return Slot Optimization. */
1642 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1643 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1645 result
= result_decl
;
1647 else if (gimplify_ctxp
->return_temp
)
1648 result
= gimplify_ctxp
->return_temp
;
1651 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1653 /* ??? With complex control flow (usually involving abnormal edges),
1654 we can wind up warning about an uninitialized value for this. Due
1655 to how this variable is constructed and initialized, this is never
1656 true. Give up and never warn. */
1657 TREE_NO_WARNING (result
) = 1;
1659 gimplify_ctxp
->return_temp
= result
;
1662 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1663 Then gimplify the whole thing. */
1664 if (result
!= result_decl
)
1665 TREE_OPERAND (ret_expr
, 0) = result
;
1667 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1669 maybe_add_early_return_predict_stmt (pre_p
);
1670 ret
= gimple_build_return (result
);
1671 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1672 gimplify_seq_add_stmt (pre_p
, ret
);
1677 /* Gimplify a variable-length array DECL. */
1680 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1682 /* This is a variable-sized decl. Simplify its size and mark it
1683 for deferred expansion. */
1684 tree t
, addr
, ptr_type
;
1686 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1687 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1689 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1690 if (DECL_HAS_VALUE_EXPR_P (decl
))
1693 /* All occurrences of this decl in final gimplified code will be
1694 replaced by indirection. Setting DECL_VALUE_EXPR does two
1695 things: First, it lets the rest of the gimplifier know what
1696 replacement to use. Second, it lets the debug info know
1697 where to find the value. */
1698 ptr_type
= build_pointer_type (TREE_TYPE (decl
));
1699 addr
= create_tmp_var (ptr_type
, get_name (decl
));
1700 DECL_IGNORED_P (addr
) = 0;
1701 t
= build_fold_indirect_ref (addr
);
1702 TREE_THIS_NOTRAP (t
) = 1;
1703 SET_DECL_VALUE_EXPR (decl
, t
);
1704 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
1706 t
= build_alloca_call_expr (DECL_SIZE_UNIT (decl
), DECL_ALIGN (decl
),
1707 max_int_size_in_bytes (TREE_TYPE (decl
)));
1708 /* The call has been built for a variable-sized object. */
1709 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1710 t
= fold_convert (ptr_type
, t
);
1711 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1713 gimplify_and_add (t
, seq_p
);
1715 /* Record the dynamic allocation associated with DECL if requested. */
1716 if (flag_callgraph_info
& CALLGRAPH_INFO_DYNAMIC_ALLOC
)
1717 record_dynamic_alloc (decl
);
1720 /* A helper function to be called via walk_tree. Mark all labels under *TP
1721 as being forced. To be called for DECL_INITIAL of static variables. */
1724 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1728 if (TREE_CODE (*tp
) == LABEL_DECL
)
1730 FORCED_LABEL (*tp
) = 1;
1731 cfun
->has_forced_label_in_static
= 1;
1737 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1738 and initialization explicit. */
1740 static enum gimplify_status
1741 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
1743 tree stmt
= *stmt_p
;
1744 tree decl
= DECL_EXPR_DECL (stmt
);
1746 *stmt_p
= NULL_TREE
;
1748 if (TREE_TYPE (decl
) == error_mark_node
)
1751 if ((TREE_CODE (decl
) == TYPE_DECL
1753 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
1755 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
1756 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1757 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl
)), seq_p
);
1760 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1761 in case its size expressions contain problematic nodes like CALL_EXPR. */
1762 if (TREE_CODE (decl
) == TYPE_DECL
1763 && DECL_ORIGINAL_TYPE (decl
)
1764 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
1766 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
1767 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl
)) == REFERENCE_TYPE
)
1768 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl
)), seq_p
);
1771 if (VAR_P (decl
) && !DECL_EXTERNAL (decl
))
1773 tree init
= DECL_INITIAL (decl
);
1774 bool is_vla
= false;
1777 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl
), &size
)
1778 || (!TREE_STATIC (decl
)
1779 && flag_stack_check
== GENERIC_STACK_CHECK
1781 (unsigned HOST_WIDE_INT
) STACK_CHECK_MAX_VAR_SIZE
)))
1783 gimplify_vla_decl (decl
, seq_p
);
1787 if (asan_poisoned_variables
1789 && TREE_ADDRESSABLE (decl
)
1790 && !TREE_STATIC (decl
)
1791 && !DECL_HAS_VALUE_EXPR_P (decl
)
1792 && DECL_ALIGN (decl
) <= MAX_SUPPORTED_STACK_ALIGNMENT
1793 && dbg_cnt (asan_use_after_scope
)
1794 && !gimplify_omp_ctxp
)
1796 asan_poisoned_variables
->add (decl
);
1797 asan_poison_variable (decl
, false, seq_p
);
1798 if (!DECL_ARTIFICIAL (decl
) && gimplify_ctxp
->live_switch_vars
)
1799 gimplify_ctxp
->live_switch_vars
->add (decl
);
1802 /* Some front ends do not explicitly declare all anonymous
1803 artificial variables. We compensate here by declaring the
1804 variables, though it would be better if the front ends would
1805 explicitly declare them. */
1806 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
1807 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
1808 gimple_add_tmp_var (decl
);
1810 if (init
&& init
!= error_mark_node
)
1812 if (!TREE_STATIC (decl
))
1814 DECL_INITIAL (decl
) = NULL_TREE
;
1815 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
1816 gimplify_and_add (init
, seq_p
);
1820 /* We must still examine initializers for static variables
1821 as they may contain a label address. */
1822 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
1829 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1830 and replacing the LOOP_EXPR with goto, but if the loop contains an
1831 EXIT_EXPR, we need to append a label for it to jump to. */
1833 static enum gimplify_status
1834 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1836 tree saved_label
= gimplify_ctxp
->exit_label
;
1837 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
1839 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
1841 gimplify_ctxp
->exit_label
= NULL_TREE
;
1843 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
1845 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
1847 if (gimplify_ctxp
->exit_label
)
1848 gimplify_seq_add_stmt (pre_p
,
1849 gimple_build_label (gimplify_ctxp
->exit_label
));
1851 gimplify_ctxp
->exit_label
= saved_label
;
1857 /* Gimplify a statement list onto a sequence. These may be created either
1858 by an enlightened front-end, or by shortcut_cond_expr. */
1860 static enum gimplify_status
1861 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
1863 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
1865 tree_stmt_iterator i
= tsi_start (*expr_p
);
1867 while (!tsi_end_p (i
))
1869 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
1882 /* Callback for walk_gimple_seq. */
1885 warn_switch_unreachable_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
1886 struct walk_stmt_info
*wi
)
1888 gimple
*stmt
= gsi_stmt (*gsi_p
);
1890 *handled_ops_p
= true;
1891 switch (gimple_code (stmt
))
1894 /* A compiler-generated cleanup or a user-written try block.
1895 If it's empty, don't dive into it--that would result in
1896 worse location info. */
1897 if (gimple_try_eval (stmt
) == NULL
)
1900 return integer_zero_node
;
1905 case GIMPLE_EH_FILTER
:
1906 case GIMPLE_TRANSACTION
:
1907 /* Walk the sub-statements. */
1908 *handled_ops_p
= false;
1912 /* Ignore these. We may generate them before declarations that
1913 are never executed. If there's something to warn about,
1914 there will be non-debug stmts too, and we'll catch those. */
1918 if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
1920 *handled_ops_p
= false;
1925 /* Save the first "real" statement (not a decl/lexical scope/...). */
1927 return integer_zero_node
;
1932 /* Possibly warn about unreachable statements between switch's controlling
1933 expression and the first case. SEQ is the body of a switch expression. */
1936 maybe_warn_switch_unreachable (gimple_seq seq
)
1938 if (!warn_switch_unreachable
1939 /* This warning doesn't play well with Fortran when optimizations
1941 || lang_GNU_Fortran ()
1945 struct walk_stmt_info wi
;
1946 memset (&wi
, 0, sizeof (wi
));
1947 walk_gimple_seq (seq
, warn_switch_unreachable_r
, NULL
, &wi
);
1948 gimple
*stmt
= (gimple
*) wi
.info
;
1950 if (stmt
&& gimple_code (stmt
) != GIMPLE_LABEL
)
1952 if (gimple_code (stmt
) == GIMPLE_GOTO
1953 && TREE_CODE (gimple_goto_dest (stmt
)) == LABEL_DECL
1954 && DECL_ARTIFICIAL (gimple_goto_dest (stmt
)))
1955 /* Don't warn for compiler-generated gotos. These occur
1956 in Duff's devices, for example. */;
1958 warning_at (gimple_location (stmt
), OPT_Wswitch_unreachable
,
1959 "statement will never be executed");
1964 /* A label entry that pairs label and a location. */
1971 /* Find LABEL in vector of label entries VEC. */
1973 static struct label_entry
*
1974 find_label_entry (const auto_vec
<struct label_entry
> *vec
, tree label
)
1977 struct label_entry
*l
;
1979 FOR_EACH_VEC_ELT (*vec
, i
, l
)
1980 if (l
->label
== label
)
1985 /* Return true if LABEL, a LABEL_DECL, represents a case label
1986 in a vector of labels CASES. */
1989 case_label_p (const vec
<tree
> *cases
, tree label
)
1994 FOR_EACH_VEC_ELT (*cases
, i
, l
)
1995 if (CASE_LABEL (l
) == label
)
2000 /* Find the last nondebug statement in a scope STMT. */
2003 last_stmt_in_scope (gimple
*stmt
)
2008 switch (gimple_code (stmt
))
2012 gbind
*bind
= as_a
<gbind
*> (stmt
);
2013 stmt
= gimple_seq_last_nondebug_stmt (gimple_bind_body (bind
));
2014 return last_stmt_in_scope (stmt
);
2019 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
2020 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt
));
2021 gimple
*last_eval
= last_stmt_in_scope (stmt
);
2022 if (gimple_stmt_may_fallthru (last_eval
)
2023 && (last_eval
== NULL
2024 || !gimple_call_internal_p (last_eval
, IFN_FALLTHROUGH
))
2025 && gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
2027 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt
));
2028 return last_stmt_in_scope (stmt
);
2042 /* Collect interesting labels in LABELS and return the statement preceding
2043 another case label, or a user-defined label. Store a location useful
2044 to give warnings at *PREVLOC (usually the location of the returned
2045 statement or of its surrounding scope). */
2048 collect_fallthrough_labels (gimple_stmt_iterator
*gsi_p
,
2049 auto_vec
<struct label_entry
> *labels
,
2050 location_t
*prevloc
)
2052 gimple
*prev
= NULL
;
2054 *prevloc
= UNKNOWN_LOCATION
;
2057 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
)
2059 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2060 which starts on a GIMPLE_SWITCH and ends with a break label.
2061 Handle that as a single statement that can fall through. */
2062 gbind
*bind
= as_a
<gbind
*> (gsi_stmt (*gsi_p
));
2063 gimple
*first
= gimple_seq_first_stmt (gimple_bind_body (bind
));
2064 gimple
*last
= gimple_seq_last_stmt (gimple_bind_body (bind
));
2066 && gimple_code (first
) == GIMPLE_SWITCH
2067 && gimple_code (last
) == GIMPLE_LABEL
)
2069 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2070 if (SWITCH_BREAK_LABEL_P (label
))
2078 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
2079 || gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_TRY
)
2081 /* Nested scope. Only look at the last statement of
2082 the innermost scope. */
2083 location_t bind_loc
= gimple_location (gsi_stmt (*gsi_p
));
2084 gimple
*last
= last_stmt_in_scope (gsi_stmt (*gsi_p
));
2088 /* It might be a label without a location. Use the
2089 location of the scope then. */
2090 if (!gimple_has_location (prev
))
2091 *prevloc
= bind_loc
;
2097 /* Ifs are tricky. */
2098 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_COND
)
2100 gcond
*cond_stmt
= as_a
<gcond
*> (gsi_stmt (*gsi_p
));
2101 tree false_lab
= gimple_cond_false_label (cond_stmt
);
2102 location_t if_loc
= gimple_location (cond_stmt
);
2105 if (i > 1) goto <D.2259>; else goto D;
2106 we can't do much with the else-branch. */
2107 if (!DECL_ARTIFICIAL (false_lab
))
2110 /* Go on until the false label, then one step back. */
2111 for (; !gsi_end_p (*gsi_p
); gsi_next (gsi_p
))
2113 gimple
*stmt
= gsi_stmt (*gsi_p
);
2114 if (gimple_code (stmt
) == GIMPLE_LABEL
2115 && gimple_label_label (as_a
<glabel
*> (stmt
)) == false_lab
)
2119 /* Not found? Oops. */
2120 if (gsi_end_p (*gsi_p
))
2123 struct label_entry l
= { false_lab
, if_loc
};
2124 labels
->safe_push (l
);
2126 /* Go to the last statement of the then branch. */
2129 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2135 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_GOTO
2136 && !gimple_has_location (gsi_stmt (*gsi_p
)))
2138 /* Look at the statement before, it might be
2139 attribute fallthrough, in which case don't warn. */
2141 bool fallthru_before_dest
2142 = gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_FALLTHROUGH
);
2144 tree goto_dest
= gimple_goto_dest (gsi_stmt (*gsi_p
));
2145 if (!fallthru_before_dest
)
2147 struct label_entry l
= { goto_dest
, if_loc
};
2148 labels
->safe_push (l
);
2151 /* And move back. */
2155 /* Remember the last statement. Skip labels that are of no interest
2157 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2159 tree label
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (*gsi_p
)));
2160 if (find_label_entry (labels
, label
))
2161 prev
= gsi_stmt (*gsi_p
);
2163 else if (gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_ASAN_MARK
))
2165 else if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_PREDICT
)
2167 else if (!is_gimple_debug (gsi_stmt (*gsi_p
)))
2168 prev
= gsi_stmt (*gsi_p
);
2171 while (!gsi_end_p (*gsi_p
)
2172 /* Stop if we find a case or a user-defined label. */
2173 && (gimple_code (gsi_stmt (*gsi_p
)) != GIMPLE_LABEL
2174 || !gimple_has_location (gsi_stmt (*gsi_p
))));
2176 if (prev
&& gimple_has_location (prev
))
2177 *prevloc
= gimple_location (prev
);
2181 /* Return true if the switch fallthough warning should occur. LABEL is
2182 the label statement that we're falling through to. */
2185 should_warn_for_implicit_fallthrough (gimple_stmt_iterator
*gsi_p
, tree label
)
2187 gimple_stmt_iterator gsi
= *gsi_p
;
2189 /* Don't warn if the label is marked with a "falls through" comment. */
2190 if (FALLTHROUGH_LABEL_P (label
))
2193 /* Don't warn for non-case labels followed by a statement:
2198 as these are likely intentional. */
2199 if (!case_label_p (&gimplify_ctxp
->case_labels
, label
))
2202 while (!gsi_end_p (gsi
)
2203 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2204 && (l
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi
))))
2205 && !case_label_p (&gimplify_ctxp
->case_labels
, l
))
2206 gsi_next_nondebug (&gsi
);
2207 if (gsi_end_p (gsi
) || gimple_code (gsi_stmt (gsi
)) != GIMPLE_LABEL
)
2211 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2212 immediately breaks. */
2215 /* Skip all immediately following labels. */
2216 while (!gsi_end_p (gsi
)
2217 && (gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2218 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_PREDICT
))
2219 gsi_next_nondebug (&gsi
);
2221 /* { ... something; default:; } */
2223 /* { ... something; default: break; } or
2224 { ... something; default: goto L; } */
2225 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_GOTO
2226 /* { ... something; default: return; } */
2227 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
2233 /* Callback for walk_gimple_seq. */
2236 warn_implicit_fallthrough_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2237 struct walk_stmt_info
*)
2239 gimple
*stmt
= gsi_stmt (*gsi_p
);
2241 *handled_ops_p
= true;
2242 switch (gimple_code (stmt
))
2247 case GIMPLE_EH_FILTER
:
2248 case GIMPLE_TRANSACTION
:
2249 /* Walk the sub-statements. */
2250 *handled_ops_p
= false;
2253 /* Find a sequence of form:
2260 and possibly warn. */
2263 /* Found a label. Skip all immediately following labels. */
2264 while (!gsi_end_p (*gsi_p
)
2265 && gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2266 gsi_next_nondebug (gsi_p
);
2268 /* There might be no more statements. */
2269 if (gsi_end_p (*gsi_p
))
2270 return integer_zero_node
;
2272 /* Vector of labels that fall through. */
2273 auto_vec
<struct label_entry
> labels
;
2275 gimple
*prev
= collect_fallthrough_labels (gsi_p
, &labels
, &prevloc
);
2277 /* There might be no more statements. */
2278 if (gsi_end_p (*gsi_p
))
2279 return integer_zero_node
;
2281 gimple
*next
= gsi_stmt (*gsi_p
);
2283 /* If what follows is a label, then we may have a fallthrough. */
2284 if (gimple_code (next
) == GIMPLE_LABEL
2285 && gimple_has_location (next
)
2286 && (label
= gimple_label_label (as_a
<glabel
*> (next
)))
2289 struct label_entry
*l
;
2290 bool warned_p
= false;
2291 auto_diagnostic_group d
;
2292 if (!should_warn_for_implicit_fallthrough (gsi_p
, label
))
2294 else if (gimple_code (prev
) == GIMPLE_LABEL
2295 && (label
= gimple_label_label (as_a
<glabel
*> (prev
)))
2296 && (l
= find_label_entry (&labels
, label
)))
2297 warned_p
= warning_at (l
->loc
, OPT_Wimplicit_fallthrough_
,
2298 "this statement may fall through");
2299 else if (!gimple_call_internal_p (prev
, IFN_FALLTHROUGH
)
2300 /* Try to be clever and don't warn when the statement
2301 can't actually fall through. */
2302 && gimple_stmt_may_fallthru (prev
)
2303 && prevloc
!= UNKNOWN_LOCATION
)
2304 warned_p
= warning_at (prevloc
,
2305 OPT_Wimplicit_fallthrough_
,
2306 "this statement may fall through");
2308 inform (gimple_location (next
), "here");
2310 /* Mark this label as processed so as to prevent multiple
2311 warnings in nested switches. */
2312 FALLTHROUGH_LABEL_P (label
) = true;
2314 /* So that next warn_implicit_fallthrough_r will start looking for
2315 a new sequence starting with this label. */
2326 /* Warn when a switch case falls through. */
2329 maybe_warn_implicit_fallthrough (gimple_seq seq
)
2331 if (!warn_implicit_fallthrough
)
2334 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2337 || lang_GNU_OBJC ()))
2340 struct walk_stmt_info wi
;
2341 memset (&wi
, 0, sizeof (wi
));
2342 walk_gimple_seq (seq
, warn_implicit_fallthrough_r
, NULL
, &wi
);
2345 /* Callback for walk_gimple_seq. */
2348 expand_FALLTHROUGH_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2349 struct walk_stmt_info
*wi
)
2351 gimple
*stmt
= gsi_stmt (*gsi_p
);
2353 *handled_ops_p
= true;
2354 switch (gimple_code (stmt
))
2359 case GIMPLE_EH_FILTER
:
2360 case GIMPLE_TRANSACTION
:
2361 /* Walk the sub-statements. */
2362 *handled_ops_p
= false;
2365 if (gimple_call_internal_p (stmt
, IFN_FALLTHROUGH
))
2367 gsi_remove (gsi_p
, true);
2368 if (gsi_end_p (*gsi_p
))
2370 *static_cast<location_t
*>(wi
->info
) = gimple_location (stmt
);
2371 return integer_zero_node
;
2375 location_t loc
= gimple_location (stmt
);
2377 gimple_stmt_iterator gsi2
= *gsi_p
;
2378 stmt
= gsi_stmt (gsi2
);
2379 if (gimple_code (stmt
) == GIMPLE_GOTO
&& !gimple_has_location (stmt
))
2381 /* Go on until the artificial label. */
2382 tree goto_dest
= gimple_goto_dest (stmt
);
2383 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
2385 if (gimple_code (gsi_stmt (gsi2
)) == GIMPLE_LABEL
2386 && gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi2
)))
2391 /* Not found? Stop. */
2392 if (gsi_end_p (gsi2
))
2395 /* Look one past it. */
2399 /* We're looking for a case label or default label here. */
2400 while (!gsi_end_p (gsi2
))
2402 stmt
= gsi_stmt (gsi2
);
2403 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2405 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
2406 if (gimple_has_location (stmt
) && DECL_ARTIFICIAL (label
))
2412 else if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2414 else if (!is_gimple_debug (stmt
))
2415 /* Anything else is not expected. */
2420 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2421 "a case label or default label");
2430 /* Expand all FALLTHROUGH () calls in SEQ. */
2433 expand_FALLTHROUGH (gimple_seq
*seq_p
)
2435 struct walk_stmt_info wi
;
2437 memset (&wi
, 0, sizeof (wi
));
2438 wi
.info
= (void *) &loc
;
2439 walk_gimple_seq_mod (seq_p
, expand_FALLTHROUGH_r
, NULL
, &wi
);
2440 if (wi
.callback_result
== integer_zero_node
)
2441 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2442 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2443 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2444 "a case label or default label");
2448 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2451 static enum gimplify_status
2452 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2454 tree switch_expr
= *expr_p
;
2455 gimple_seq switch_body_seq
= NULL
;
2456 enum gimplify_status ret
;
2457 tree index_type
= TREE_TYPE (switch_expr
);
2458 if (index_type
== NULL_TREE
)
2459 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
2461 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
2463 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
2466 if (SWITCH_BODY (switch_expr
))
2469 vec
<tree
> saved_labels
;
2470 hash_set
<tree
> *saved_live_switch_vars
= NULL
;
2471 tree default_case
= NULL_TREE
;
2472 gswitch
*switch_stmt
;
2474 /* Save old labels, get new ones from body, then restore the old
2475 labels. Save all the things from the switch body to append after. */
2476 saved_labels
= gimplify_ctxp
->case_labels
;
2477 gimplify_ctxp
->case_labels
.create (8);
2479 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2480 saved_live_switch_vars
= gimplify_ctxp
->live_switch_vars
;
2481 tree_code body_type
= TREE_CODE (SWITCH_BODY (switch_expr
));
2482 if (body_type
== BIND_EXPR
|| body_type
== STATEMENT_LIST
)
2483 gimplify_ctxp
->live_switch_vars
= new hash_set
<tree
> (4);
2485 gimplify_ctxp
->live_switch_vars
= NULL
;
2487 bool old_in_switch_expr
= gimplify_ctxp
->in_switch_expr
;
2488 gimplify_ctxp
->in_switch_expr
= true;
2490 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
2492 gimplify_ctxp
->in_switch_expr
= old_in_switch_expr
;
2493 maybe_warn_switch_unreachable (switch_body_seq
);
2494 maybe_warn_implicit_fallthrough (switch_body_seq
);
2495 /* Only do this for the outermost GIMPLE_SWITCH. */
2496 if (!gimplify_ctxp
->in_switch_expr
)
2497 expand_FALLTHROUGH (&switch_body_seq
);
2499 labels
= gimplify_ctxp
->case_labels
;
2500 gimplify_ctxp
->case_labels
= saved_labels
;
2502 if (gimplify_ctxp
->live_switch_vars
)
2504 gcc_assert (gimplify_ctxp
->live_switch_vars
->is_empty ());
2505 delete gimplify_ctxp
->live_switch_vars
;
2507 gimplify_ctxp
->live_switch_vars
= saved_live_switch_vars
;
2509 preprocess_case_label_vec_for_gimple (labels
, index_type
,
2512 bool add_bind
= false;
2515 glabel
*new_default
;
2518 = build_case_label (NULL_TREE
, NULL_TREE
,
2519 create_artificial_label (UNKNOWN_LOCATION
));
2520 if (old_in_switch_expr
)
2522 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case
)) = 1;
2525 new_default
= gimple_build_label (CASE_LABEL (default_case
));
2526 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
2528 else if (old_in_switch_expr
)
2530 gimple
*last
= gimple_seq_last_stmt (switch_body_seq
);
2531 if (last
&& gimple_code (last
) == GIMPLE_LABEL
)
2533 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2534 if (SWITCH_BREAK_LABEL_P (label
))
2539 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
2540 default_case
, labels
);
2541 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2542 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2543 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2544 so that we can easily find the start and end of the switch
2548 gimple_seq bind_body
= NULL
;
2549 gimplify_seq_add_stmt (&bind_body
, switch_stmt
);
2550 gimple_seq_add_seq (&bind_body
, switch_body_seq
);
2551 gbind
*bind
= gimple_build_bind (NULL_TREE
, bind_body
, NULL_TREE
);
2552 gimple_set_location (bind
, EXPR_LOCATION (switch_expr
));
2553 gimplify_seq_add_stmt (pre_p
, bind
);
2557 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
2558 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
2568 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2570 static enum gimplify_status
2571 gimplify_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2573 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
2574 == current_function_decl
);
2576 tree label
= LABEL_EXPR_LABEL (*expr_p
);
2577 glabel
*label_stmt
= gimple_build_label (label
);
2578 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2579 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2581 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2582 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2584 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2585 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2591 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2593 static enum gimplify_status
2594 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2596 struct gimplify_ctx
*ctxp
;
2599 /* Invalid programs can play Duff's Device type games with, for example,
2600 #pragma omp parallel. At least in the C front end, we don't
2601 detect such invalid branches until after gimplification, in the
2602 diagnose_omp_blocks pass. */
2603 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
2604 if (ctxp
->case_labels
.exists ())
2607 tree label
= CASE_LABEL (*expr_p
);
2608 label_stmt
= gimple_build_label (label
);
2609 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2610 ctxp
->case_labels
.safe_push (*expr_p
);
2611 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2613 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2614 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2616 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2617 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2623 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2627 build_and_jump (tree
*label_p
)
2629 if (label_p
== NULL
)
2630 /* If there's nowhere to jump, just fall through. */
2633 if (*label_p
== NULL_TREE
)
2635 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
2639 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
2642 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2643 This also involves building a label to jump to and communicating it to
2644 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2646 static enum gimplify_status
2647 gimplify_exit_expr (tree
*expr_p
)
2649 tree cond
= TREE_OPERAND (*expr_p
, 0);
2652 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
2653 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
2659 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2660 different from its canonical type, wrap the whole thing inside a
2661 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2664 The canonical type of a COMPONENT_REF is the type of the field being
2665 referenced--unless the field is a bit-field which can be read directly
2666 in a smaller mode, in which case the canonical type is the
2667 sign-appropriate type corresponding to that mode. */
2670 canonicalize_component_ref (tree
*expr_p
)
2672 tree expr
= *expr_p
;
2675 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
);
2677 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
2678 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
2680 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
2682 /* One could argue that all the stuff below is not necessary for
2683 the non-bitfield case and declare it a FE error if type
2684 adjustment would be needed. */
2685 if (TREE_TYPE (expr
) != type
)
2687 #ifdef ENABLE_TYPES_CHECKING
2688 tree old_type
= TREE_TYPE (expr
);
2692 /* We need to preserve qualifiers and propagate them from
2694 type_quals
= TYPE_QUALS (type
)
2695 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
2696 if (TYPE_QUALS (type
) != type_quals
)
2697 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
2699 /* Set the type of the COMPONENT_REF to the underlying type. */
2700 TREE_TYPE (expr
) = type
;
2702 #ifdef ENABLE_TYPES_CHECKING
2703 /* It is now a FE error, if the conversion from the canonical
2704 type to the original expression type is not useless. */
2705 gcc_assert (useless_type_conversion_p (old_type
, type
));
2710 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2711 to foo, embed that change in the ADDR_EXPR by converting
2716 where L is the lower bound. For simplicity, only do this for constant
2718 The constraint is that the type of &array[L] is trivially convertible
2722 canonicalize_addr_expr (tree
*expr_p
)
2724 tree expr
= *expr_p
;
2725 tree addr_expr
= TREE_OPERAND (expr
, 0);
2726 tree datype
, ddatype
, pddatype
;
2728 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2729 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
2730 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
2733 /* The addr_expr type should be a pointer to an array. */
2734 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
2735 if (TREE_CODE (datype
) != ARRAY_TYPE
)
2738 /* The pointer to element type shall be trivially convertible to
2739 the expression pointer type. */
2740 ddatype
= TREE_TYPE (datype
);
2741 pddatype
= build_pointer_type (ddatype
);
2742 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
2746 /* The lower bound and element sizes must be constant. */
2747 if (!TYPE_SIZE_UNIT (ddatype
)
2748 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
2749 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
2750 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
2753 /* All checks succeeded. Build a new node to merge the cast. */
2754 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
2755 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
2756 NULL_TREE
, NULL_TREE
);
2757 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
2759 /* We can have stripped a required restrict qualifier above. */
2760 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
2761 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
2764 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2765 underneath as appropriate. */
2767 static enum gimplify_status
2768 gimplify_conversion (tree
*expr_p
)
2770 location_t loc
= EXPR_LOCATION (*expr_p
);
2771 gcc_assert (CONVERT_EXPR_P (*expr_p
));
2773 /* Then strip away all but the outermost conversion. */
2774 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
2776 /* And remove the outermost conversion if it's useless. */
2777 if (tree_ssa_useless_type_conversion (*expr_p
))
2778 *expr_p
= TREE_OPERAND (*expr_p
, 0);
2780 /* If we still have a conversion at the toplevel,
2781 then canonicalize some constructs. */
2782 if (CONVERT_EXPR_P (*expr_p
))
2784 tree sub
= TREE_OPERAND (*expr_p
, 0);
2786 /* If a NOP conversion is changing the type of a COMPONENT_REF
2787 expression, then canonicalize its type now in order to expose more
2788 redundant conversions. */
2789 if (TREE_CODE (sub
) == COMPONENT_REF
)
2790 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
2792 /* If a NOP conversion is changing a pointer to array of foo
2793 to a pointer to foo, embed that change in the ADDR_EXPR. */
2794 else if (TREE_CODE (sub
) == ADDR_EXPR
)
2795 canonicalize_addr_expr (expr_p
);
2798 /* If we have a conversion to a non-register type force the
2799 use of a VIEW_CONVERT_EXPR instead. */
2800 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
2801 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
2802 TREE_OPERAND (*expr_p
, 0));
2804 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2805 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
2806 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
2811 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2812 DECL_VALUE_EXPR, and it's worth re-examining things. */
2814 static enum gimplify_status
2815 gimplify_var_or_parm_decl (tree
*expr_p
)
2817 tree decl
= *expr_p
;
2819 /* ??? If this is a local variable, and it has not been seen in any
2820 outer BIND_EXPR, then it's probably the result of a duplicate
2821 declaration, for which we've already issued an error. It would
2822 be really nice if the front end wouldn't leak these at all.
2823 Currently the only known culprit is C++ destructors, as seen
2824 in g++.old-deja/g++.jason/binding.C. */
2826 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
2827 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
2828 && decl_function_context (decl
) == current_function_decl
)
2830 gcc_assert (seen_error ());
2834 /* When within an OMP context, notice uses of variables. */
2835 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
2838 /* If the decl is an alias for another expression, substitute it now. */
2839 if (DECL_HAS_VALUE_EXPR_P (decl
))
2841 *expr_p
= unshare_expr (DECL_VALUE_EXPR (decl
));
2848 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2851 recalculate_side_effects (tree t
)
2853 enum tree_code code
= TREE_CODE (t
);
2854 int len
= TREE_OPERAND_LENGTH (t
);
2857 switch (TREE_CODE_CLASS (code
))
2859 case tcc_expression
:
2865 case PREDECREMENT_EXPR
:
2866 case PREINCREMENT_EXPR
:
2867 case POSTDECREMENT_EXPR
:
2868 case POSTINCREMENT_EXPR
:
2869 /* All of these have side-effects, no matter what their
2878 case tcc_comparison
: /* a comparison expression */
2879 case tcc_unary
: /* a unary arithmetic expression */
2880 case tcc_binary
: /* a binary arithmetic expression */
2881 case tcc_reference
: /* a reference */
2882 case tcc_vl_exp
: /* a function call */
2883 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
2884 for (i
= 0; i
< len
; ++i
)
2886 tree op
= TREE_OPERAND (t
, i
);
2887 if (op
&& TREE_SIDE_EFFECTS (op
))
2888 TREE_SIDE_EFFECTS (t
) = 1;
2893 /* No side-effects. */
2901 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2905 : min_lval '[' val ']'
2907 | compound_lval '[' val ']'
2908 | compound_lval '.' ID
2910 This is not part of the original SIMPLE definition, which separates
2911 array and member references, but it seems reasonable to handle them
2912 together. Also, this way we don't run into problems with union
2913 aliasing; gcc requires that for accesses through a union to alias, the
2914 union reference must be explicit, which was not always the case when we
2915 were splitting up array and member refs.
2917 PRE_P points to the sequence where side effects that must happen before
2918 *EXPR_P should be stored.
2920 POST_P points to the sequence where side effects that must happen after
2921 *EXPR_P should be stored. */
2923 static enum gimplify_status
2924 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
2925 fallback_t fallback
)
2928 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
2930 location_t loc
= EXPR_LOCATION (*expr_p
);
2931 tree expr
= *expr_p
;
2933 /* Create a stack of the subexpressions so later we can walk them in
2934 order from inner to outer. */
2935 auto_vec
<tree
, 10> expr_stack
;
2937 /* We can handle anything that get_inner_reference can deal with. */
2938 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
2941 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2942 if (TREE_CODE (*p
) == INDIRECT_REF
)
2943 *p
= fold_indirect_ref_loc (loc
, *p
);
2945 if (handled_component_p (*p
))
2947 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2948 additional COMPONENT_REFs. */
2949 else if ((VAR_P (*p
) || TREE_CODE (*p
) == PARM_DECL
)
2950 && gimplify_var_or_parm_decl (p
) == GS_OK
)
2955 expr_stack
.safe_push (*p
);
2958 gcc_assert (expr_stack
.length ());
2960 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2961 walked through and P points to the innermost expression.
2963 Java requires that we elaborated nodes in source order. That
2964 means we must gimplify the inner expression followed by each of
2965 the indices, in order. But we can't gimplify the inner
2966 expression until we deal with any variable bounds, sizes, or
2967 positions in order to deal with PLACEHOLDER_EXPRs.
2969 So we do this in three steps. First we deal with the annotations
2970 for any variables in the components, then we gimplify the base,
2971 then we gimplify any indices, from left to right. */
2972 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
2974 tree t
= expr_stack
[i
];
2976 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2978 /* Gimplify the low bound and element type size and put them into
2979 the ARRAY_REF. If these values are set, they have already been
2981 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
2983 tree low
= unshare_expr (array_ref_low_bound (t
));
2984 if (!is_gimple_min_invariant (low
))
2986 TREE_OPERAND (t
, 2) = low
;
2987 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
2988 post_p
, is_gimple_reg
,
2990 ret
= MIN (ret
, tret
);
2995 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
2996 is_gimple_reg
, fb_rvalue
);
2997 ret
= MIN (ret
, tret
);
3000 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
3002 tree elmt_size
= array_ref_element_size (t
);
3003 if (!is_gimple_min_invariant (elmt_size
))
3005 elmt_size
= unshare_expr (elmt_size
);
3006 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
3007 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
3009 /* Divide the element size by the alignment of the element
3011 elmt_size
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3014 TREE_OPERAND (t
, 3) = elmt_size
;
3015 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
3016 post_p
, is_gimple_reg
,
3018 ret
= MIN (ret
, tret
);
3023 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
3024 is_gimple_reg
, fb_rvalue
);
3025 ret
= MIN (ret
, tret
);
3028 else if (TREE_CODE (t
) == COMPONENT_REF
)
3030 /* Set the field offset into T and gimplify it. */
3031 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
3033 tree offset
= component_ref_field_offset (t
);
3034 if (!is_gimple_min_invariant (offset
))
3036 offset
= unshare_expr (offset
);
3037 tree field
= TREE_OPERAND (t
, 1);
3039 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
3041 /* Divide the offset by its alignment. */
3042 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3045 TREE_OPERAND (t
, 2) = offset
;
3046 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
3047 post_p
, is_gimple_reg
,
3049 ret
= MIN (ret
, tret
);
3054 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3055 is_gimple_reg
, fb_rvalue
);
3056 ret
= MIN (ret
, tret
);
3061 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3062 so as to match the min_lval predicate. Failure to do so may result
3063 in the creation of large aggregate temporaries. */
3064 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
3065 fallback
| fb_lvalue
);
3066 ret
= MIN (ret
, tret
);
3068 /* And finally, the indices and operands of ARRAY_REF. During this
3069 loop we also remove any useless conversions. */
3070 for (; expr_stack
.length () > 0; )
3072 tree t
= expr_stack
.pop ();
3074 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3076 /* Gimplify the dimension. */
3077 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1)))
3079 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
3080 is_gimple_val
, fb_rvalue
);
3081 ret
= MIN (ret
, tret
);
3085 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
3087 /* The innermost expression P may have originally had
3088 TREE_SIDE_EFFECTS set which would have caused all the outer
3089 expressions in *EXPR_P leading to P to also have had
3090 TREE_SIDE_EFFECTS set. */
3091 recalculate_side_effects (t
);
3094 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3095 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
3097 canonicalize_component_ref (expr_p
);
3100 expr_stack
.release ();
3102 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
3107 /* Gimplify the self modifying expression pointed to by EXPR_P
3110 PRE_P points to the list where side effects that must happen before
3111 *EXPR_P should be stored.
3113 POST_P points to the list where side effects that must happen after
3114 *EXPR_P should be stored.
3116 WANT_VALUE is nonzero iff we want to use the value of this expression
3117 in another expression.
3119 ARITH_TYPE is the type the computation should be performed in. */
3121 enum gimplify_status
3122 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3123 bool want_value
, tree arith_type
)
3125 enum tree_code code
;
3126 tree lhs
, lvalue
, rhs
, t1
;
3127 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
3129 enum tree_code arith_code
;
3130 enum gimplify_status ret
;
3131 location_t loc
= EXPR_LOCATION (*expr_p
);
3133 code
= TREE_CODE (*expr_p
);
3135 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
3136 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
3138 /* Prefix or postfix? */
3139 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
3140 /* Faster to treat as prefix if result is not used. */
3141 postfix
= want_value
;
3145 /* For postfix, make sure the inner expression's post side effects
3146 are executed after side effects from this expression. */
3150 /* Add or subtract? */
3151 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
3152 arith_code
= PLUS_EXPR
;
3154 arith_code
= MINUS_EXPR
;
3156 /* Gimplify the LHS into a GIMPLE lvalue. */
3157 lvalue
= TREE_OPERAND (*expr_p
, 0);
3158 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
3159 if (ret
== GS_ERROR
)
3162 /* Extract the operands to the arithmetic operation. */
3164 rhs
= TREE_OPERAND (*expr_p
, 1);
3166 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3167 that as the result value and in the postqueue operation. */
3170 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
3171 if (ret
== GS_ERROR
)
3174 lhs
= get_initialized_tmp_var (lhs
, pre_p
);
3177 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3178 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
3180 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
3181 if (arith_code
== MINUS_EXPR
)
3182 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
3183 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
3186 t1
= fold_convert (TREE_TYPE (*expr_p
),
3187 fold_build2 (arith_code
, arith_type
,
3188 fold_convert (arith_type
, lhs
),
3189 fold_convert (arith_type
, rhs
)));
3193 gimplify_assign (lvalue
, t1
, pre_p
);
3194 gimplify_seq_add_seq (orig_post_p
, post
);
3200 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
3205 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3208 maybe_with_size_expr (tree
*expr_p
)
3210 tree expr
= *expr_p
;
3211 tree type
= TREE_TYPE (expr
);
3214 /* If we've already wrapped this or the type is error_mark_node, we can't do
3216 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
3217 || type
== error_mark_node
)
3220 /* If the size isn't known or is a constant, we have nothing to do. */
3221 size
= TYPE_SIZE_UNIT (type
);
3222 if (!size
|| poly_int_tree_p (size
))
3225 /* Otherwise, make a WITH_SIZE_EXPR. */
3226 size
= unshare_expr (size
);
3227 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
3228 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
3231 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3232 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3233 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3234 gimplified to an SSA name. */
3236 enum gimplify_status
3237 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
3240 bool (*test
) (tree
);
3243 /* In general, we allow lvalues for function arguments to avoid
3244 extra overhead of copying large aggregates out of even larger
3245 aggregates into temporaries only to copy the temporaries to
3246 the argument list. Make optimizers happy by pulling out to
3247 temporaries those types that fit in registers. */
3248 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
3249 test
= is_gimple_val
, fb
= fb_rvalue
;
3252 test
= is_gimple_lvalue
, fb
= fb_either
;
3253 /* Also strip a TARGET_EXPR that would force an extra copy. */
3254 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
3256 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
3258 && !VOID_TYPE_P (TREE_TYPE (init
)))
3263 /* If this is a variable sized type, we must remember the size. */
3264 maybe_with_size_expr (arg_p
);
3266 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3267 /* Make sure arguments have the same location as the function call
3269 protected_set_expr_location (*arg_p
, call_location
);
3271 /* There is a sequence point before a function call. Side effects in
3272 the argument list must occur before the actual call. So, when
3273 gimplifying arguments, force gimplify_expr to use an internal
3274 post queue which is then appended to the end of PRE_P. */
3275 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
, allow_ssa
);
3278 /* Don't fold inside offloading or taskreg regions: it can break code by
3279 adding decl references that weren't in the source. We'll do it during
3280 omplower pass instead. */
3283 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
3285 struct gimplify_omp_ctx
*ctx
;
3286 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
3287 if ((ctx
->region_type
& (ORT_TARGET
| ORT_PARALLEL
| ORT_TASK
)) != 0)
3289 else if ((ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
3291 /* Delay folding of builtins until the IL is in consistent state
3292 so the diagnostic machinery can do a better job. */
3293 if (gimple_call_builtin_p (gsi_stmt (*gsi
)))
3295 return fold_stmt (gsi
);
3298 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3299 WANT_VALUE is true if the result of the call is desired. */
3301 static enum gimplify_status
3302 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
3304 tree fndecl
, parms
, p
, fnptrtype
;
3305 enum gimplify_status ret
;
3308 bool builtin_va_start_p
= false;
3309 location_t loc
= EXPR_LOCATION (*expr_p
);
3311 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
3313 /* For reliable diagnostics during inlining, it is necessary that
3314 every call_expr be annotated with file and line. */
3315 if (! EXPR_HAS_LOCATION (*expr_p
))
3316 SET_EXPR_LOCATION (*expr_p
, input_location
);
3318 /* Gimplify internal functions created in the FEs. */
3319 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
3324 nargs
= call_expr_nargs (*expr_p
);
3325 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
3326 auto_vec
<tree
> vargs (nargs
);
3328 for (i
= 0; i
< nargs
; i
++)
3330 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3331 EXPR_LOCATION (*expr_p
));
3332 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
3335 gcall
*call
= gimple_build_call_internal_vec (ifn
, vargs
);
3336 gimple_call_set_nothrow (call
, TREE_NOTHROW (*expr_p
));
3337 gimplify_seq_add_stmt (pre_p
, call
);
3341 /* This may be a call to a builtin function.
3343 Builtin function calls may be transformed into different
3344 (and more efficient) builtin function calls under certain
3345 circumstances. Unfortunately, gimplification can muck things
3346 up enough that the builtin expanders are not aware that certain
3347 transformations are still valid.
3349 So we attempt transformation/gimplification of the call before
3350 we gimplify the CALL_EXPR. At this time we do not manage to
3351 transform all calls in the same manner as the expanders do, but
3352 we do transform most of them. */
3353 fndecl
= get_callee_fndecl (*expr_p
);
3354 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
3355 switch (DECL_FUNCTION_CODE (fndecl
))
3357 CASE_BUILT_IN_ALLOCA
:
3358 /* If the call has been built for a variable-sized object, then we
3359 want to restore the stack level when the enclosing BIND_EXPR is
3360 exited to reclaim the allocated space; otherwise, we precisely
3361 need to do the opposite and preserve the latest stack level. */
3362 if (CALL_ALLOCA_FOR_VAR_P (*expr_p
))
3363 gimplify_ctxp
->save_stack
= true;
3365 gimplify_ctxp
->keep_stack
= true;
3368 case BUILT_IN_VA_START
:
3370 builtin_va_start_p
= TRUE
;
3371 if (call_expr_nargs (*expr_p
) < 2)
3373 error ("too few arguments to function %<va_start%>");
3374 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3378 if (fold_builtin_next_arg (*expr_p
, true))
3380 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3386 case BUILT_IN_EH_RETURN
:
3387 cfun
->calls_eh_return
= true;
3390 case BUILT_IN_CLEAR_PADDING
:
3391 if (call_expr_nargs (*expr_p
) == 1)
3393 /* Remember the original type of the argument in an internal
3394 dummy second argument, as in GIMPLE pointer conversions are
3396 p
= CALL_EXPR_ARG (*expr_p
, 0);
3398 = build_call_expr_loc (EXPR_LOCATION (*expr_p
), fndecl
, 2, p
,
3399 build_zero_cst (TREE_TYPE (p
)));
3407 if (fndecl
&& fndecl_built_in_p (fndecl
))
3409 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3410 if (new_tree
&& new_tree
!= *expr_p
)
3412 /* There was a transformation of this call which computes the
3413 same value, but in a more efficient way. Return and try
3420 /* Remember the original function pointer type. */
3421 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
3426 && (cfun
->curr_properties
& PROP_gimple_any
) == 0)
3428 tree variant
= omp_resolve_declare_variant (fndecl
);
3429 if (variant
!= fndecl
)
3430 CALL_EXPR_FN (*expr_p
) = build1 (ADDR_EXPR
, fnptrtype
, variant
);
3433 /* There is a sequence point before the call, so any side effects in
3434 the calling expression must occur before the actual call. Force
3435 gimplify_expr to use an internal post queue. */
3436 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
3437 is_gimple_call_addr
, fb_rvalue
);
3439 nargs
= call_expr_nargs (*expr_p
);
3441 /* Get argument types for verification. */
3442 fndecl
= get_callee_fndecl (*expr_p
);
3445 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3447 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
3449 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
3450 p
= DECL_ARGUMENTS (fndecl
);
3455 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
3458 /* If the last argument is __builtin_va_arg_pack () and it is not
3459 passed as a named argument, decrease the number of CALL_EXPR
3460 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3463 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
3465 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
3466 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
3469 && fndecl_built_in_p (last_arg_fndecl
, BUILT_IN_VA_ARG_PACK
))
3471 tree call
= *expr_p
;
3474 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
3475 CALL_EXPR_FN (call
),
3476 nargs
, CALL_EXPR_ARGP (call
));
3478 /* Copy all CALL_EXPR flags, location and block, except
3479 CALL_EXPR_VA_ARG_PACK flag. */
3480 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
3481 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
3482 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
3483 = CALL_EXPR_RETURN_SLOT_OPT (call
);
3484 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
3485 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
3487 /* Set CALL_EXPR_VA_ARG_PACK. */
3488 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
3492 /* If the call returns twice then after building the CFG the call
3493 argument computations will no longer dominate the call because
3494 we add an abnormal incoming edge to the call. So do not use SSA
3496 bool returns_twice
= call_expr_flags (*expr_p
) & ECF_RETURNS_TWICE
;
3498 /* Gimplify the function arguments. */
3501 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
3502 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
3503 PUSH_ARGS_REVERSED
? i
-- : i
++)
3505 enum gimplify_status t
;
3507 /* Avoid gimplifying the second argument to va_start, which needs to
3508 be the plain PARM_DECL. */
3509 if ((i
!= 1) || !builtin_va_start_p
)
3511 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3512 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3520 /* Gimplify the static chain. */
3521 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
3523 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
3524 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
3527 enum gimplify_status t
;
3528 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
3529 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3535 /* Verify the function result. */
3536 if (want_value
&& fndecl
3537 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
3539 error_at (loc
, "using result of function returning %<void%>");
3543 /* Try this again in case gimplification exposed something. */
3544 if (ret
!= GS_ERROR
)
3546 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3548 if (new_tree
&& new_tree
!= *expr_p
)
3550 /* There was a transformation of this call which computes the
3551 same value, but in a more efficient way. Return and try
3559 *expr_p
= error_mark_node
;
3563 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3564 decl. This allows us to eliminate redundant or useless
3565 calls to "const" functions. */
3566 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
3568 int flags
= call_expr_flags (*expr_p
);
3569 if (flags
& (ECF_CONST
| ECF_PURE
)
3570 /* An infinite loop is considered a side effect. */
3571 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
3572 TREE_SIDE_EFFECTS (*expr_p
) = 0;
3575 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3576 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3577 form and delegate the creation of a GIMPLE_CALL to
3578 gimplify_modify_expr. This is always possible because when
3579 WANT_VALUE is true, the caller wants the result of this call into
3580 a temporary, which means that we will emit an INIT_EXPR in
3581 internal_get_tmp_var which will then be handled by
3582 gimplify_modify_expr. */
3585 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3586 have to do is replicate it as a GIMPLE_CALL tuple. */
3587 gimple_stmt_iterator gsi
;
3588 call
= gimple_build_call_from_tree (*expr_p
, fnptrtype
);
3589 notice_special_calls (call
);
3590 gimplify_seq_add_stmt (pre_p
, call
);
3591 gsi
= gsi_last (*pre_p
);
3592 maybe_fold_stmt (&gsi
);
3593 *expr_p
= NULL_TREE
;
3596 /* Remember the original function type. */
3597 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
3598 CALL_EXPR_FN (*expr_p
));
3603 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3604 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3606 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3607 condition is true or false, respectively. If null, we should generate
3608 our own to skip over the evaluation of this specific expression.
3610 LOCUS is the source location of the COND_EXPR.
3612 This function is the tree equivalent of do_jump.
3614 shortcut_cond_r should only be called by shortcut_cond_expr. */
3617 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
3620 tree local_label
= NULL_TREE
;
3621 tree t
, expr
= NULL
;
3623 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3624 retain the shortcut semantics. Just insert the gotos here;
3625 shortcut_cond_expr will append the real blocks later. */
3626 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3628 location_t new_locus
;
3630 /* Turn if (a && b) into
3632 if (a); else goto no;
3633 if (b) goto yes; else goto no;
3636 if (false_label_p
== NULL
)
3637 false_label_p
= &local_label
;
3639 /* Keep the original source location on the first 'if'. */
3640 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
3641 append_to_statement_list (t
, &expr
);
3643 /* Set the source location of the && on the second 'if'. */
3644 new_locus
= rexpr_location (pred
, locus
);
3645 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3647 append_to_statement_list (t
, &expr
);
3649 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3651 location_t new_locus
;
3653 /* Turn if (a || b) into
3656 if (b) goto yes; else goto no;
3659 if (true_label_p
== NULL
)
3660 true_label_p
= &local_label
;
3662 /* Keep the original source location on the first 'if'. */
3663 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
3664 append_to_statement_list (t
, &expr
);
3666 /* Set the source location of the || on the second 'if'. */
3667 new_locus
= rexpr_location (pred
, locus
);
3668 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3670 append_to_statement_list (t
, &expr
);
3672 else if (TREE_CODE (pred
) == COND_EXPR
3673 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
3674 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
3676 location_t new_locus
;
3678 /* As long as we're messing with gotos, turn if (a ? b : c) into
3680 if (b) goto yes; else goto no;
3682 if (c) goto yes; else goto no;
3684 Don't do this if one of the arms has void type, which can happen
3685 in C++ when the arm is throw. */
3687 /* Keep the original source location on the first 'if'. Set the source
3688 location of the ? on the second 'if'. */
3689 new_locus
= rexpr_location (pred
, locus
);
3690 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
3691 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
3692 false_label_p
, locus
),
3693 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
3694 false_label_p
, new_locus
));
3698 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
3699 build_and_jump (true_label_p
),
3700 build_and_jump (false_label_p
));
3701 SET_EXPR_LOCATION (expr
, locus
);
3706 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
3707 append_to_statement_list (t
, &expr
);
3713 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3714 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3715 statement, if it is the last one. Otherwise, return NULL. */
3718 find_goto (tree expr
)
3723 if (TREE_CODE (expr
) == GOTO_EXPR
)
3726 if (TREE_CODE (expr
) != STATEMENT_LIST
)
3729 tree_stmt_iterator i
= tsi_start (expr
);
3731 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
3734 if (!tsi_one_before_end_p (i
))
3737 return find_goto (tsi_stmt (i
));
3740 /* Same as find_goto, except that it returns NULL if the destination
3741 is not a LABEL_DECL. */
3744 find_goto_label (tree expr
)
3746 tree dest
= find_goto (expr
);
3747 if (dest
&& TREE_CODE (GOTO_DESTINATION (dest
)) == LABEL_DECL
)
3752 /* Given a conditional expression EXPR with short-circuit boolean
3753 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3754 predicate apart into the equivalent sequence of conditionals. */
3757 shortcut_cond_expr (tree expr
)
3759 tree pred
= TREE_OPERAND (expr
, 0);
3760 tree then_
= TREE_OPERAND (expr
, 1);
3761 tree else_
= TREE_OPERAND (expr
, 2);
3762 tree true_label
, false_label
, end_label
, t
;
3764 tree
*false_label_p
;
3765 bool emit_end
, emit_false
, jump_over_else
;
3766 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3767 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3769 /* First do simple transformations. */
3772 /* If there is no 'else', turn
3775 if (a) if (b) then c. */
3776 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3778 /* Keep the original source location on the first 'if'. */
3779 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3780 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3781 /* Set the source location of the && on the second 'if'. */
3782 if (rexpr_has_location (pred
))
3783 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
3784 then_
= shortcut_cond_expr (expr
);
3785 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3786 pred
= TREE_OPERAND (pred
, 0);
3787 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
3788 SET_EXPR_LOCATION (expr
, locus
);
3794 /* If there is no 'then', turn
3797 if (a); else if (b); else d. */
3798 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3800 /* Keep the original source location on the first 'if'. */
3801 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3802 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3803 /* Set the source location of the || on the second 'if'. */
3804 if (rexpr_has_location (pred
))
3805 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
3806 else_
= shortcut_cond_expr (expr
);
3807 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3808 pred
= TREE_OPERAND (pred
, 0);
3809 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
3810 SET_EXPR_LOCATION (expr
, locus
);
3814 /* If we're done, great. */
3815 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
3816 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
3819 /* Otherwise we need to mess with gotos. Change
3822 if (a); else goto no;
3825 and recursively gimplify the condition. */
3827 true_label
= false_label
= end_label
= NULL_TREE
;
3829 /* If our arms just jump somewhere, hijack those labels so we don't
3830 generate jumps to jumps. */
3832 if (tree then_goto
= find_goto_label (then_
))
3834 true_label
= GOTO_DESTINATION (then_goto
);
3839 if (tree else_goto
= find_goto_label (else_
))
3841 false_label
= GOTO_DESTINATION (else_goto
);
3846 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3848 true_label_p
= &true_label
;
3850 true_label_p
= NULL
;
3852 /* The 'else' branch also needs a label if it contains interesting code. */
3853 if (false_label
|| else_se
)
3854 false_label_p
= &false_label
;
3856 false_label_p
= NULL
;
3858 /* If there was nothing else in our arms, just forward the label(s). */
3859 if (!then_se
&& !else_se
)
3860 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3861 EXPR_LOC_OR_LOC (expr
, input_location
));
3863 /* If our last subexpression already has a terminal label, reuse it. */
3865 t
= expr_last (else_
);
3867 t
= expr_last (then_
);
3870 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
3871 end_label
= LABEL_EXPR_LABEL (t
);
3873 /* If we don't care about jumping to the 'else' branch, jump to the end
3874 if the condition is false. */
3876 false_label_p
= &end_label
;
3878 /* We only want to emit these labels if we aren't hijacking them. */
3879 emit_end
= (end_label
== NULL_TREE
);
3880 emit_false
= (false_label
== NULL_TREE
);
3882 /* We only emit the jump over the else clause if we have to--if the
3883 then clause may fall through. Otherwise we can wind up with a
3884 useless jump and a useless label at the end of gimplified code,
3885 which will cause us to think that this conditional as a whole
3886 falls through even if it doesn't. If we then inline a function
3887 which ends with such a condition, that can cause us to issue an
3888 inappropriate warning about control reaching the end of a
3889 non-void function. */
3890 jump_over_else
= block_may_fallthru (then_
);
3892 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3893 EXPR_LOC_OR_LOC (expr
, input_location
));
3896 append_to_statement_list (pred
, &expr
);
3898 append_to_statement_list (then_
, &expr
);
3903 tree last
= expr_last (expr
);
3904 t
= build_and_jump (&end_label
);
3905 if (rexpr_has_location (last
))
3906 SET_EXPR_LOCATION (t
, rexpr_location (last
));
3907 append_to_statement_list (t
, &expr
);
3911 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
3912 append_to_statement_list (t
, &expr
);
3914 append_to_statement_list (else_
, &expr
);
3916 if (emit_end
&& end_label
)
3918 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
3919 append_to_statement_list (t
, &expr
);
3925 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3928 gimple_boolify (tree expr
)
3930 tree type
= TREE_TYPE (expr
);
3931 location_t loc
= EXPR_LOCATION (expr
);
3933 if (TREE_CODE (expr
) == NE_EXPR
3934 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
3935 && integer_zerop (TREE_OPERAND (expr
, 1)))
3937 tree call
= TREE_OPERAND (expr
, 0);
3938 tree fn
= get_callee_fndecl (call
);
3940 /* For __builtin_expect ((long) (x), y) recurse into x as well
3941 if x is truth_value_p. */
3943 && fndecl_built_in_p (fn
, BUILT_IN_EXPECT
)
3944 && call_expr_nargs (call
) == 2)
3946 tree arg
= CALL_EXPR_ARG (call
, 0);
3949 if (TREE_CODE (arg
) == NOP_EXPR
3950 && TREE_TYPE (arg
) == TREE_TYPE (call
))
3951 arg
= TREE_OPERAND (arg
, 0);
3952 if (truth_value_p (TREE_CODE (arg
)))
3954 arg
= gimple_boolify (arg
);
3955 CALL_EXPR_ARG (call
, 0)
3956 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
3962 switch (TREE_CODE (expr
))
3964 case TRUTH_AND_EXPR
:
3966 case TRUTH_XOR_EXPR
:
3967 case TRUTH_ANDIF_EXPR
:
3968 case TRUTH_ORIF_EXPR
:
3969 /* Also boolify the arguments of truth exprs. */
3970 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
3973 case TRUTH_NOT_EXPR
:
3974 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3976 /* These expressions always produce boolean results. */
3977 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3978 TREE_TYPE (expr
) = boolean_type_node
;
3982 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
3984 case annot_expr_ivdep_kind
:
3985 case annot_expr_unroll_kind
:
3986 case annot_expr_no_vector_kind
:
3987 case annot_expr_vector_kind
:
3988 case annot_expr_parallel_kind
:
3989 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3990 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3991 TREE_TYPE (expr
) = boolean_type_node
;
3998 if (COMPARISON_CLASS_P (expr
))
4000 /* There expressions always prduce boolean results. */
4001 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4002 TREE_TYPE (expr
) = boolean_type_node
;
4005 /* Other expressions that get here must have boolean values, but
4006 might need to be converted to the appropriate mode. */
4007 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
4009 return fold_convert_loc (loc
, boolean_type_node
, expr
);
4013 /* Given a conditional expression *EXPR_P without side effects, gimplify
4014 its operands. New statements are inserted to PRE_P. */
4016 static enum gimplify_status
4017 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
4019 tree expr
= *expr_p
, cond
;
4020 enum gimplify_status ret
, tret
;
4021 enum tree_code code
;
4023 cond
= gimple_boolify (COND_EXPR_COND (expr
));
4025 /* We need to handle && and || specially, as their gimplification
4026 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4027 code
= TREE_CODE (cond
);
4028 if (code
== TRUTH_ANDIF_EXPR
)
4029 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
4030 else if (code
== TRUTH_ORIF_EXPR
)
4031 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
4032 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_condexpr
, fb_rvalue
);
4033 COND_EXPR_COND (*expr_p
) = cond
;
4035 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
4036 is_gimple_val
, fb_rvalue
);
4037 ret
= MIN (ret
, tret
);
4038 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
4039 is_gimple_val
, fb_rvalue
);
4041 return MIN (ret
, tret
);
4044 /* Return true if evaluating EXPR could trap.
4045 EXPR is GENERIC, while tree_could_trap_p can be called
4049 generic_expr_could_trap_p (tree expr
)
4053 if (!expr
|| is_gimple_val (expr
))
4056 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
4059 n
= TREE_OPERAND_LENGTH (expr
);
4060 for (i
= 0; i
< n
; i
++)
4061 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
4067 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4076 The second form is used when *EXPR_P is of type void.
4078 PRE_P points to the list where side effects that must happen before
4079 *EXPR_P should be stored. */
4081 static enum gimplify_status
4082 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
4084 tree expr
= *expr_p
;
4085 tree type
= TREE_TYPE (expr
);
4086 location_t loc
= EXPR_LOCATION (expr
);
4087 tree tmp
, arm1
, arm2
;
4088 enum gimplify_status ret
;
4089 tree label_true
, label_false
, label_cont
;
4090 bool have_then_clause_p
, have_else_clause_p
;
4092 enum tree_code pred_code
;
4093 gimple_seq seq
= NULL
;
4095 /* If this COND_EXPR has a value, copy the values into a temporary within
4097 if (!VOID_TYPE_P (type
))
4099 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
4102 /* If either an rvalue is ok or we do not require an lvalue, create the
4103 temporary. But we cannot do that if the type is addressable. */
4104 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
4105 && !TREE_ADDRESSABLE (type
))
4107 if (gimplify_ctxp
->allow_rhs_cond_expr
4108 /* If either branch has side effects or could trap, it can't be
4109 evaluated unconditionally. */
4110 && !TREE_SIDE_EFFECTS (then_
)
4111 && !generic_expr_could_trap_p (then_
)
4112 && !TREE_SIDE_EFFECTS (else_
)
4113 && !generic_expr_could_trap_p (else_
))
4114 return gimplify_pure_cond_expr (expr_p
, pre_p
);
4116 tmp
= create_tmp_var (type
, "iftmp");
4120 /* Otherwise, only create and copy references to the values. */
4123 type
= build_pointer_type (type
);
4125 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4126 then_
= build_fold_addr_expr_loc (loc
, then_
);
4128 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4129 else_
= build_fold_addr_expr_loc (loc
, else_
);
4132 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
4134 tmp
= create_tmp_var (type
, "iftmp");
4135 result
= build_simple_mem_ref_loc (loc
, tmp
);
4138 /* Build the new then clause, `tmp = then_;'. But don't build the
4139 assignment if the value is void; in C++ it can be if it's a throw. */
4140 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4141 TREE_OPERAND (expr
, 1) = build2 (INIT_EXPR
, type
, tmp
, then_
);
4143 /* Similarly, build the new else clause, `tmp = else_;'. */
4144 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4145 TREE_OPERAND (expr
, 2) = build2 (INIT_EXPR
, type
, tmp
, else_
);
4147 TREE_TYPE (expr
) = void_type_node
;
4148 recalculate_side_effects (expr
);
4150 /* Move the COND_EXPR to the prequeue. */
4151 gimplify_stmt (&expr
, pre_p
);
4157 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4158 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
4159 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
4160 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
4162 /* Make sure the condition has BOOLEAN_TYPE. */
4163 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4165 /* Break apart && and || conditions. */
4166 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
4167 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
4169 expr
= shortcut_cond_expr (expr
);
4171 if (expr
!= *expr_p
)
4175 /* We can't rely on gimplify_expr to re-gimplify the expanded
4176 form properly, as cleanups might cause the target labels to be
4177 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4178 set up a conditional context. */
4179 gimple_push_condition ();
4180 gimplify_stmt (expr_p
, &seq
);
4181 gimple_pop_condition (pre_p
);
4182 gimple_seq_add_seq (pre_p
, seq
);
4188 /* Now do the normal gimplification. */
4190 /* Gimplify condition. */
4191 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
,
4192 is_gimple_condexpr_for_cond
, fb_rvalue
);
4193 if (ret
== GS_ERROR
)
4195 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
4197 gimple_push_condition ();
4199 have_then_clause_p
= have_else_clause_p
= false;
4200 label_true
= find_goto_label (TREE_OPERAND (expr
, 1));
4202 && DECL_CONTEXT (GOTO_DESTINATION (label_true
)) == current_function_decl
4203 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4204 have different locations, otherwise we end up with incorrect
4205 location information on the branches. */
4207 || !EXPR_HAS_LOCATION (expr
)
4208 || !rexpr_has_location (label_true
)
4209 || EXPR_LOCATION (expr
) == rexpr_location (label_true
)))
4211 have_then_clause_p
= true;
4212 label_true
= GOTO_DESTINATION (label_true
);
4215 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
4216 label_false
= find_goto_label (TREE_OPERAND (expr
, 2));
4218 && DECL_CONTEXT (GOTO_DESTINATION (label_false
)) == current_function_decl
4219 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4220 have different locations, otherwise we end up with incorrect
4221 location information on the branches. */
4223 || !EXPR_HAS_LOCATION (expr
)
4224 || !rexpr_has_location (label_false
)
4225 || EXPR_LOCATION (expr
) == rexpr_location (label_false
)))
4227 have_else_clause_p
= true;
4228 label_false
= GOTO_DESTINATION (label_false
);
4231 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
4233 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
4235 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
4237 gimple_set_no_warning (cond_stmt
, TREE_NO_WARNING (COND_EXPR_COND (expr
)));
4238 gimplify_seq_add_stmt (&seq
, cond_stmt
);
4239 gimple_stmt_iterator gsi
= gsi_last (seq
);
4240 maybe_fold_stmt (&gsi
);
4242 label_cont
= NULL_TREE
;
4243 if (!have_then_clause_p
)
4245 /* For if (...) {} else { code; } put label_true after
4247 if (TREE_OPERAND (expr
, 1) == NULL_TREE
4248 && !have_else_clause_p
4249 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
4250 label_cont
= label_true
;
4253 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
4254 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
4255 /* For if (...) { code; } else {} or
4256 if (...) { code; } else goto label; or
4257 if (...) { code; return; } else { ... }
4258 label_cont isn't needed. */
4259 if (!have_else_clause_p
4260 && TREE_OPERAND (expr
, 2) != NULL_TREE
4261 && gimple_seq_may_fallthru (seq
))
4264 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
4266 g
= gimple_build_goto (label_cont
);
4268 /* GIMPLE_COND's are very low level; they have embedded
4269 gotos. This particular embedded goto should not be marked
4270 with the location of the original COND_EXPR, as it would
4271 correspond to the COND_EXPR's condition, not the ELSE or the
4272 THEN arms. To avoid marking it with the wrong location, flag
4273 it as "no location". */
4274 gimple_set_do_not_emit_location (g
);
4276 gimplify_seq_add_stmt (&seq
, g
);
4280 if (!have_else_clause_p
)
4282 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
4283 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
4286 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
4288 gimple_pop_condition (pre_p
);
4289 gimple_seq_add_seq (pre_p
, seq
);
4291 if (ret
== GS_ERROR
)
4293 else if (have_then_clause_p
|| have_else_clause_p
)
4297 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4298 expr
= TREE_OPERAND (expr
, 0);
4299 gimplify_stmt (&expr
, pre_p
);
4306 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4307 to be marked addressable.
4309 We cannot rely on such an expression being directly markable if a temporary
4310 has been created by the gimplification. In this case, we create another
4311 temporary and initialize it with a copy, which will become a store after we
4312 mark it addressable. This can happen if the front-end passed us something
4313 that it could not mark addressable yet, like a Fortran pass-by-reference
4314 parameter (int) floatvar. */
4317 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
4319 while (handled_component_p (*expr_p
))
4320 expr_p
= &TREE_OPERAND (*expr_p
, 0);
4321 if (is_gimple_reg (*expr_p
))
4323 /* Do not allow an SSA name as the temporary. */
4324 tree var
= get_initialized_tmp_var (*expr_p
, seq_p
, NULL
, false);
4325 DECL_NOT_GIMPLE_REG_P (var
) = 1;
4330 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4331 a call to __builtin_memcpy. */
4333 static enum gimplify_status
4334 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
4337 tree t
, to
, to_ptr
, from
, from_ptr
;
4339 location_t loc
= EXPR_LOCATION (*expr_p
);
4341 to
= TREE_OPERAND (*expr_p
, 0);
4342 from
= TREE_OPERAND (*expr_p
, 1);
4344 /* Mark the RHS addressable. Beware that it may not be possible to do so
4345 directly if a temporary has been created by the gimplification. */
4346 prepare_gimple_addressable (&from
, seq_p
);
4348 mark_addressable (from
);
4349 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
4350 gimplify_arg (&from_ptr
, seq_p
, loc
);
4352 mark_addressable (to
);
4353 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4354 gimplify_arg (&to_ptr
, seq_p
, loc
);
4356 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
4358 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
4359 gimple_call_set_alloca_for_var (gs
, true);
4363 /* tmp = memcpy() */
4364 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4365 gimple_call_set_lhs (gs
, t
);
4366 gimplify_seq_add_stmt (seq_p
, gs
);
4368 *expr_p
= build_simple_mem_ref (t
);
4372 gimplify_seq_add_stmt (seq_p
, gs
);
4377 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4378 a call to __builtin_memset. In this case we know that the RHS is
4379 a CONSTRUCTOR with an empty element list. */
4381 static enum gimplify_status
4382 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
4385 tree t
, from
, to
, to_ptr
;
4387 location_t loc
= EXPR_LOCATION (*expr_p
);
4389 /* Assert our assumptions, to abort instead of producing wrong code
4390 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4391 not be immediately exposed. */
4392 from
= TREE_OPERAND (*expr_p
, 1);
4393 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
4394 from
= TREE_OPERAND (from
, 0);
4396 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
4397 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
4400 to
= TREE_OPERAND (*expr_p
, 0);
4402 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4403 gimplify_arg (&to_ptr
, seq_p
, loc
);
4404 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
4406 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
4410 /* tmp = memset() */
4411 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4412 gimple_call_set_lhs (gs
, t
);
4413 gimplify_seq_add_stmt (seq_p
, gs
);
4415 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
4419 gimplify_seq_add_stmt (seq_p
, gs
);
4424 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4425 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4426 assignment. Return non-null if we detect a potential overlap. */
4428 struct gimplify_init_ctor_preeval_data
4430 /* The base decl of the lhs object. May be NULL, in which case we
4431 have to assume the lhs is indirect. */
4434 /* The alias set of the lhs object. */
4435 alias_set_type lhs_alias_set
;
4439 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
4441 struct gimplify_init_ctor_preeval_data
*data
4442 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
4445 /* If we find the base object, obviously we have overlap. */
4446 if (data
->lhs_base_decl
== t
)
4449 /* If the constructor component is indirect, determine if we have a
4450 potential overlap with the lhs. The only bits of information we
4451 have to go on at this point are addressability and alias sets. */
4452 if ((INDIRECT_REF_P (t
)
4453 || TREE_CODE (t
) == MEM_REF
)
4454 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4455 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
4458 /* If the constructor component is a call, determine if it can hide a
4459 potential overlap with the lhs through an INDIRECT_REF like above.
4460 ??? Ugh - this is completely broken. In fact this whole analysis
4461 doesn't look conservative. */
4462 if (TREE_CODE (t
) == CALL_EXPR
)
4464 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
4466 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
4467 if (POINTER_TYPE_P (TREE_VALUE (type
))
4468 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4469 && alias_sets_conflict_p (data
->lhs_alias_set
,
4471 (TREE_TYPE (TREE_VALUE (type
)))))
4475 if (IS_TYPE_OR_DECL_P (t
))
4480 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4481 force values that overlap with the lhs (as described by *DATA)
4482 into temporaries. */
4485 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4486 struct gimplify_init_ctor_preeval_data
*data
)
4488 enum gimplify_status one
;
4490 /* If the value is constant, then there's nothing to pre-evaluate. */
4491 if (TREE_CONSTANT (*expr_p
))
4493 /* Ensure it does not have side effects, it might contain a reference to
4494 the object we're initializing. */
4495 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
4499 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4500 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
4503 /* Recurse for nested constructors. */
4504 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
4506 unsigned HOST_WIDE_INT ix
;
4507 constructor_elt
*ce
;
4508 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
4510 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
4511 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
4516 /* If this is a variable sized type, we must remember the size. */
4517 maybe_with_size_expr (expr_p
);
4519 /* Gimplify the constructor element to something appropriate for the rhs
4520 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4521 the gimplifier will consider this a store to memory. Doing this
4522 gimplification now means that we won't have to deal with complicated
4523 language-specific trees, nor trees like SAVE_EXPR that can induce
4524 exponential search behavior. */
4525 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
4526 if (one
== GS_ERROR
)
4532 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4533 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4534 always be true for all scalars, since is_gimple_mem_rhs insists on a
4535 temporary variable for them. */
4536 if (DECL_P (*expr_p
))
4539 /* If this is of variable size, we have no choice but to assume it doesn't
4540 overlap since we can't make a temporary for it. */
4541 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
4544 /* Otherwise, we must search for overlap ... */
4545 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
4548 /* ... and if found, force the value into a temporary. */
4549 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
4552 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4553 a RANGE_EXPR in a CONSTRUCTOR for an array.
4557 object[var] = value;
4564 We increment var _after_ the loop exit check because we might otherwise
4565 fail if upper == TYPE_MAX_VALUE (type for upper).
4567 Note that we never have to deal with SAVE_EXPRs here, because this has
4568 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4570 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
4571 gimple_seq
*, bool);
4574 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
4575 tree value
, tree array_elt_type
,
4576 gimple_seq
*pre_p
, bool cleared
)
4578 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
4579 tree var
, var_type
, cref
, tmp
;
4581 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
4582 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
4583 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
4585 /* Create and initialize the index variable. */
4586 var_type
= TREE_TYPE (upper
);
4587 var
= create_tmp_var (var_type
);
4588 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
4590 /* Add the loop entry label. */
4591 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
4593 /* Build the reference. */
4594 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4595 var
, NULL_TREE
, NULL_TREE
);
4597 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4598 the store. Otherwise just assign value to the reference. */
4600 if (TREE_CODE (value
) == CONSTRUCTOR
)
4601 /* NB we might have to call ourself recursively through
4602 gimplify_init_ctor_eval if the value is a constructor. */
4603 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4606 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
4608 /* We exit the loop when the index var is equal to the upper bound. */
4609 gimplify_seq_add_stmt (pre_p
,
4610 gimple_build_cond (EQ_EXPR
, var
, upper
,
4611 loop_exit_label
, fall_thru_label
));
4613 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
4615 /* Otherwise, increment the index var... */
4616 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
4617 fold_convert (var_type
, integer_one_node
));
4618 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
4620 /* ...and jump back to the loop entry. */
4621 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
4623 /* Add the loop exit label. */
4624 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
4627 /* Return true if FDECL is accessing a field that is zero sized. */
4630 zero_sized_field_decl (const_tree fdecl
)
4632 if (TREE_CODE (fdecl
) == FIELD_DECL
&& DECL_SIZE (fdecl
)
4633 && integer_zerop (DECL_SIZE (fdecl
)))
4638 /* Return true if TYPE is zero sized. */
4641 zero_sized_type (const_tree type
)
4643 if (AGGREGATE_TYPE_P (type
) && TYPE_SIZE (type
)
4644 && integer_zerop (TYPE_SIZE (type
)))
4649 /* A subroutine of gimplify_init_constructor. Generate individual
4650 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4651 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4652 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4656 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
4657 gimple_seq
*pre_p
, bool cleared
)
4659 tree array_elt_type
= NULL
;
4660 unsigned HOST_WIDE_INT ix
;
4661 tree purpose
, value
;
4663 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
4664 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
4666 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
4670 /* NULL values are created above for gimplification errors. */
4674 if (cleared
&& initializer_zerop (value
))
4677 /* ??? Here's to hoping the front end fills in all of the indices,
4678 so we don't have to figure out what's missing ourselves. */
4679 gcc_assert (purpose
);
4681 /* Skip zero-sized fields, unless value has side-effects. This can
4682 happen with calls to functions returning a zero-sized type, which
4683 we shouldn't discard. As a number of downstream passes don't
4684 expect sets of zero-sized fields, we rely on the gimplification of
4685 the MODIFY_EXPR we make below to drop the assignment statement. */
4686 if (! TREE_SIDE_EFFECTS (value
) && zero_sized_field_decl (purpose
))
4689 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4691 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4693 tree lower
= TREE_OPERAND (purpose
, 0);
4694 tree upper
= TREE_OPERAND (purpose
, 1);
4696 /* If the lower bound is equal to upper, just treat it as if
4697 upper was the index. */
4698 if (simple_cst_equal (lower
, upper
))
4702 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
4703 array_elt_type
, pre_p
, cleared
);
4710 /* Do not use bitsizetype for ARRAY_REF indices. */
4711 if (TYPE_DOMAIN (TREE_TYPE (object
)))
4713 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
4715 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4716 purpose
, NULL_TREE
, NULL_TREE
);
4720 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
4721 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
4722 unshare_expr (object
), purpose
, NULL_TREE
);
4725 if (TREE_CODE (value
) == CONSTRUCTOR
4726 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
4727 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4731 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
4732 gimplify_and_add (init
, pre_p
);
4738 /* Return the appropriate RHS predicate for this LHS. */
4741 rhs_predicate_for (tree lhs
)
4743 if (is_gimple_reg (lhs
))
4744 return is_gimple_reg_rhs_or_call
;
4746 return is_gimple_mem_rhs_or_call
;
4749 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4750 before the LHS has been gimplified. */
4752 static gimple_predicate
4753 initial_rhs_predicate_for (tree lhs
)
4755 if (is_gimple_reg_type (TREE_TYPE (lhs
)))
4756 return is_gimple_reg_rhs_or_call
;
4758 return is_gimple_mem_rhs_or_call
;
4761 /* Gimplify a C99 compound literal expression. This just means adding
4762 the DECL_EXPR before the current statement and using its anonymous
4765 static enum gimplify_status
4766 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
4767 bool (*gimple_test_f
) (tree
),
4768 fallback_t fallback
)
4770 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
4771 tree decl
= DECL_EXPR_DECL (decl_s
);
4772 tree init
= DECL_INITIAL (decl
);
4773 /* Mark the decl as addressable if the compound literal
4774 expression is addressable now, otherwise it is marked too late
4775 after we gimplify the initialization expression. */
4776 if (TREE_ADDRESSABLE (*expr_p
))
4777 TREE_ADDRESSABLE (decl
) = 1;
4778 /* Otherwise, if we don't need an lvalue and have a literal directly
4779 substitute it. Check if it matches the gimple predicate, as
4780 otherwise we'd generate a new temporary, and we can as well just
4781 use the decl we already have. */
4782 else if (!TREE_ADDRESSABLE (decl
)
4783 && !TREE_THIS_VOLATILE (decl
)
4785 && (fallback
& fb_lvalue
) == 0
4786 && gimple_test_f (init
))
4792 /* If the decl is not addressable, then it is being used in some
4793 expression or on the right hand side of a statement, and it can
4794 be put into a readonly data section. */
4795 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
4796 TREE_READONLY (decl
) = 1;
4798 /* This decl isn't mentioned in the enclosing block, so add it to the
4799 list of temps. FIXME it seems a bit of a kludge to say that
4800 anonymous artificial vars aren't pushed, but everything else is. */
4801 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
4802 gimple_add_tmp_var (decl
);
4804 gimplify_and_add (decl_s
, pre_p
);
4809 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4810 return a new CONSTRUCTOR if something changed. */
4813 optimize_compound_literals_in_ctor (tree orig_ctor
)
4815 tree ctor
= orig_ctor
;
4816 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
4817 unsigned int idx
, num
= vec_safe_length (elts
);
4819 for (idx
= 0; idx
< num
; idx
++)
4821 tree value
= (*elts
)[idx
].value
;
4822 tree newval
= value
;
4823 if (TREE_CODE (value
) == CONSTRUCTOR
)
4824 newval
= optimize_compound_literals_in_ctor (value
);
4825 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
4827 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
4828 tree decl
= DECL_EXPR_DECL (decl_s
);
4829 tree init
= DECL_INITIAL (decl
);
4831 if (!TREE_ADDRESSABLE (value
)
4832 && !TREE_ADDRESSABLE (decl
)
4834 && TREE_CODE (init
) == CONSTRUCTOR
)
4835 newval
= optimize_compound_literals_in_ctor (init
);
4837 if (newval
== value
)
4840 if (ctor
== orig_ctor
)
4842 ctor
= copy_node (orig_ctor
);
4843 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
4844 elts
= CONSTRUCTOR_ELTS (ctor
);
4846 (*elts
)[idx
].value
= newval
;
4851 /* A subroutine of gimplify_modify_expr. Break out elements of a
4852 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4854 Note that we still need to clear any elements that don't have explicit
4855 initializers, so if not all elements are initialized we keep the
4856 original MODIFY_EXPR, we just remove all of the constructor elements.
4858 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4859 GS_ERROR if we would have to create a temporary when gimplifying
4860 this constructor. Otherwise, return GS_OK.
4862 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4864 static enum gimplify_status
4865 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4866 bool want_value
, bool notify_temp_creation
)
4868 tree object
, ctor
, type
;
4869 enum gimplify_status ret
;
4870 vec
<constructor_elt
, va_gc
> *elts
;
4872 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
4874 if (!notify_temp_creation
)
4876 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
4877 is_gimple_lvalue
, fb_lvalue
);
4878 if (ret
== GS_ERROR
)
4882 object
= TREE_OPERAND (*expr_p
, 0);
4883 ctor
= TREE_OPERAND (*expr_p
, 1)
4884 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
4885 type
= TREE_TYPE (ctor
);
4886 elts
= CONSTRUCTOR_ELTS (ctor
);
4889 switch (TREE_CODE (type
))
4893 case QUAL_UNION_TYPE
:
4896 /* Use readonly data for initializers of this or smaller size
4897 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4899 const HOST_WIDE_INT min_unique_size
= 64;
4900 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4901 is smaller than this, use readonly data. */
4902 const int unique_nonzero_ratio
= 8;
4903 /* True if a single access of the object must be ensured. This is the
4904 case if the target is volatile, the type is non-addressable and more
4905 than one field need to be assigned. */
4906 const bool ensure_single_access
4907 = TREE_THIS_VOLATILE (object
)
4908 && !TREE_ADDRESSABLE (type
)
4909 && vec_safe_length (elts
) > 1;
4910 struct gimplify_init_ctor_preeval_data preeval_data
;
4911 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
4912 HOST_WIDE_INT num_unique_nonzero_elements
;
4913 bool cleared
, complete_p
, valid_const_initializer
;
4915 /* Aggregate types must lower constructors to initialization of
4916 individual elements. The exception is that a CONSTRUCTOR node
4917 with no elements indicates zero-initialization of the whole. */
4918 if (vec_safe_is_empty (elts
))
4920 if (notify_temp_creation
)
4925 /* Fetch information about the constructor to direct later processing.
4926 We might want to make static versions of it in various cases, and
4927 can only do so if it known to be a valid constant initializer. */
4928 valid_const_initializer
4929 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
4930 &num_unique_nonzero_elements
,
4931 &num_ctor_elements
, &complete_p
);
4933 /* If a const aggregate variable is being initialized, then it
4934 should never be a lose to promote the variable to be static. */
4935 if (valid_const_initializer
4936 && num_nonzero_elements
> 1
4937 && TREE_READONLY (object
)
4939 && !DECL_REGISTER (object
)
4940 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
))
4941 /* For ctors that have many repeated nonzero elements
4942 represented through RANGE_EXPRs, prefer initializing
4943 those through runtime loops over copies of large amounts
4944 of data from readonly data section. */
4945 && (num_unique_nonzero_elements
4946 > num_nonzero_elements
/ unique_nonzero_ratio
4947 || ((unsigned HOST_WIDE_INT
) int_size_in_bytes (type
)
4948 <= (unsigned HOST_WIDE_INT
) min_unique_size
)))
4950 if (notify_temp_creation
)
4953 DECL_INITIAL (object
) = ctor
;
4954 TREE_STATIC (object
) = 1;
4955 if (!DECL_NAME (object
))
4956 DECL_NAME (object
) = create_tmp_var_name ("C");
4957 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
4959 /* ??? C++ doesn't automatically append a .<number> to the
4960 assembler name, and even when it does, it looks at FE private
4961 data structures to figure out what that number should be,
4962 which are not set for this variable. I suppose this is
4963 important for local statics for inline functions, which aren't
4964 "local" in the object file sense. So in order to get a unique
4965 TU-local symbol, we must invoke the lhd version now. */
4966 lhd_set_decl_assembler_name (object
);
4968 *expr_p
= NULL_TREE
;
4972 /* If there are "lots" of initialized elements, even discounting
4973 those that are not address constants (and thus *must* be
4974 computed at runtime), then partition the constructor into
4975 constant and non-constant parts. Block copy the constant
4976 parts in, then generate code for the non-constant parts. */
4977 /* TODO. There's code in cp/typeck.c to do this. */
4979 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
4980 /* store_constructor will ignore the clearing of variable-sized
4981 objects. Initializers for such objects must explicitly set
4982 every field that needs to be set. */
4984 else if (!complete_p
)
4985 /* If the constructor isn't complete, clear the whole object
4986 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4988 ??? This ought not to be needed. For any element not present
4989 in the initializer, we should simply set them to zero. Except
4990 we'd need to *find* the elements that are not present, and that
4991 requires trickery to avoid quadratic compile-time behavior in
4992 large cases or excessive memory use in small cases. */
4993 cleared
= !CONSTRUCTOR_NO_CLEARING (ctor
);
4994 else if (num_ctor_elements
- num_nonzero_elements
4995 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
4996 && num_nonzero_elements
< num_ctor_elements
/ 4)
4997 /* If there are "lots" of zeros, it's more efficient to clear
4998 the memory and then set the nonzero elements. */
5000 else if (ensure_single_access
&& num_nonzero_elements
== 0)
5001 /* If a single access to the target must be ensured and all elements
5002 are zero, then it's optimal to clear whatever their number. */
5007 /* If there are "lots" of initialized elements, and all of them
5008 are valid address constants, then the entire initializer can
5009 be dropped to memory, and then memcpy'd out. Don't do this
5010 for sparse arrays, though, as it's more efficient to follow
5011 the standard CONSTRUCTOR behavior of memset followed by
5012 individual element initialization. Also don't do this for small
5013 all-zero initializers (which aren't big enough to merit
5014 clearing), and don't try to make bitwise copies of
5015 TREE_ADDRESSABLE types. */
5016 if (valid_const_initializer
5018 && !(cleared
|| num_nonzero_elements
== 0)
5019 && !TREE_ADDRESSABLE (type
))
5021 HOST_WIDE_INT size
= int_size_in_bytes (type
);
5024 /* ??? We can still get unbounded array types, at least
5025 from the C++ front end. This seems wrong, but attempt
5026 to work around it for now. */
5029 size
= int_size_in_bytes (TREE_TYPE (object
));
5031 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
5034 /* Find the maximum alignment we can assume for the object. */
5035 /* ??? Make use of DECL_OFFSET_ALIGN. */
5036 if (DECL_P (object
))
5037 align
= DECL_ALIGN (object
);
5039 align
= TYPE_ALIGN (type
);
5041 /* Do a block move either if the size is so small as to make
5042 each individual move a sub-unit move on average, or if it
5043 is so large as to make individual moves inefficient. */
5045 && num_nonzero_elements
> 1
5046 /* For ctors that have many repeated nonzero elements
5047 represented through RANGE_EXPRs, prefer initializing
5048 those through runtime loops over copies of large amounts
5049 of data from readonly data section. */
5050 && (num_unique_nonzero_elements
5051 > num_nonzero_elements
/ unique_nonzero_ratio
5052 || size
<= min_unique_size
)
5053 && (size
< num_nonzero_elements
5054 || !can_move_by_pieces (size
, align
)))
5056 if (notify_temp_creation
)
5059 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
5060 ctor
= tree_output_constant_def (ctor
);
5061 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
5062 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
5063 TREE_OPERAND (*expr_p
, 1) = ctor
;
5065 /* This is no longer an assignment of a CONSTRUCTOR, but
5066 we still may have processing to do on the LHS. So
5067 pretend we didn't do anything here to let that happen. */
5068 return GS_UNHANDLED
;
5072 /* If a single access to the target must be ensured and there are
5073 nonzero elements or the zero elements are not assigned en masse,
5074 initialize the target from a temporary. */
5075 if (ensure_single_access
&& (num_nonzero_elements
> 0 || !cleared
))
5077 if (notify_temp_creation
)
5080 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
5081 TREE_OPERAND (*expr_p
, 0) = temp
;
5082 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
5084 build2 (MODIFY_EXPR
, void_type_node
,
5089 if (notify_temp_creation
)
5092 /* If there are nonzero elements and if needed, pre-evaluate to capture
5093 elements overlapping with the lhs into temporaries. We must do this
5094 before clearing to fetch the values before they are zeroed-out. */
5095 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
5097 preeval_data
.lhs_base_decl
= get_base_address (object
);
5098 if (!DECL_P (preeval_data
.lhs_base_decl
))
5099 preeval_data
.lhs_base_decl
= NULL
;
5100 preeval_data
.lhs_alias_set
= get_alias_set (object
);
5102 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
5103 pre_p
, post_p
, &preeval_data
);
5106 bool ctor_has_side_effects_p
5107 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
5111 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5112 Note that we still have to gimplify, in order to handle the
5113 case of variable sized types. Avoid shared tree structures. */
5114 CONSTRUCTOR_ELTS (ctor
) = NULL
;
5115 TREE_SIDE_EFFECTS (ctor
) = 0;
5116 object
= unshare_expr (object
);
5117 gimplify_stmt (expr_p
, pre_p
);
5120 /* If we have not block cleared the object, or if there are nonzero
5121 elements in the constructor, or if the constructor has side effects,
5122 add assignments to the individual scalar fields of the object. */
5124 || num_nonzero_elements
> 0
5125 || ctor_has_side_effects_p
)
5126 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
);
5128 *expr_p
= NULL_TREE
;
5136 if (notify_temp_creation
)
5139 /* Extract the real and imaginary parts out of the ctor. */
5140 gcc_assert (elts
->length () == 2);
5141 r
= (*elts
)[0].value
;
5142 i
= (*elts
)[1].value
;
5143 if (r
== NULL
|| i
== NULL
)
5145 tree zero
= build_zero_cst (TREE_TYPE (type
));
5152 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5153 represent creation of a complex value. */
5154 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
5156 ctor
= build_complex (type
, r
, i
);
5157 TREE_OPERAND (*expr_p
, 1) = ctor
;
5161 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
5162 TREE_OPERAND (*expr_p
, 1) = ctor
;
5163 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
5166 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
5174 unsigned HOST_WIDE_INT ix
;
5175 constructor_elt
*ce
;
5177 if (notify_temp_creation
)
5180 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5181 if (TREE_CONSTANT (ctor
))
5183 bool constant_p
= true;
5186 /* Even when ctor is constant, it might contain non-*_CST
5187 elements, such as addresses or trapping values like
5188 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5189 in VECTOR_CST nodes. */
5190 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
5191 if (!CONSTANT_CLASS_P (value
))
5199 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
5203 TREE_CONSTANT (ctor
) = 0;
5206 /* Vector types use CONSTRUCTOR all the way through gimple
5207 compilation as a general initializer. */
5208 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
5210 enum gimplify_status tret
;
5211 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
5213 if (tret
== GS_ERROR
)
5215 else if (TREE_STATIC (ctor
)
5216 && !initializer_constant_valid_p (ce
->value
,
5217 TREE_TYPE (ce
->value
)))
5218 TREE_STATIC (ctor
) = 0;
5220 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
5221 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
5226 /* So how did we get a CONSTRUCTOR for a scalar type? */
5230 if (ret
== GS_ERROR
)
5232 /* If we have gimplified both sides of the initializer but have
5233 not emitted an assignment, do so now. */
5236 tree lhs
= TREE_OPERAND (*expr_p
, 0);
5237 tree rhs
= TREE_OPERAND (*expr_p
, 1);
5238 if (want_value
&& object
== lhs
)
5239 lhs
= unshare_expr (lhs
);
5240 gassign
*init
= gimple_build_assign (lhs
, rhs
);
5241 gimplify_seq_add_stmt (pre_p
, init
);
5255 /* Given a pointer value OP0, return a simplified version of an
5256 indirection through OP0, or NULL_TREE if no simplification is
5257 possible. This may only be applied to a rhs of an expression.
5258 Note that the resulting type may be different from the type pointed
5259 to in the sense that it is still compatible from the langhooks
5263 gimple_fold_indirect_ref_rhs (tree t
)
5265 return gimple_fold_indirect_ref (t
);
5268 /* Subroutine of gimplify_modify_expr to do simplifications of
5269 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5270 something changes. */
5272 static enum gimplify_status
5273 gimplify_modify_expr_rhs (tree
*expr_p
, tree
*from_p
, tree
*to_p
,
5274 gimple_seq
*pre_p
, gimple_seq
*post_p
,
5277 enum gimplify_status ret
= GS_UNHANDLED
;
5283 switch (TREE_CODE (*from_p
))
5286 /* If we're assigning from a read-only variable initialized with
5287 a constructor and not volatile, do the direct assignment from
5288 the constructor, but only if the target is not volatile either
5289 since this latter assignment might end up being done on a per
5290 field basis. However, if the target is volatile and the type
5291 is aggregate and non-addressable, gimplify_init_constructor
5292 knows that it needs to ensure a single access to the target
5293 and it will return GS_OK only in this case. */
5294 if (TREE_READONLY (*from_p
)
5295 && DECL_INITIAL (*from_p
)
5296 && TREE_CODE (DECL_INITIAL (*from_p
)) == CONSTRUCTOR
5297 && !TREE_THIS_VOLATILE (*from_p
)
5298 && (!TREE_THIS_VOLATILE (*to_p
)
5299 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p
))
5300 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p
)))))
5302 tree old_from
= *from_p
;
5303 enum gimplify_status subret
;
5305 /* Move the constructor into the RHS. */
5306 *from_p
= unshare_expr (DECL_INITIAL (*from_p
));
5308 /* Let's see if gimplify_init_constructor will need to put
5310 subret
= gimplify_init_constructor (expr_p
, NULL
, NULL
,
5312 if (subret
== GS_ERROR
)
5314 /* If so, revert the change. */
5326 /* If we have code like
5330 where the type of "x" is a (possibly cv-qualified variant
5331 of "A"), treat the entire expression as identical to "x".
5332 This kind of code arises in C++ when an object is bound
5333 to a const reference, and if "x" is a TARGET_EXPR we want
5334 to take advantage of the optimization below. */
5335 bool volatile_p
= TREE_THIS_VOLATILE (*from_p
);
5336 tree t
= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p
, 0));
5339 if (TREE_THIS_VOLATILE (t
) != volatile_p
)
5342 t
= build_simple_mem_ref_loc (EXPR_LOCATION (*from_p
),
5343 build_fold_addr_expr (t
));
5344 if (REFERENCE_CLASS_P (t
))
5345 TREE_THIS_VOLATILE (t
) = volatile_p
;
5356 /* If we are initializing something from a TARGET_EXPR, strip the
5357 TARGET_EXPR and initialize it directly, if possible. This can't
5358 be done if the initializer is void, since that implies that the
5359 temporary is set in some non-trivial way.
5361 ??? What about code that pulls out the temp and uses it
5362 elsewhere? I think that such code never uses the TARGET_EXPR as
5363 an initializer. If I'm wrong, we'll die because the temp won't
5364 have any RTL. In that case, I guess we'll need to replace
5365 references somehow. */
5366 tree init
= TARGET_EXPR_INITIAL (*from_p
);
5369 && (TREE_CODE (*expr_p
) != MODIFY_EXPR
5370 || !TARGET_EXPR_NO_ELIDE (*from_p
))
5371 && !VOID_TYPE_P (TREE_TYPE (init
)))
5381 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5383 gimplify_compound_expr (from_p
, pre_p
, true);
5389 /* If we already made some changes, let the front end have a
5390 crack at this before we break it down. */
5391 if (ret
!= GS_UNHANDLED
)
5393 /* If we're initializing from a CONSTRUCTOR, break this into
5394 individual MODIFY_EXPRs. */
5395 return gimplify_init_constructor (expr_p
, pre_p
, post_p
, want_value
,
5399 /* If we're assigning to a non-register type, push the assignment
5400 down into the branches. This is mandatory for ADDRESSABLE types,
5401 since we cannot generate temporaries for such, but it saves a
5402 copy in other cases as well. */
5403 if (!is_gimple_reg_type (TREE_TYPE (*from_p
)))
5405 /* This code should mirror the code in gimplify_cond_expr. */
5406 enum tree_code code
= TREE_CODE (*expr_p
);
5407 tree cond
= *from_p
;
5408 tree result
= *to_p
;
5410 ret
= gimplify_expr (&result
, pre_p
, post_p
,
5411 is_gimple_lvalue
, fb_lvalue
);
5412 if (ret
!= GS_ERROR
)
5415 /* If we are going to write RESULT more than once, clear
5416 TREE_READONLY flag, otherwise we might incorrectly promote
5417 the variable to static const and initialize it at compile
5418 time in one of the branches. */
5420 && TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
5421 && TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5422 TREE_READONLY (result
) = 0;
5423 if (TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
)
5424 TREE_OPERAND (cond
, 1)
5425 = build2 (code
, void_type_node
, result
,
5426 TREE_OPERAND (cond
, 1));
5427 if (TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5428 TREE_OPERAND (cond
, 2)
5429 = build2 (code
, void_type_node
, unshare_expr (result
),
5430 TREE_OPERAND (cond
, 2));
5432 TREE_TYPE (cond
) = void_type_node
;
5433 recalculate_side_effects (cond
);
5437 gimplify_and_add (cond
, pre_p
);
5438 *expr_p
= unshare_expr (result
);
5447 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5448 return slot so that we don't generate a temporary. */
5449 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p
)
5450 && aggregate_value_p (*from_p
, *from_p
))
5454 if (!(rhs_predicate_for (*to_p
))(*from_p
))
5455 /* If we need a temporary, *to_p isn't accurate. */
5457 /* It's OK to use the return slot directly unless it's an NRV. */
5458 else if (TREE_CODE (*to_p
) == RESULT_DECL
5459 && DECL_NAME (*to_p
) == NULL_TREE
5460 && needs_to_live_in_memory (*to_p
))
5462 else if (is_gimple_reg_type (TREE_TYPE (*to_p
))
5463 || (DECL_P (*to_p
) && DECL_REGISTER (*to_p
)))
5464 /* Don't force regs into memory. */
5466 else if (TREE_CODE (*expr_p
) == INIT_EXPR
)
5467 /* It's OK to use the target directly if it's being
5470 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p
)))
5472 /* Always use the target and thus RSO for variable-sized types.
5473 GIMPLE cannot deal with a variable-sized assignment
5474 embedded in a call statement. */
5476 else if (TREE_CODE (*to_p
) != SSA_NAME
5477 && (!is_gimple_variable (*to_p
)
5478 || needs_to_live_in_memory (*to_p
)))
5479 /* Don't use the original target if it's already addressable;
5480 if its address escapes, and the called function uses the
5481 NRV optimization, a conforming program could see *to_p
5482 change before the called function returns; see c++/19317.
5483 When optimizing, the return_slot pass marks more functions
5484 as safe after we have escape info. */
5491 CALL_EXPR_RETURN_SLOT_OPT (*from_p
) = 1;
5492 mark_addressable (*to_p
);
5497 case WITH_SIZE_EXPR
:
5498 /* Likewise for calls that return an aggregate of non-constant size,
5499 since we would not be able to generate a temporary at all. */
5500 if (TREE_CODE (TREE_OPERAND (*from_p
, 0)) == CALL_EXPR
)
5502 *from_p
= TREE_OPERAND (*from_p
, 0);
5503 /* We don't change ret in this case because the
5504 WITH_SIZE_EXPR might have been added in
5505 gimplify_modify_expr, so returning GS_OK would lead to an
5511 /* If we're initializing from a container, push the initialization
5513 case CLEANUP_POINT_EXPR
:
5515 case STATEMENT_LIST
:
5517 tree wrap
= *from_p
;
5520 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_min_lval
,
5522 if (ret
!= GS_ERROR
)
5525 t
= voidify_wrapper_expr (wrap
, *expr_p
);
5526 gcc_assert (t
== *expr_p
);
5530 gimplify_and_add (wrap
, pre_p
);
5531 *expr_p
= unshare_expr (*to_p
);
5539 /* Pull out compound literal expressions from a NOP_EXPR.
5540 Those are created in the C FE to drop qualifiers during
5541 lvalue conversion. */
5542 if ((TREE_CODE (TREE_OPERAND (*from_p
, 0)) == COMPOUND_LITERAL_EXPR
)
5543 && tree_ssa_useless_type_conversion (*from_p
))
5545 *from_p
= TREE_OPERAND (*from_p
, 0);
5551 case COMPOUND_LITERAL_EXPR
:
5553 tree complit
= TREE_OPERAND (*expr_p
, 1);
5554 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (complit
);
5555 tree decl
= DECL_EXPR_DECL (decl_s
);
5556 tree init
= DECL_INITIAL (decl
);
5558 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5559 into struct T x = { 0, 1, 2 } if the address of the
5560 compound literal has never been taken. */
5561 if (!TREE_ADDRESSABLE (complit
)
5562 && !TREE_ADDRESSABLE (decl
)
5565 *expr_p
= copy_node (*expr_p
);
5566 TREE_OPERAND (*expr_p
, 1) = init
;
5581 /* Return true if T looks like a valid GIMPLE statement. */
/* NOTE(review): this block is a garbled extraction -- statement fragments
   are split across physical lines and several pieces of the original
   switch (the function's return type, braces, and many case labels and
   return statements) are missing where the embedded numbering jumps.
   The annotations below describe only what is visible; restore the full
   text from the upstream file before compiling.  */
5584 is_gimple_stmt (tree t
)
/* Dispatch on the tree code of T.  */
5586 const enum tree_code code
= TREE_CODE (t
);
5591 /* The only valid NOP_EXPR is the empty statement. */
5592 return IS_EMPTY_STMT (t
);
5596 /* These are only valid if they're void. */
5597 return TREE_TYPE (t
) == NULL
|| VOID_TYPE_P (TREE_TYPE (t
));
/* Statement-only codes: labels and exception-handling constructs.  */
5603 case CASE_LABEL_EXPR
:
5604 case TRY_CATCH_EXPR
:
5605 case TRY_FINALLY_EXPR
:
5606 case EH_FILTER_EXPR
:
5609 case STATEMENT_LIST
:
/* OpenACC / OpenMP directive codes; the line-number gaps (5610-5613,
   5615-5616, 5619-5635) indicate further case labels were lost in
   extraction.  */
5614 case OACC_HOST_DATA
:
5617 case OACC_ENTER_DATA
:
5618 case OACC_EXIT_DATA
:
5623 case OMP_DISTRIBUTE
:
5636 case OMP_TARGET_DATA
:
5637 case OMP_TARGET_UPDATE
:
5638 case OMP_TARGET_ENTER_DATA
:
5639 case OMP_TARGET_EXIT_DATA
:
5642 /* These are always void. */
5648 /* These are valid regardless of their type. */
5657 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5658 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5660 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5661 other, unmodified part of the complex object just before the total store.
5662 As a consequence, if the object is still uninitialized, an undefined value
5663 will be loaded into a register, which may result in a spurious exception
5664 if the register is floating-point and the value happens to be a signaling
5665 NaN for example. Then the fully-fledged complex operations lowering pass
5666 followed by a DCE pass are necessary in order to fix things up. */
5668 static enum gimplify_status
5669 gimplify_modify_expr_complex_part (tree
*expr_p
, gimple_seq
*pre_p
,
5672 enum tree_code code
, ocode
;
5673 tree lhs
, rhs
, new_rhs
, other
, realpart
, imagpart
;
5675 lhs
= TREE_OPERAND (*expr_p
, 0);
5676 rhs
= TREE_OPERAND (*expr_p
, 1);
5677 code
= TREE_CODE (lhs
);
5678 lhs
= TREE_OPERAND (lhs
, 0);
5680 ocode
= code
== REALPART_EXPR
? IMAGPART_EXPR
: REALPART_EXPR
;
5681 other
= build1 (ocode
, TREE_TYPE (rhs
), lhs
);
5682 TREE_NO_WARNING (other
) = 1;
5683 other
= get_formal_tmp_var (other
, pre_p
);
5685 realpart
= code
== REALPART_EXPR
? rhs
: other
;
5686 imagpart
= code
== REALPART_EXPR
? other
: rhs
;
5688 if (TREE_CONSTANT (realpart
) && TREE_CONSTANT (imagpart
))
5689 new_rhs
= build_complex (TREE_TYPE (lhs
), realpart
, imagpart
);
5691 new_rhs
= build2 (COMPLEX_EXPR
, TREE_TYPE (lhs
), realpart
, imagpart
);
5693 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (lhs
, new_rhs
));
5694 *expr_p
= (want_value
) ? rhs
: NULL_TREE
;
5699 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5705 PRE_P points to the list where side effects that must happen before
5706 *EXPR_P should be stored.
5708 POST_P points to the list where side effects that must happen after
5709 *EXPR_P should be stored.
5711 WANT_VALUE is nonzero iff we want to use the value of this expression
5712 in another expression. */
5714 static enum gimplify_status
5715 gimplify_modify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5718 tree
*from_p
= &TREE_OPERAND (*expr_p
, 1);
5719 tree
*to_p
= &TREE_OPERAND (*expr_p
, 0);
5720 enum gimplify_status ret
= GS_UNHANDLED
;
5722 location_t loc
= EXPR_LOCATION (*expr_p
);
5723 gimple_stmt_iterator gsi
;
5725 gcc_assert (TREE_CODE (*expr_p
) == MODIFY_EXPR
5726 || TREE_CODE (*expr_p
) == INIT_EXPR
);
5728 /* Trying to simplify a clobber using normal logic doesn't work,
5729 so handle it here. */
5730 if (TREE_CLOBBER_P (*from_p
))
5732 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5733 if (ret
== GS_ERROR
)
5735 gcc_assert (!want_value
);
5736 if (!VAR_P (*to_p
) && TREE_CODE (*to_p
) != MEM_REF
)
5738 tree addr
= get_initialized_tmp_var (build_fold_addr_expr (*to_p
),
5740 *to_p
= build_simple_mem_ref_loc (EXPR_LOCATION (*to_p
), addr
);
5742 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (*to_p
, *from_p
));
5747 /* Insert pointer conversions required by the middle-end that are not
5748 required by the frontend. This fixes middle-end type checking for
5749 for example gcc.dg/redecl-6.c. */
5750 if (POINTER_TYPE_P (TREE_TYPE (*to_p
)))
5752 STRIP_USELESS_TYPE_CONVERSION (*from_p
);
5753 if (!useless_type_conversion_p (TREE_TYPE (*to_p
), TREE_TYPE (*from_p
)))
5754 *from_p
= fold_convert_loc (loc
, TREE_TYPE (*to_p
), *from_p
);
5757 /* See if any simplifications can be done based on what the RHS is. */
5758 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5760 if (ret
!= GS_UNHANDLED
)
5763 /* For zero sized types only gimplify the left hand side and right hand
5764 side as statements and throw away the assignment. Do this after
5765 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5767 if (zero_sized_type (TREE_TYPE (*from_p
))
5769 /* Don't do this for calls that return addressable types, expand_call
5770 relies on those having a lhs. */
5771 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p
))
5772 && TREE_CODE (*from_p
) == CALL_EXPR
))
5774 gimplify_stmt (from_p
, pre_p
);
5775 gimplify_stmt (to_p
, pre_p
);
5776 *expr_p
= NULL_TREE
;
5780 /* If the value being copied is of variable width, compute the length
5781 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5782 before gimplifying any of the operands so that we can resolve any
5783 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5784 the size of the expression to be copied, not of the destination, so
5785 that is what we must do here. */
5786 maybe_with_size_expr (from_p
);
5788 /* As a special case, we have to temporarily allow for assignments
5789 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5790 a toplevel statement, when gimplifying the GENERIC expression
5791 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5792 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5794 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5795 prevent gimplify_expr from trying to create a new temporary for
5796 foo's LHS, we tell it that it should only gimplify until it
5797 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5798 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5799 and all we need to do here is set 'a' to be its LHS. */
5801 /* Gimplify the RHS first for C++17 and bug 71104. */
5802 gimple_predicate initial_pred
= initial_rhs_predicate_for (*to_p
);
5803 ret
= gimplify_expr (from_p
, pre_p
, post_p
, initial_pred
, fb_rvalue
);
5804 if (ret
== GS_ERROR
)
5807 /* Then gimplify the LHS. */
5808 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5809 twice we have to make sure to gimplify into non-SSA as otherwise
5810 the abnormal edge added later will make those defs not dominate
5812 ??? Technically this applies only to the registers used in the
5813 resulting non-register *TO_P. */
5814 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
5816 && TREE_CODE (*from_p
) == CALL_EXPR
5817 && call_expr_flags (*from_p
) & ECF_RETURNS_TWICE
)
5818 gimplify_ctxp
->into_ssa
= false;
5819 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5820 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
5821 if (ret
== GS_ERROR
)
5824 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5825 guess for the predicate was wrong. */
5826 gimple_predicate final_pred
= rhs_predicate_for (*to_p
);
5827 if (final_pred
!= initial_pred
)
5829 ret
= gimplify_expr (from_p
, pre_p
, post_p
, final_pred
, fb_rvalue
);
5830 if (ret
== GS_ERROR
)
5834 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5835 size as argument to the call. */
5836 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5838 tree call
= TREE_OPERAND (*from_p
, 0);
5839 tree vlasize
= TREE_OPERAND (*from_p
, 1);
5841 if (TREE_CODE (call
) == CALL_EXPR
5842 && CALL_EXPR_IFN (call
) == IFN_VA_ARG
)
5844 int nargs
= call_expr_nargs (call
);
5845 tree type
= TREE_TYPE (call
);
5846 tree ap
= CALL_EXPR_ARG (call
, 0);
5847 tree tag
= CALL_EXPR_ARG (call
, 1);
5848 tree aptag
= CALL_EXPR_ARG (call
, 2);
5849 tree newcall
= build_call_expr_internal_loc (EXPR_LOCATION (call
),
5853 TREE_OPERAND (*from_p
, 0) = newcall
;
5857 /* Now see if the above changed *from_p to something we handle specially. */
5858 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5860 if (ret
!= GS_UNHANDLED
)
5863 /* If we've got a variable sized assignment between two lvalues (i.e. does
5864 not involve a call), then we can make things a bit more straightforward
5865 by converting the assignment to memcpy or memset. */
5866 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5868 tree from
= TREE_OPERAND (*from_p
, 0);
5869 tree size
= TREE_OPERAND (*from_p
, 1);
5871 if (TREE_CODE (from
) == CONSTRUCTOR
)
5872 return gimplify_modify_expr_to_memset (expr_p
, size
, want_value
, pre_p
);
5874 if (is_gimple_addressable (from
))
5877 return gimplify_modify_expr_to_memcpy (expr_p
, size
, want_value
,
5882 /* Transform partial stores to non-addressable complex variables into
5883 total stores. This allows us to use real instead of virtual operands
5884 for these variables, which improves optimization. */
5885 if ((TREE_CODE (*to_p
) == REALPART_EXPR
5886 || TREE_CODE (*to_p
) == IMAGPART_EXPR
)
5887 && is_gimple_reg (TREE_OPERAND (*to_p
, 0)))
5888 return gimplify_modify_expr_complex_part (expr_p
, pre_p
, want_value
);
5890 /* Try to alleviate the effects of the gimplification creating artificial
5891 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5892 make sure not to create DECL_DEBUG_EXPR links across functions. */
5893 if (!gimplify_ctxp
->into_ssa
5895 && DECL_IGNORED_P (*from_p
)
5897 && !DECL_IGNORED_P (*to_p
)
5898 && decl_function_context (*to_p
) == current_function_decl
5899 && decl_function_context (*from_p
) == current_function_decl
)
5901 if (!DECL_NAME (*from_p
) && DECL_NAME (*to_p
))
5903 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p
)));
5904 DECL_HAS_DEBUG_EXPR_P (*from_p
) = 1;
5905 SET_DECL_DEBUG_EXPR (*from_p
, *to_p
);
5908 if (want_value
&& TREE_THIS_VOLATILE (*to_p
))
5909 *from_p
= get_initialized_tmp_var (*from_p
, pre_p
, post_p
);
5911 if (TREE_CODE (*from_p
) == CALL_EXPR
)
5913 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5914 instead of a GIMPLE_ASSIGN. */
5916 if (CALL_EXPR_FN (*from_p
) == NULL_TREE
)
5918 /* Gimplify internal functions created in the FEs. */
5919 int nargs
= call_expr_nargs (*from_p
), i
;
5920 enum internal_fn ifn
= CALL_EXPR_IFN (*from_p
);
5921 auto_vec
<tree
> vargs (nargs
);
5923 for (i
= 0; i
< nargs
; i
++)
5925 gimplify_arg (&CALL_EXPR_ARG (*from_p
, i
), pre_p
,
5926 EXPR_LOCATION (*from_p
));
5927 vargs
.quick_push (CALL_EXPR_ARG (*from_p
, i
));
5929 call_stmt
= gimple_build_call_internal_vec (ifn
, vargs
);
5930 gimple_call_set_nothrow (call_stmt
, TREE_NOTHROW (*from_p
));
5931 gimple_set_location (call_stmt
, EXPR_LOCATION (*expr_p
));
5935 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*from_p
));
5936 CALL_EXPR_FN (*from_p
) = TREE_OPERAND (CALL_EXPR_FN (*from_p
), 0);
5937 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p
));
5938 tree fndecl
= get_callee_fndecl (*from_p
);
5940 && fndecl_built_in_p (fndecl
, BUILT_IN_EXPECT
)
5941 && call_expr_nargs (*from_p
) == 3)
5942 call_stmt
= gimple_build_call_internal (IFN_BUILTIN_EXPECT
, 3,
5943 CALL_EXPR_ARG (*from_p
, 0),
5944 CALL_EXPR_ARG (*from_p
, 1),
5945 CALL_EXPR_ARG (*from_p
, 2));
5948 call_stmt
= gimple_build_call_from_tree (*from_p
, fnptrtype
);
5951 notice_special_calls (call_stmt
);
5952 if (!gimple_call_noreturn_p (call_stmt
) || !should_remove_lhs_p (*to_p
))
5953 gimple_call_set_lhs (call_stmt
, *to_p
);
5954 else if (TREE_CODE (*to_p
) == SSA_NAME
)
5955 /* The above is somewhat premature, avoid ICEing later for a
5956 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5957 ??? This doesn't make it a default-def. */
5958 SSA_NAME_DEF_STMT (*to_p
) = gimple_build_nop ();
5964 assign
= gimple_build_assign (*to_p
, *from_p
);
5965 gimple_set_location (assign
, EXPR_LOCATION (*expr_p
));
5966 if (COMPARISON_CLASS_P (*from_p
))
5967 gimple_set_no_warning (assign
, TREE_NO_WARNING (*from_p
));
5970 if (gimplify_ctxp
->into_ssa
&& is_gimple_reg (*to_p
))
5972 /* We should have got an SSA name from the start. */
5973 gcc_assert (TREE_CODE (*to_p
) == SSA_NAME
5974 || ! gimple_in_ssa_p (cfun
));
5977 gimplify_seq_add_stmt (pre_p
, assign
);
5978 gsi
= gsi_last (*pre_p
);
5979 maybe_fold_stmt (&gsi
);
5983 *expr_p
= TREE_THIS_VOLATILE (*to_p
) ? *from_p
: unshare_expr (*to_p
);
5992 /* Gimplify a comparison between two variable-sized objects. Do this
5993 with a call to BUILT_IN_MEMCMP. */
5995 static enum gimplify_status
5996 gimplify_variable_sized_compare (tree
*expr_p
)
5998 location_t loc
= EXPR_LOCATION (*expr_p
);
5999 tree op0
= TREE_OPERAND (*expr_p
, 0);
6000 tree op1
= TREE_OPERAND (*expr_p
, 1);
6001 tree t
, arg
, dest
, src
, expr
;
6003 arg
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
6004 arg
= unshare_expr (arg
);
6005 arg
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg
, op0
);
6006 src
= build_fold_addr_expr_loc (loc
, op1
);
6007 dest
= build_fold_addr_expr_loc (loc
, op0
);
6008 t
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
6009 t
= build_call_expr_loc (loc
, t
, 3, dest
, src
, arg
);
6012 = build2 (TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), t
, integer_zero_node
);
6013 SET_EXPR_LOCATION (expr
, loc
);
6019 /* Gimplify a comparison between two aggregate objects of integral scalar
6020 mode as a comparison between the bitwise equivalent scalar values. */
6022 static enum gimplify_status
6023 gimplify_scalar_mode_aggregate_compare (tree
*expr_p
)
6025 location_t loc
= EXPR_LOCATION (*expr_p
);
6026 tree op0
= TREE_OPERAND (*expr_p
, 0);
6027 tree op1
= TREE_OPERAND (*expr_p
, 1);
6029 tree type
= TREE_TYPE (op0
);
6030 tree scalar_type
= lang_hooks
.types
.type_for_mode (TYPE_MODE (type
), 1);
6032 op0
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op0
);
6033 op1
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op1
);
6036 = fold_build2_loc (loc
, TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), op0
, op1
);
6041 /* Gimplify an expression sequence. This function gimplifies each
6042 expression and rewrites the original expression with the last
6043 expression of the sequence in GIMPLE form.
6045 PRE_P points to the list where the side effects for all the
6046 expressions in the sequence will be emitted.
6048 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6050 static enum gimplify_status
6051 gimplify_compound_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
6057 tree
*sub_p
= &TREE_OPERAND (t
, 0);
6059 if (TREE_CODE (*sub_p
) == COMPOUND_EXPR
)
6060 gimplify_compound_expr (sub_p
, pre_p
, false);
6062 gimplify_stmt (sub_p
, pre_p
);
6064 t
= TREE_OPERAND (t
, 1);
6066 while (TREE_CODE (t
) == COMPOUND_EXPR
);
6073 gimplify_stmt (expr_p
, pre_p
);
6078 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6079 gimplify. After gimplification, EXPR_P will point to a new temporary
6080 that holds the original value of the SAVE_EXPR node.
6082 PRE_P points to the list where side effects that must happen before
6083 *EXPR_P should be stored. */
6085 static enum gimplify_status
6086 gimplify_save_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6088 enum gimplify_status ret
= GS_ALL_DONE
;
6091 gcc_assert (TREE_CODE (*expr_p
) == SAVE_EXPR
);
6092 val
= TREE_OPERAND (*expr_p
, 0);
6094 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6095 if (!SAVE_EXPR_RESOLVED_P (*expr_p
))
6097 /* The operand may be a void-valued expression. It is
6098 being executed only for its side-effects. */
6099 if (TREE_TYPE (val
) == void_type_node
)
6101 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
6102 is_gimple_stmt
, fb_none
);
6106 /* The temporary may not be an SSA name as later abnormal and EH
6107 control flow may invalidate use/def domination. When in SSA
6108 form then assume there are no such issues and SAVE_EXPRs only
6109 appear via GENERIC foldings. */
6110 val
= get_initialized_tmp_var (val
, pre_p
, post_p
,
6111 gimple_in_ssa_p (cfun
));
6113 TREE_OPERAND (*expr_p
, 0) = val
;
6114 SAVE_EXPR_RESOLVED_P (*expr_p
) = 1;
6122 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6129 PRE_P points to the list where side effects that must happen before
6130 *EXPR_P should be stored.
6132 POST_P points to the list where side effects that must happen after
6133 *EXPR_P should be stored. */
6135 static enum gimplify_status
6136 gimplify_addr_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6138 tree expr
= *expr_p
;
6139 tree op0
= TREE_OPERAND (expr
, 0);
6140 enum gimplify_status ret
;
6141 location_t loc
= EXPR_LOCATION (*expr_p
);
6143 switch (TREE_CODE (op0
))
6147 /* Check if we are dealing with an expression of the form '&*ptr'.
6148 While the front end folds away '&*ptr' into 'ptr', these
6149 expressions may be generated internally by the compiler (e.g.,
6150 builtins like __builtin_va_end). */
6151 /* Caution: the silent array decomposition semantics we allow for
6152 ADDR_EXPR means we can't always discard the pair. */
6153 /* Gimplification of the ADDR_EXPR operand may drop
6154 cv-qualification conversions, so make sure we add them if
6157 tree op00
= TREE_OPERAND (op0
, 0);
6158 tree t_expr
= TREE_TYPE (expr
);
6159 tree t_op00
= TREE_TYPE (op00
);
6161 if (!useless_type_conversion_p (t_expr
, t_op00
))
6162 op00
= fold_convert_loc (loc
, TREE_TYPE (expr
), op00
);
6168 case VIEW_CONVERT_EXPR
:
6169 /* Take the address of our operand and then convert it to the type of
6172 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6173 all clear. The impact of this transformation is even less clear. */
6175 /* If the operand is a useless conversion, look through it. Doing so
6176 guarantees that the ADDR_EXPR and its operand will remain of the
6178 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0
, 0)))
6179 op0
= TREE_OPERAND (op0
, 0);
6181 *expr_p
= fold_convert_loc (loc
, TREE_TYPE (expr
),
6182 build_fold_addr_expr_loc (loc
,
6183 TREE_OPERAND (op0
, 0)));
6188 if (integer_zerop (TREE_OPERAND (op0
, 1)))
6189 goto do_indirect_ref
;
6194 /* If we see a call to a declared builtin or see its address
6195 being taken (we can unify those cases here) then we can mark
6196 the builtin for implicit generation by GCC. */
6197 if (TREE_CODE (op0
) == FUNCTION_DECL
6198 && fndecl_built_in_p (op0
, BUILT_IN_NORMAL
)
6199 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0
)))
6200 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0
), true);
6202 /* We use fb_either here because the C frontend sometimes takes
6203 the address of a call that returns a struct; see
6204 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6205 the implied temporary explicit. */
6207 /* Make the operand addressable. */
6208 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, post_p
,
6209 is_gimple_addressable
, fb_either
);
6210 if (ret
== GS_ERROR
)
6213 /* Then mark it. Beware that it may not be possible to do so directly
6214 if a temporary has been created by the gimplification. */
6215 prepare_gimple_addressable (&TREE_OPERAND (expr
, 0), pre_p
);
6217 op0
= TREE_OPERAND (expr
, 0);
6219 /* For various reasons, the gimplification of the expression
6220 may have made a new INDIRECT_REF. */
6221 if (TREE_CODE (op0
) == INDIRECT_REF
6222 || (TREE_CODE (op0
) == MEM_REF
6223 && integer_zerop (TREE_OPERAND (op0
, 1))))
6224 goto do_indirect_ref
;
6226 mark_addressable (TREE_OPERAND (expr
, 0));
6228 /* The FEs may end up building ADDR_EXPRs early on a decl with
6229 an incomplete type. Re-build ADDR_EXPRs in canonical form
6231 if (!types_compatible_p (TREE_TYPE (op0
), TREE_TYPE (TREE_TYPE (expr
))))
6232 *expr_p
= build_fold_addr_expr (op0
);
6234 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6235 recompute_tree_invariant_for_addr_expr (*expr_p
);
6237 /* If we re-built the ADDR_EXPR add a conversion to the original type
6239 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
6240 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
6248 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6249 value; output operands should be a gimple lvalue. */
6251 static enum gimplify_status
6252 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6256 const char **oconstraints
;
6259 const char *constraint
;
6260 bool allows_mem
, allows_reg
, is_inout
;
6261 enum gimplify_status ret
, tret
;
6263 vec
<tree
, va_gc
> *inputs
;
6264 vec
<tree
, va_gc
> *outputs
;
6265 vec
<tree
, va_gc
> *clobbers
;
6266 vec
<tree
, va_gc
> *labels
;
6270 noutputs
= list_length (ASM_OUTPUTS (expr
));
6271 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
6279 link_next
= NULL_TREE
;
6280 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
6283 size_t constraint_len
;
6285 link_next
= TREE_CHAIN (link
);
6289 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6290 constraint_len
= strlen (constraint
);
6291 if (constraint_len
== 0)
6294 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
6295 &allows_mem
, &allows_reg
, &is_inout
);
6302 /* If we can't make copies, we can only accept memory.
6303 Similarly for VLAs. */
6304 tree outtype
= TREE_TYPE (TREE_VALUE (link
));
6305 if (outtype
!= error_mark_node
6306 && (TREE_ADDRESSABLE (outtype
)
6307 || !COMPLETE_TYPE_P (outtype
)
6308 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype
))))
6314 error ("impossible constraint in %<asm%>");
6315 error ("non-memory output %d must stay in memory", i
);
6320 if (!allows_reg
&& allows_mem
)
6321 mark_addressable (TREE_VALUE (link
));
6323 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6324 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
6325 fb_lvalue
| fb_mayfail
);
6326 if (tret
== GS_ERROR
)
6328 error ("invalid lvalue in %<asm%> output %d", i
);
6332 /* If the constraint does not allow memory make sure we gimplify
6333 it to a register if it is not already but its base is. This
6334 happens for complex and vector components. */
6337 tree op
= TREE_VALUE (link
);
6338 if (! is_gimple_val (op
)
6339 && is_gimple_reg_type (TREE_TYPE (op
))
6340 && is_gimple_reg (get_base_address (op
)))
6342 tree tem
= create_tmp_reg (TREE_TYPE (op
));
6346 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
6347 tem
, unshare_expr (op
));
6348 gimplify_and_add (ass
, pre_p
);
6350 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
6351 gimplify_and_add (ass
, post_p
);
6353 TREE_VALUE (link
) = tem
;
6358 vec_safe_push (outputs
, link
);
6359 TREE_CHAIN (link
) = NULL_TREE
;
6363 /* An input/output operand. To give the optimizers more
6364 flexibility, split it into separate input and output
6367 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6370 /* Turn the in/out constraint into an output constraint. */
6371 char *p
= xstrdup (constraint
);
6373 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
6375 /* And add a matching input constraint. */
6378 sprintf (buf
, "%u", i
);
6380 /* If there are multiple alternatives in the constraint,
6381 handle each of them individually. Those that allow register
6382 will be replaced with operand number, the others will stay
6384 if (strchr (p
, ',') != NULL
)
6386 size_t len
= 0, buflen
= strlen (buf
);
6387 char *beg
, *end
, *str
, *dst
;
6391 end
= strchr (beg
, ',');
6393 end
= strchr (beg
, '\0');
6394 if ((size_t) (end
- beg
) < buflen
)
6397 len
+= end
- beg
+ 1;
6404 str
= (char *) alloca (len
);
6405 for (beg
= p
+ 1, dst
= str
;;)
6408 bool mem_p
, reg_p
, inout_p
;
6410 end
= strchr (beg
, ',');
6415 parse_output_constraint (&tem
, i
, 0, 0,
6416 &mem_p
, ®_p
, &inout_p
);
6421 memcpy (dst
, buf
, buflen
);
6430 memcpy (dst
, beg
, len
);
6439 input
= build_string (dst
- str
, str
);
6442 input
= build_string (strlen (buf
), buf
);
6445 input
= build_string (constraint_len
- 1, constraint
+ 1);
6449 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
6450 unshare_expr (TREE_VALUE (link
)));
6451 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
6455 link_next
= NULL_TREE
;
6456 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
6458 link_next
= TREE_CHAIN (link
);
6459 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6460 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
6461 oconstraints
, &allows_mem
, &allows_reg
);
6463 /* If we can't make copies, we can only accept memory. */
6464 tree intype
= TREE_TYPE (TREE_VALUE (link
));
6465 if (intype
!= error_mark_node
6466 && (TREE_ADDRESSABLE (intype
)
6467 || !COMPLETE_TYPE_P (intype
)
6468 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype
))))
6474 error ("impossible constraint in %<asm%>");
6475 error ("non-memory input %d must stay in memory", i
);
6480 /* If the operand is a memory input, it should be an lvalue. */
6481 if (!allows_reg
&& allows_mem
)
6483 tree inputv
= TREE_VALUE (link
);
6484 STRIP_NOPS (inputv
);
6485 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
6486 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
6487 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
6488 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
6489 || TREE_CODE (inputv
) == MODIFY_EXPR
)
6490 TREE_VALUE (link
) = error_mark_node
;
6491 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6492 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
6493 if (tret
!= GS_ERROR
)
6495 /* Unlike output operands, memory inputs are not guaranteed
6496 to be lvalues by the FE, and while the expressions are
6497 marked addressable there, if it is e.g. a statement
6498 expression, temporaries in it might not end up being
6499 addressable. They might be already used in the IL and thus
6500 it is too late to make them addressable now though. */
6501 tree x
= TREE_VALUE (link
);
6502 while (handled_component_p (x
))
6503 x
= TREE_OPERAND (x
, 0);
6504 if (TREE_CODE (x
) == MEM_REF
6505 && TREE_CODE (TREE_OPERAND (x
, 0)) == ADDR_EXPR
)
6506 x
= TREE_OPERAND (TREE_OPERAND (x
, 0), 0);
6508 || TREE_CODE (x
) == PARM_DECL
6509 || TREE_CODE (x
) == RESULT_DECL
)
6510 && !TREE_ADDRESSABLE (x
)
6511 && is_gimple_reg (x
))
6513 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
),
6515 "memory input %d is not directly addressable",
6517 prepare_gimple_addressable (&TREE_VALUE (link
), pre_p
);
6520 mark_addressable (TREE_VALUE (link
));
6521 if (tret
== GS_ERROR
)
6523 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
), input_location
),
6524 "memory input %d is not directly addressable", i
);
6530 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6531 is_gimple_asm_val
, fb_rvalue
);
6532 if (tret
== GS_ERROR
)
6536 TREE_CHAIN (link
) = NULL_TREE
;
6537 vec_safe_push (inputs
, link
);
6540 link_next
= NULL_TREE
;
6541 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
6543 link_next
= TREE_CHAIN (link
);
6544 TREE_CHAIN (link
) = NULL_TREE
;
6545 vec_safe_push (clobbers
, link
);
6548 link_next
= NULL_TREE
;
6549 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
6551 link_next
= TREE_CHAIN (link
);
6552 TREE_CHAIN (link
) = NULL_TREE
;
6553 vec_safe_push (labels
, link
);
6556 /* Do not add ASMs with errors to the gimple IL stream. */
6557 if (ret
!= GS_ERROR
)
6559 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
6560 inputs
, outputs
, clobbers
, labels
);
6562 gimple_asm_set_volatile (stmt
, ASM_VOLATILE_P (expr
) || noutputs
== 0);
6563 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
6564 gimple_asm_set_inline (stmt
, ASM_INLINE_P (expr
));
6566 gimplify_seq_add_stmt (pre_p
, stmt
);
6572 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6573 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6574 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6575 return to this function.
6577 FIXME should we complexify the prequeue handling instead? Or use flags
6578 for all the cleanups and let the optimizer tighten them up? The current
6579 code seems pretty fragile; it will break on a cleanup within any
6580 non-conditional nesting. But any such nesting would be broken, anyway;
6581 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6582 and continues out of it. We can do that at the RTL level, though, so
6583 having an optimizer to tighten up try/finally regions would be a Good
6586 static enum gimplify_status
6587 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
6589 gimple_stmt_iterator iter
;
6590 gimple_seq body_sequence
= NULL
;
6592 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
6594 /* We only care about the number of conditions between the innermost
6595 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6596 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6597 int old_conds
= gimplify_ctxp
->conditions
;
6598 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
6599 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
6600 gimplify_ctxp
->conditions
= 0;
6601 gimplify_ctxp
->conditional_cleanups
= NULL
;
6602 gimplify_ctxp
->in_cleanup_point_expr
= true;
6604 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
6606 gimplify_ctxp
->conditions
= old_conds
;
6607 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
6608 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
6610 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
6612 gimple
*wce
= gsi_stmt (iter
);
6614 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
6616 if (gsi_one_before_end_p (iter
))
6618 /* Note that gsi_insert_seq_before and gsi_remove do not
6619 scan operands, unlike some other sequence mutators. */
6620 if (!gimple_wce_cleanup_eh_only (wce
))
6621 gsi_insert_seq_before_without_update (&iter
,
6622 gimple_wce_cleanup (wce
),
6624 gsi_remove (&iter
, true);
6631 enum gimple_try_flags kind
;
6633 if (gimple_wce_cleanup_eh_only (wce
))
6634 kind
= GIMPLE_TRY_CATCH
;
6636 kind
= GIMPLE_TRY_FINALLY
;
6637 seq
= gsi_split_seq_after (iter
);
6639 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
6640 /* Do not use gsi_replace here, as it may scan operands.
6641 We want to do a simple structural modification only. */
6642 gsi_set_stmt (&iter
, gtry
);
6643 iter
= gsi_start (gtry
->eval
);
6650 gimplify_seq_add_seq (pre_p
, body_sequence
);
6663 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6664 is the cleanup action required. EH_ONLY is true if the cleanup should
6665 only be executed if an exception is thrown, not on normal exit.
6666 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6667 only valid for clobbers. */
6670 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
,
6671 bool force_uncond
= false)
6674 gimple_seq cleanup_stmts
= NULL
;
6676 /* Errors can result in improperly nested cleanups. Which results in
6677 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6681 if (gimple_conditional_context ())
6683 /* If we're in a conditional context, this is more complex. We only
6684 want to run the cleanup if we actually ran the initialization that
6685 necessitates it, but we want to run it after the end of the
6686 conditional context. So we wrap the try/finally around the
6687 condition and use a flag to determine whether or not to actually
6688 run the destructor. Thus
6692 becomes (approximately)
6696 if (test) { A::A(temp); flag = 1; val = f(temp); }
6699 if (flag) A::~A(temp);
6705 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6706 wce
= gimple_build_wce (cleanup_stmts
);
6707 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6711 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
6712 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
6713 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
6715 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
6716 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6717 wce
= gimple_build_wce (cleanup_stmts
);
6719 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
6720 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6721 gimplify_seq_add_stmt (pre_p
, ftrue
);
6723 /* Because of this manipulation, and the EH edges that jump
6724 threading cannot redirect, the temporary (VAR) will appear
6725 to be used uninitialized. Don't warn. */
6726 TREE_NO_WARNING (var
) = 1;
6731 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6732 wce
= gimple_build_wce (cleanup_stmts
);
6733 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
6734 gimplify_seq_add_stmt (pre_p
, wce
);
6738 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6740 static enum gimplify_status
6741 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6743 tree targ
= *expr_p
;
6744 tree temp
= TARGET_EXPR_SLOT (targ
);
6745 tree init
= TARGET_EXPR_INITIAL (targ
);
6746 enum gimplify_status ret
;
6748 bool unpoison_empty_seq
= false;
6749 gimple_stmt_iterator unpoison_it
;
6753 tree cleanup
= NULL_TREE
;
6755 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6756 to the temps list. Handle also variable length TARGET_EXPRs. */
6757 if (!poly_int_tree_p (DECL_SIZE (temp
)))
6759 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
6760 gimplify_type_sizes (TREE_TYPE (temp
), pre_p
);
6761 gimplify_vla_decl (temp
, pre_p
);
6765 /* Save location where we need to place unpoisoning. It's possible
6766 that a variable will be converted to needs_to_live_in_memory. */
6767 unpoison_it
= gsi_last (*pre_p
);
6768 unpoison_empty_seq
= gsi_end_p (unpoison_it
);
6770 gimple_add_tmp_var (temp
);
6773 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6774 expression is supposed to initialize the slot. */
6775 if (VOID_TYPE_P (TREE_TYPE (init
)))
6776 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6779 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
6781 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6783 ggc_free (init_expr
);
6785 if (ret
== GS_ERROR
)
6787 /* PR c++/28266 Make sure this is expanded only once. */
6788 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6792 gimplify_and_add (init
, pre_p
);
6794 /* If needed, push the cleanup for the temp. */
6795 if (TARGET_EXPR_CLEANUP (targ
))
6797 if (CLEANUP_EH_ONLY (targ
))
6798 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
6799 CLEANUP_EH_ONLY (targ
), pre_p
);
6801 cleanup
= TARGET_EXPR_CLEANUP (targ
);
6804 /* Add a clobber for the temporary going out of scope, like
6805 gimplify_bind_expr. */
6806 if (gimplify_ctxp
->in_cleanup_point_expr
6807 && needs_to_live_in_memory (temp
))
6809 if (flag_stack_reuse
== SR_ALL
)
6811 tree clobber
= build_clobber (TREE_TYPE (temp
));
6812 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
6813 gimple_push_cleanup (temp
, clobber
, false, pre_p
, true);
6815 if (asan_poisoned_variables
6816 && DECL_ALIGN (temp
) <= MAX_SUPPORTED_STACK_ALIGNMENT
6817 && !TREE_STATIC (temp
)
6818 && dbg_cnt (asan_use_after_scope
)
6819 && !gimplify_omp_ctxp
)
6821 tree asan_cleanup
= build_asan_poison_call_expr (temp
);
6824 if (unpoison_empty_seq
)
6825 unpoison_it
= gsi_start (*pre_p
);
6827 asan_poison_variable (temp
, false, &unpoison_it
,
6828 unpoison_empty_seq
);
6829 gimple_push_cleanup (temp
, asan_cleanup
, false, pre_p
);
6834 gimple_push_cleanup (temp
, cleanup
, false, pre_p
);
6836 /* Only expand this once. */
6837 TREE_OPERAND (targ
, 3) = init
;
6838 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6841 /* We should have expanded this before. */
6842 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
6848 /* Gimplification of expression trees. */
6850 /* Gimplify an expression which appears at statement context. The
6851 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6852 NULL, a new sequence is allocated.
6854 Return true if we actually added a statement to the queue. */
6857 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
6859 gimple_seq_node last
;
6861 last
= gimple_seq_last (*seq_p
);
6862 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
6863 return last
!= gimple_seq_last (*seq_p
);
6866 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6867 to CTX. If entries already exist, force them to be some flavor of private.
6868 If there is no enclosing parallel, do nothing. */
6871 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
6875 if (decl
== NULL
|| !DECL_P (decl
) || ctx
->region_type
== ORT_NONE
)
6880 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6883 if (n
->value
& GOVD_SHARED
)
6884 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
6885 else if (n
->value
& GOVD_MAP
)
6886 n
->value
|= GOVD_MAP_TO_ONLY
;
6890 else if ((ctx
->region_type
& ORT_TARGET
) != 0)
6892 if (ctx
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
6893 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6895 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
6897 else if (ctx
->region_type
!= ORT_WORKSHARE
6898 && ctx
->region_type
!= ORT_TASKGROUP
6899 && ctx
->region_type
!= ORT_SIMD
6900 && ctx
->region_type
!= ORT_ACC
6901 && !(ctx
->region_type
& ORT_TARGET_DATA
))
6902 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6904 ctx
= ctx
->outer_context
;
6909 /* Similarly for each of the type sizes of TYPE. */
6912 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
6914 if (type
== NULL
|| type
== error_mark_node
)
6916 type
= TYPE_MAIN_VARIANT (type
);
6918 if (ctx
->privatized_types
->add (type
))
6921 switch (TREE_CODE (type
))
6927 case FIXED_POINT_TYPE
:
6928 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
6929 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
6933 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6934 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
6939 case QUAL_UNION_TYPE
:
6942 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
6943 if (TREE_CODE (field
) == FIELD_DECL
)
6945 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
6946 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
6952 case REFERENCE_TYPE
:
6953 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6960 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
6961 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
6962 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
6965 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6968 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
6971 unsigned int nflags
;
6974 if (error_operand_p (decl
) || ctx
->region_type
== ORT_NONE
)
6977 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6978 there are constructors involved somewhere. Exception is a shared clause,
6979 there is nothing privatized in that case. */
6980 if ((flags
& GOVD_SHARED
) == 0
6981 && (TREE_ADDRESSABLE (TREE_TYPE (decl
))
6982 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
))))
6985 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6986 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
6988 /* We shouldn't be re-adding the decl with the same data
6990 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
6991 nflags
= n
->value
| flags
;
6992 /* The only combination of data sharing classes we should see is
6993 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6994 reduction variables to be used in data sharing clauses. */
6995 gcc_assert ((ctx
->region_type
& ORT_ACC
) != 0
6996 || ((nflags
& GOVD_DATA_SHARE_CLASS
)
6997 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
))
6998 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
7003 /* When adding a variable-sized variable, we have to handle all sorts
7004 of additional bits of data: the pointer replacement variable, and
7005 the parameters of the type. */
7006 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7008 /* Add the pointer replacement variable as PRIVATE if the variable
7009 replacement is private, else FIRSTPRIVATE since we'll need the
7010 address of the original variable either for SHARED, or for the
7011 copy into or out of the context. */
7012 if (!(flags
& GOVD_LOCAL
) && ctx
->region_type
!= ORT_TASKGROUP
)
7014 if (flags
& GOVD_MAP
)
7015 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
7016 else if (flags
& GOVD_PRIVATE
)
7017 nflags
= GOVD_PRIVATE
;
7018 else if (((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7019 && (flags
& GOVD_FIRSTPRIVATE
))
7020 || (ctx
->region_type
== ORT_TARGET_DATA
7021 && (flags
& GOVD_DATA_SHARE_CLASS
) == 0))
7022 nflags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
7024 nflags
= GOVD_FIRSTPRIVATE
;
7025 nflags
|= flags
& GOVD_SEEN
;
7026 t
= DECL_VALUE_EXPR (decl
);
7027 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7028 t
= TREE_OPERAND (t
, 0);
7029 gcc_assert (DECL_P (t
));
7030 omp_add_variable (ctx
, t
, nflags
);
7033 /* Add all of the variable and type parameters (which should have
7034 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7035 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
7036 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
7037 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7039 /* The variable-sized variable itself is never SHARED, only some form
7040 of PRIVATE. The sharing would take place via the pointer variable
7041 which we remapped above. */
7042 if (flags
& GOVD_SHARED
)
7043 flags
= GOVD_SHARED
| GOVD_DEBUG_PRIVATE
7044 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
7046 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7047 alloca statement we generate for the variable, so make sure it
7048 is available. This isn't automatically needed for the SHARED
7049 case, since we won't be allocating local storage then.
7050 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7051 in this case omp_notice_variable will be called later
7052 on when it is gimplified. */
7053 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
7054 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
7055 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
7057 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
7058 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7060 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7062 /* Similar to the direct variable sized case above, we'll need the
7063 size of references being privatized. */
7064 if ((flags
& GOVD_SHARED
) == 0)
7066 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7068 omp_notice_variable (ctx
, t
, true);
7075 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
7077 /* For reductions clauses in OpenACC loop directives, by default create a
7078 copy clause on the enclosing parallel construct for carrying back the
7080 if (ctx
->region_type
== ORT_ACC
&& (flags
& GOVD_REDUCTION
))
7082 struct gimplify_omp_ctx
*outer_ctx
= ctx
->outer_context
;
7085 n
= splay_tree_lookup (outer_ctx
->variables
, (splay_tree_key
)decl
);
7088 /* Ignore local variables and explicitly declared clauses. */
7089 if (n
->value
& (GOVD_LOCAL
| GOVD_EXPLICIT
))
7091 else if (outer_ctx
->region_type
== ORT_ACC_KERNELS
)
7093 /* According to the OpenACC spec, such a reduction variable
7094 should already have a copy map on a kernels construct,
7095 verify that here. */
7096 gcc_assert (!(n
->value
& GOVD_FIRSTPRIVATE
)
7097 && (n
->value
& GOVD_MAP
));
7099 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7101 /* Remove firstprivate and make it a copy map. */
7102 n
->value
&= ~GOVD_FIRSTPRIVATE
;
7103 n
->value
|= GOVD_MAP
;
7106 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7108 splay_tree_insert (outer_ctx
->variables
, (splay_tree_key
)decl
,
7109 GOVD_MAP
| GOVD_SEEN
);
7112 outer_ctx
= outer_ctx
->outer_context
;
7117 /* Notice a threadprivate variable DECL used in OMP context CTX.
7118 This just prints out diagnostics about threadprivate variable uses
7119 in untied tasks. If DECL2 is non-NULL, prevent this warning
7120 on that variable. */
7123 omp_notice_threadprivate_variable (struct gimplify_omp_ctx
*ctx
, tree decl
,
7127 struct gimplify_omp_ctx
*octx
;
7129 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
7130 if ((octx
->region_type
& ORT_TARGET
) != 0
7131 || octx
->order_concurrent
)
7133 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
)decl
);
7136 if (octx
->order_concurrent
)
7138 error ("threadprivate variable %qE used in a region with"
7139 " %<order(concurrent)%> clause", DECL_NAME (decl
));
7140 inform (octx
->location
, "enclosing region");
7144 error ("threadprivate variable %qE used in target region",
7146 inform (octx
->location
, "enclosing target region");
7148 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl
, 0);
7151 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl2
, 0);
7154 if (ctx
->region_type
!= ORT_UNTIED_TASK
)
7156 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7159 error ("threadprivate variable %qE used in untied task",
7161 inform (ctx
->location
, "enclosing task");
7162 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, 0);
7165 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl2
, 0);
7169 /* Return true if global var DECL is device resident. */
7172 device_resident_p (tree decl
)
7174 tree attr
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl
));
7179 for (tree t
= TREE_VALUE (attr
); t
; t
= TREE_PURPOSE (t
))
7181 tree c
= TREE_VALUE (t
);
7182 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DEVICE_RESIDENT
)
7189 /* Return true if DECL has an ACC DECLARE attribute. */
7192 is_oacc_declared (tree decl
)
7194 tree t
= TREE_CODE (decl
) == MEM_REF
? TREE_OPERAND (decl
, 0) : decl
;
7195 tree declared
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t
));
7196 return declared
!= NULL_TREE
;
7199 /* Determine outer default flags for DECL mentioned in an OMP region
7200 but not declared in an enclosing clause.
7202 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7203 remapped firstprivate instead of shared. To some extent this is
7204 addressed in omp_firstprivatize_type_sizes, but not
7208 omp_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
,
7209 bool in_code
, unsigned flags
)
7211 enum omp_clause_default_kind default_kind
= ctx
->default_kind
;
7212 enum omp_clause_default_kind kind
;
7214 kind
= lang_hooks
.decls
.omp_predetermined_sharing (decl
);
7215 if (kind
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
7216 default_kind
= kind
;
7217 else if (VAR_P (decl
) && TREE_STATIC (decl
) && DECL_IN_CONSTANT_POOL (decl
))
7218 default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
7220 switch (default_kind
)
7222 case OMP_CLAUSE_DEFAULT_NONE
:
7226 if (ctx
->region_type
& ORT_PARALLEL
)
7228 else if ((ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
)
7230 else if (ctx
->region_type
& ORT_TASK
)
7232 else if (ctx
->region_type
& ORT_TEAMS
)
7237 error ("%qE not specified in enclosing %qs",
7238 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rtype
);
7239 inform (ctx
->location
, "enclosing %qs", rtype
);
7242 case OMP_CLAUSE_DEFAULT_SHARED
:
7243 flags
|= GOVD_SHARED
;
7245 case OMP_CLAUSE_DEFAULT_PRIVATE
:
7246 flags
|= GOVD_PRIVATE
;
7248 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
:
7249 flags
|= GOVD_FIRSTPRIVATE
;
7251 case OMP_CLAUSE_DEFAULT_UNSPECIFIED
:
7252 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7253 gcc_assert ((ctx
->region_type
& ORT_TASK
) != 0);
7254 if (struct gimplify_omp_ctx
*octx
= ctx
->outer_context
)
7256 omp_notice_variable (octx
, decl
, in_code
);
7257 for (; octx
; octx
= octx
->outer_context
)
7261 n2
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
7262 if ((octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)) != 0
7263 && (n2
== NULL
|| (n2
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7265 if (n2
&& (n2
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
)
7267 flags
|= GOVD_FIRSTPRIVATE
;
7270 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TEAMS
)) != 0)
7272 flags
|= GOVD_SHARED
;
7278 if (TREE_CODE (decl
) == PARM_DECL
7279 || (!is_global_var (decl
)
7280 && DECL_CONTEXT (decl
) == current_function_decl
))
7281 flags
|= GOVD_FIRSTPRIVATE
;
7283 flags
|= GOVD_SHARED
;
7295 /* Determine outer default flags for DECL mentioned in an OACC region
7296 but not declared in an enclosing clause. */
7299 oacc_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned flags
)
7302 bool on_device
= false;
7303 bool is_private
= false;
7304 bool declared
= is_oacc_declared (decl
);
7305 tree type
= TREE_TYPE (decl
);
7307 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7308 type
= TREE_TYPE (type
);
7310 /* For Fortran COMMON blocks, only used variables in those blocks are
7311 transfered and remapped. The block itself will have a private clause to
7312 avoid transfering the data twice.
7313 The hook evaluates to false by default. For a variable in Fortran's COMMON
7314 or EQUIVALENCE block, returns 'true' (as we have shared=false) - as only
7315 the variables in such a COMMON/EQUIVALENCE block shall be privatized not
7316 the whole block. For C++ and Fortran, it can also be true under certain
7317 other conditions, if DECL_HAS_VALUE_EXPR. */
7318 if (RECORD_OR_UNION_TYPE_P (type
))
7319 is_private
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7321 if ((ctx
->region_type
& (ORT_ACC_PARALLEL
| ORT_ACC_KERNELS
)) != 0
7322 && is_global_var (decl
)
7323 && device_resident_p (decl
)
7327 flags
|= GOVD_MAP_TO_ONLY
;
7330 switch (ctx
->region_type
)
7332 case ORT_ACC_KERNELS
:
7336 flags
|= GOVD_FIRSTPRIVATE
;
7337 else if (AGGREGATE_TYPE_P (type
))
7339 /* Aggregates default to 'present_or_copy', or 'present'. */
7340 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7343 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7346 /* Scalars default to 'copy'. */
7347 flags
|= GOVD_MAP
| GOVD_MAP_FORCE
;
7351 case ORT_ACC_PARALLEL
:
7352 case ORT_ACC_SERIAL
:
7353 rkind
= ctx
->region_type
== ORT_ACC_PARALLEL
? "parallel" : "serial";
7356 flags
|= GOVD_FIRSTPRIVATE
;
7357 else if (on_device
|| declared
)
7359 else if (AGGREGATE_TYPE_P (type
))
7361 /* Aggregates default to 'present_or_copy', or 'present'. */
7362 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7365 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7368 /* Scalars default to 'firstprivate'. */
7369 flags
|= GOVD_FIRSTPRIVATE
;
7377 if (DECL_ARTIFICIAL (decl
))
7378 ; /* We can get compiler-generated decls, and should not complain
7380 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_NONE
)
7382 error ("%qE not specified in enclosing OpenACC %qs construct",
7383 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rkind
);
7384 inform (ctx
->location
, "enclosing OpenACC %qs construct", rkind
);
7386 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_PRESENT
)
7387 ; /* Handled above. */
7389 gcc_checking_assert (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
);
7394 /* Record the fact that DECL was used within the OMP context CTX.
7395 IN_CODE is true when real code uses DECL, and false when we should
7396 merely emit default(none) errors. Return true if DECL is going to
7397 be remapped and thus DECL shouldn't be gimplified into its
7398 DECL_VALUE_EXPR (if any). */
7401 omp_notice_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, bool in_code
)
7404 unsigned flags
= in_code
? GOVD_SEEN
: 0;
7405 bool ret
= false, shared
;
7407 if (error_operand_p (decl
))
7410 if (ctx
->region_type
== ORT_NONE
)
7411 return lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7413 if (is_global_var (decl
))
7415 /* Threadprivate variables are predetermined. */
7416 if (DECL_THREAD_LOCAL_P (decl
))
7417 return omp_notice_threadprivate_variable (ctx
, decl
, NULL_TREE
);
7419 if (DECL_HAS_VALUE_EXPR_P (decl
))
7421 if (ctx
->region_type
& ORT_ACC
)
7422 /* For OpenACC, defer expansion of value to avoid transfering
7423 privatized common block data instead of im-/explicitly transfered
7424 variables which are in common blocks. */
7428 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
7430 if (value
&& DECL_P (value
) && DECL_THREAD_LOCAL_P (value
))
7431 return omp_notice_threadprivate_variable (ctx
, decl
, value
);
7435 if (gimplify_omp_ctxp
->outer_context
== NULL
7437 && oacc_get_fn_attrib (current_function_decl
))
7439 location_t loc
= DECL_SOURCE_LOCATION (decl
);
7441 if (lookup_attribute ("omp declare target link",
7442 DECL_ATTRIBUTES (decl
)))
7445 "%qE with %<link%> clause used in %<routine%> function",
7449 else if (!lookup_attribute ("omp declare target",
7450 DECL_ATTRIBUTES (decl
)))
7453 "%qE requires a %<declare%> directive for use "
7454 "in a %<routine%> function", DECL_NAME (decl
));
7460 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7461 if ((ctx
->region_type
& ORT_TARGET
) != 0)
7463 if (ctx
->region_type
& ORT_ACC
)
7464 /* For OpenACC, as remarked above, defer expansion. */
7469 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7472 unsigned nflags
= flags
;
7473 if ((ctx
->region_type
& ORT_ACC
) == 0)
7475 bool is_declare_target
= false;
7476 if (is_global_var (decl
)
7477 && varpool_node::get_create (decl
)->offloadable
)
7479 struct gimplify_omp_ctx
*octx
;
7480 for (octx
= ctx
->outer_context
;
7481 octx
; octx
= octx
->outer_context
)
7483 n
= splay_tree_lookup (octx
->variables
,
7484 (splay_tree_key
)decl
);
7486 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
7487 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7490 is_declare_target
= octx
== NULL
;
7492 if (!is_declare_target
)
7495 enum omp_clause_defaultmap_kind kind
;
7496 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
7497 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
7498 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
7500 gdmk
= GDMK_POINTER
;
7501 else if (lang_hooks
.decls
.omp_scalar_p (decl
))
7504 gdmk
= GDMK_AGGREGATE
;
7505 kind
= lang_hooks
.decls
.omp_predetermined_mapping (decl
);
7506 if (kind
!= OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
)
7508 if (kind
== OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
)
7509 nflags
|= GOVD_FIRSTPRIVATE
;
7510 else if (kind
== OMP_CLAUSE_DEFAULTMAP_TO
)
7511 nflags
|= GOVD_MAP
| GOVD_MAP_TO_ONLY
;
7515 else if (ctx
->defaultmap
[gdmk
] == 0)
7517 tree d
= lang_hooks
.decls
.omp_report_decl (decl
);
7518 error ("%qE not specified in enclosing %<target%>",
7520 inform (ctx
->location
, "enclosing %<target%>");
7522 else if (ctx
->defaultmap
[gdmk
]
7523 & (GOVD_MAP_0LEN_ARRAY
| GOVD_FIRSTPRIVATE
))
7524 nflags
|= ctx
->defaultmap
[gdmk
];
7527 gcc_assert (ctx
->defaultmap
[gdmk
] & GOVD_MAP
);
7528 nflags
|= ctx
->defaultmap
[gdmk
] & ~GOVD_MAP
;
7533 struct gimplify_omp_ctx
*octx
= ctx
->outer_context
;
7534 if ((ctx
->region_type
& ORT_ACC
) && octx
)
7536 /* Look in outer OpenACC contexts, to see if there's a
7537 data attribute for this variable. */
7538 omp_notice_variable (octx
, decl
, in_code
);
7540 for (; octx
; octx
= octx
->outer_context
)
7542 if (!(octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)))
7545 = splay_tree_lookup (octx
->variables
,
7546 (splay_tree_key
) decl
);
7549 if (octx
->region_type
== ORT_ACC_HOST_DATA
)
7550 error ("variable %qE declared in enclosing "
7551 "%<host_data%> region", DECL_NAME (decl
));
7553 if (octx
->region_type
== ORT_ACC_DATA
7554 && (n2
->value
& GOVD_MAP_0LEN_ARRAY
))
7555 nflags
|= GOVD_MAP_0LEN_ARRAY
;
7561 if ((nflags
& ~(GOVD_MAP_TO_ONLY
| GOVD_MAP_FROM_ONLY
7562 | GOVD_MAP_ALLOC_ONLY
)) == flags
)
7564 tree type
= TREE_TYPE (decl
);
7566 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
7567 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7568 type
= TREE_TYPE (type
);
7569 if (!lang_hooks
.types
.omp_mappable_type (type
))
7571 error ("%qD referenced in target region does not have "
7572 "a mappable type", decl
);
7573 nflags
|= GOVD_MAP
| GOVD_EXPLICIT
;
7577 if ((ctx
->region_type
& ORT_ACC
) != 0)
7578 nflags
= oacc_default_clause (ctx
, decl
, flags
);
7584 omp_add_variable (ctx
, decl
, nflags
);
7588 /* If nothing changed, there's nothing left to do. */
7589 if ((n
->value
& flags
) == flags
)
7599 if (ctx
->region_type
== ORT_WORKSHARE
7600 || ctx
->region_type
== ORT_TASKGROUP
7601 || ctx
->region_type
== ORT_SIMD
7602 || ctx
->region_type
== ORT_ACC
7603 || (ctx
->region_type
& ORT_TARGET_DATA
) != 0)
7606 flags
= omp_default_clause (ctx
, decl
, in_code
, flags
);
7608 if ((flags
& GOVD_PRIVATE
)
7609 && lang_hooks
.decls
.omp_private_outer_ref (decl
))
7610 flags
|= GOVD_PRIVATE_OUTER_REF
;
7612 omp_add_variable (ctx
, decl
, flags
);
7614 shared
= (flags
& GOVD_SHARED
) != 0;
7615 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7619 if ((n
->value
& (GOVD_SEEN
| GOVD_LOCAL
)) == 0
7620 && (flags
& (GOVD_SEEN
| GOVD_LOCAL
)) == GOVD_SEEN
7621 && DECL_SIZE (decl
))
7623 if (TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7626 tree t
= DECL_VALUE_EXPR (decl
);
7627 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7628 t
= TREE_OPERAND (t
, 0);
7629 gcc_assert (DECL_P (t
));
7630 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7631 n2
->value
|= GOVD_SEEN
;
7633 else if (lang_hooks
.decls
.omp_privatize_by_reference (decl
)
7634 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)))
7635 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))))
7639 tree t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7640 gcc_assert (DECL_P (t
));
7641 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7643 omp_notice_variable (ctx
, t
, true);
7647 if (ctx
->region_type
& ORT_ACC
)
7648 /* For OpenACC, as remarked above, defer expansion. */
7651 shared
= ((flags
| n
->value
) & GOVD_SHARED
) != 0;
7652 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7654 /* If nothing changed, there's nothing left to do. */
7655 if ((n
->value
& flags
) == flags
)
7661 /* If the variable is private in the current context, then we don't
7662 need to propagate anything to an outer context. */
7663 if ((flags
& GOVD_PRIVATE
) && !(flags
& GOVD_PRIVATE_OUTER_REF
))
7665 if ((flags
& (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7666 == (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7668 if ((flags
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
7669 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7670 == (GOVD_LASTPRIVATE
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7672 if (ctx
->outer_context
7673 && omp_notice_variable (ctx
->outer_context
, decl
, in_code
))
7678 /* Verify that DECL is private within CTX. If there's specific information
7679 to the contrary in the innermost scope, generate an error. */
7682 omp_is_private (struct gimplify_omp_ctx
*ctx
, tree decl
, int simd
)
7686 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7689 if (n
->value
& GOVD_SHARED
)
7691 if (ctx
== gimplify_omp_ctxp
)
7694 error ("iteration variable %qE is predetermined linear",
7697 error ("iteration variable %qE should be private",
7699 n
->value
= GOVD_PRIVATE
;
7705 else if ((n
->value
& GOVD_EXPLICIT
) != 0
7706 && (ctx
== gimplify_omp_ctxp
7707 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7708 && gimplify_omp_ctxp
->outer_context
== ctx
)))
7710 if ((n
->value
& GOVD_FIRSTPRIVATE
) != 0)
7711 error ("iteration variable %qE should not be firstprivate",
7713 else if ((n
->value
& GOVD_REDUCTION
) != 0)
7714 error ("iteration variable %qE should not be reduction",
7716 else if (simd
!= 1 && (n
->value
& GOVD_LINEAR
) != 0)
7717 error ("iteration variable %qE should not be linear",
7720 return (ctx
== gimplify_omp_ctxp
7721 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7722 && gimplify_omp_ctxp
->outer_context
== ctx
));
7725 if (ctx
->region_type
!= ORT_WORKSHARE
7726 && ctx
->region_type
!= ORT_TASKGROUP
7727 && ctx
->region_type
!= ORT_SIMD
7728 && ctx
->region_type
!= ORT_ACC
)
7730 else if (ctx
->outer_context
)
7731 return omp_is_private (ctx
->outer_context
, decl
, simd
);
7735 /* Return true if DECL is private within a parallel region
7736 that binds to the current construct's context or in parallel
7737 region's REDUCTION clause. */
7740 omp_check_private (struct gimplify_omp_ctx
*ctx
, tree decl
, bool copyprivate
)
7746 ctx
= ctx
->outer_context
;
7749 if (is_global_var (decl
))
7752 /* References might be private, but might be shared too,
7753 when checking for copyprivate, assume they might be
7754 private, otherwise assume they might be shared. */
7758 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7761 /* Treat C++ privatized non-static data members outside
7762 of the privatization the same. */
7763 if (omp_member_access_dummy_var (decl
))
7769 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
7771 if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7772 && (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7777 if ((n
->value
& GOVD_LOCAL
) != 0
7778 && omp_member_access_dummy_var (decl
))
7780 return (n
->value
& GOVD_SHARED
) == 0;
7783 while (ctx
->region_type
== ORT_WORKSHARE
7784 || ctx
->region_type
== ORT_TASKGROUP
7785 || ctx
->region_type
== ORT_SIMD
7786 || ctx
->region_type
== ORT_ACC
);
7790 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7793 find_decl_expr (tree
*tp
, int *walk_subtrees
, void *data
)
7797 /* If this node has been visited, unmark it and keep looking. */
7798 if (TREE_CODE (t
) == DECL_EXPR
&& DECL_EXPR_DECL (t
) == (tree
) data
)
7801 if (IS_TYPE_OR_DECL_P (t
))
7806 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7807 lower all the depend clauses by populating corresponding depend
7808 array. Returns 0 if there are no such depend clauses, or
7809 2 if all depend clauses should be removed, 1 otherwise. */
7812 gimplify_omp_depend (tree
*list_p
, gimple_seq
*pre_p
)
7816 size_t n
[4] = { 0, 0, 0, 0 };
7818 tree counts
[4] = { NULL_TREE
, NULL_TREE
, NULL_TREE
, NULL_TREE
};
7819 tree last_iter
= NULL_TREE
, last_count
= NULL_TREE
;
7821 location_t first_loc
= UNKNOWN_LOCATION
;
7823 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7824 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
7826 switch (OMP_CLAUSE_DEPEND_KIND (c
))
7828 case OMP_CLAUSE_DEPEND_IN
:
7831 case OMP_CLAUSE_DEPEND_OUT
:
7832 case OMP_CLAUSE_DEPEND_INOUT
:
7835 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
7838 case OMP_CLAUSE_DEPEND_DEPOBJ
:
7841 case OMP_CLAUSE_DEPEND_SOURCE
:
7842 case OMP_CLAUSE_DEPEND_SINK
:
7847 tree t
= OMP_CLAUSE_DECL (c
);
7848 if (first_loc
== UNKNOWN_LOCATION
)
7849 first_loc
= OMP_CLAUSE_LOCATION (c
);
7850 if (TREE_CODE (t
) == TREE_LIST
7852 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
7854 if (TREE_PURPOSE (t
) != last_iter
)
7856 tree tcnt
= size_one_node
;
7857 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
7859 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
7860 is_gimple_val
, fb_rvalue
) == GS_ERROR
7861 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
7862 is_gimple_val
, fb_rvalue
) == GS_ERROR
7863 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
7864 is_gimple_val
, fb_rvalue
) == GS_ERROR
7865 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
7866 is_gimple_val
, fb_rvalue
)
7869 tree var
= TREE_VEC_ELT (it
, 0);
7870 tree begin
= TREE_VEC_ELT (it
, 1);
7871 tree end
= TREE_VEC_ELT (it
, 2);
7872 tree step
= TREE_VEC_ELT (it
, 3);
7873 tree orig_step
= TREE_VEC_ELT (it
, 4);
7874 tree type
= TREE_TYPE (var
);
7875 tree stype
= TREE_TYPE (step
);
7876 location_t loc
= DECL_SOURCE_LOCATION (var
);
7878 /* Compute count for this iterator as
7880 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7881 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7882 and compute product of those for the entire depend
7884 if (POINTER_TYPE_P (type
))
7885 endmbegin
= fold_build2_loc (loc
, POINTER_DIFF_EXPR
,
7888 endmbegin
= fold_build2_loc (loc
, MINUS_EXPR
, type
,
7890 tree stepm1
= fold_build2_loc (loc
, MINUS_EXPR
, stype
,
7892 build_int_cst (stype
, 1));
7893 tree stepp1
= fold_build2_loc (loc
, PLUS_EXPR
, stype
, step
,
7894 build_int_cst (stype
, 1));
7895 tree pos
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
7896 unshare_expr (endmbegin
),
7898 pos
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
7900 tree neg
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
7902 if (TYPE_UNSIGNED (stype
))
7904 neg
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, neg
);
7905 step
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, step
);
7907 neg
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
7910 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
7913 pos
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, pos
,
7914 build_int_cst (stype
, 0));
7915 cond
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
,
7917 neg
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, neg
,
7918 build_int_cst (stype
, 0));
7919 tree osteptype
= TREE_TYPE (orig_step
);
7920 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
7922 build_int_cst (osteptype
, 0));
7923 tree cnt
= fold_build3_loc (loc
, COND_EXPR
, stype
,
7925 cnt
= fold_convert_loc (loc
, sizetype
, cnt
);
7926 if (gimplify_expr (&cnt
, pre_p
, NULL
, is_gimple_val
,
7927 fb_rvalue
) == GS_ERROR
)
7929 tcnt
= size_binop_loc (loc
, MULT_EXPR
, tcnt
, cnt
);
7931 if (gimplify_expr (&tcnt
, pre_p
, NULL
, is_gimple_val
,
7932 fb_rvalue
) == GS_ERROR
)
7934 last_iter
= TREE_PURPOSE (t
);
7937 if (counts
[i
] == NULL_TREE
)
7938 counts
[i
] = last_count
;
7940 counts
[i
] = size_binop_loc (OMP_CLAUSE_LOCATION (c
),
7941 PLUS_EXPR
, counts
[i
], last_count
);
7946 for (i
= 0; i
< 4; i
++)
7952 tree total
= size_zero_node
;
7953 for (i
= 0; i
< 4; i
++)
7955 unused
[i
] = counts
[i
] == NULL_TREE
&& n
[i
] == 0;
7956 if (counts
[i
] == NULL_TREE
)
7957 counts
[i
] = size_zero_node
;
7959 counts
[i
] = size_binop (PLUS_EXPR
, counts
[i
], size_int (n
[i
]));
7960 if (gimplify_expr (&counts
[i
], pre_p
, NULL
, is_gimple_val
,
7961 fb_rvalue
) == GS_ERROR
)
7963 total
= size_binop (PLUS_EXPR
, total
, counts
[i
]);
7966 if (gimplify_expr (&total
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
7969 bool is_old
= unused
[1] && unused
[3];
7970 tree totalpx
= size_binop (PLUS_EXPR
, unshare_expr (total
),
7971 size_int (is_old
? 1 : 4));
7972 tree type
= build_array_type (ptr_type_node
, build_index_type (totalpx
));
7973 tree array
= create_tmp_var_raw (type
);
7974 TREE_ADDRESSABLE (array
) = 1;
7975 if (!poly_int_tree_p (totalpx
))
7977 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array
)))
7978 gimplify_type_sizes (TREE_TYPE (array
), pre_p
);
7979 if (gimplify_omp_ctxp
)
7981 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
7983 && (ctx
->region_type
== ORT_WORKSHARE
7984 || ctx
->region_type
== ORT_TASKGROUP
7985 || ctx
->region_type
== ORT_SIMD
7986 || ctx
->region_type
== ORT_ACC
))
7987 ctx
= ctx
->outer_context
;
7989 omp_add_variable (ctx
, array
, GOVD_LOCAL
| GOVD_SEEN
);
7991 gimplify_vla_decl (array
, pre_p
);
7994 gimple_add_tmp_var (array
);
7995 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
8000 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8001 build_int_cst (ptr_type_node
, 0));
8002 gimplify_and_add (tem
, pre_p
);
8003 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
8006 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8007 fold_convert (ptr_type_node
, total
));
8008 gimplify_and_add (tem
, pre_p
);
8009 for (i
= 1; i
< (is_old
? 2 : 4); i
++)
8011 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (i
+ !is_old
),
8012 NULL_TREE
, NULL_TREE
);
8013 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, counts
[i
- 1]);
8014 gimplify_and_add (tem
, pre_p
);
8021 for (i
= 0; i
< 4; i
++)
8023 if (i
&& (i
>= j
|| unused
[i
- 1]))
8025 cnts
[i
] = cnts
[i
- 1];
8028 cnts
[i
] = create_tmp_var (sizetype
);
8030 g
= gimple_build_assign (cnts
[i
], size_int (is_old
? 2 : 5));
8035 t
= size_binop (PLUS_EXPR
, counts
[0], size_int (2));
8037 t
= size_binop (PLUS_EXPR
, cnts
[i
- 1], counts
[i
- 1]);
8038 if (gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
8041 g
= gimple_build_assign (cnts
[i
], t
);
8043 gimple_seq_add_stmt (pre_p
, g
);
8046 last_iter
= NULL_TREE
;
8047 tree last_bind
= NULL_TREE
;
8048 tree
*last_body
= NULL
;
8049 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8050 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
8052 switch (OMP_CLAUSE_DEPEND_KIND (c
))
8054 case OMP_CLAUSE_DEPEND_IN
:
8057 case OMP_CLAUSE_DEPEND_OUT
:
8058 case OMP_CLAUSE_DEPEND_INOUT
:
8061 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
8064 case OMP_CLAUSE_DEPEND_DEPOBJ
:
8067 case OMP_CLAUSE_DEPEND_SOURCE
:
8068 case OMP_CLAUSE_DEPEND_SINK
:
8073 tree t
= OMP_CLAUSE_DECL (c
);
8074 if (TREE_CODE (t
) == TREE_LIST
8076 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8078 if (TREE_PURPOSE (t
) != last_iter
)
8081 gimplify_and_add (last_bind
, pre_p
);
8082 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
8083 last_bind
= build3 (BIND_EXPR
, void_type_node
,
8084 BLOCK_VARS (block
), NULL
, block
);
8085 TREE_SIDE_EFFECTS (last_bind
) = 1;
8086 SET_EXPR_LOCATION (last_bind
, OMP_CLAUSE_LOCATION (c
));
8087 tree
*p
= &BIND_EXPR_BODY (last_bind
);
8088 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8090 tree var
= TREE_VEC_ELT (it
, 0);
8091 tree begin
= TREE_VEC_ELT (it
, 1);
8092 tree end
= TREE_VEC_ELT (it
, 2);
8093 tree step
= TREE_VEC_ELT (it
, 3);
8094 tree orig_step
= TREE_VEC_ELT (it
, 4);
8095 tree type
= TREE_TYPE (var
);
8096 location_t loc
= DECL_SOURCE_LOCATION (var
);
8104 if (orig_step > 0) {
8105 if (var < end) goto beg_label;
8107 if (var > end) goto beg_label;
8109 for each iterator, with inner iterators added to
8111 tree beg_label
= create_artificial_label (loc
);
8112 tree cond_label
= NULL_TREE
;
8113 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8115 append_to_statement_list_force (tem
, p
);
8116 tem
= build_and_jump (&cond_label
);
8117 append_to_statement_list_force (tem
, p
);
8118 tem
= build1 (LABEL_EXPR
, void_type_node
, beg_label
);
8119 append_to_statement_list (tem
, p
);
8120 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
8121 NULL_TREE
, NULL_TREE
);
8122 TREE_SIDE_EFFECTS (bind
) = 1;
8123 SET_EXPR_LOCATION (bind
, loc
);
8124 append_to_statement_list_force (bind
, p
);
8125 if (POINTER_TYPE_P (type
))
8126 tem
= build2_loc (loc
, POINTER_PLUS_EXPR
, type
,
8127 var
, fold_convert_loc (loc
, sizetype
,
8130 tem
= build2_loc (loc
, PLUS_EXPR
, type
, var
, step
);
8131 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8133 append_to_statement_list_force (tem
, p
);
8134 tem
= build1 (LABEL_EXPR
, void_type_node
, cond_label
);
8135 append_to_statement_list (tem
, p
);
8136 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8140 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8141 cond
, build_and_jump (&beg_label
),
8143 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8146 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8147 cond
, build_and_jump (&beg_label
),
8149 tree osteptype
= TREE_TYPE (orig_step
);
8150 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8152 build_int_cst (osteptype
, 0));
8153 tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8155 append_to_statement_list_force (tem
, p
);
8156 p
= &BIND_EXPR_BODY (bind
);
8160 last_iter
= TREE_PURPOSE (t
);
8161 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
8163 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
),
8165 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
8167 if (error_operand_p (TREE_VALUE (t
)))
8169 TREE_VALUE (t
) = build_fold_addr_expr (TREE_VALUE (t
));
8170 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8171 NULL_TREE
, NULL_TREE
);
8172 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8173 void_type_node
, r
, TREE_VALUE (t
));
8174 append_to_statement_list_force (tem
, last_body
);
8175 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8176 void_type_node
, cnts
[i
],
8177 size_binop (PLUS_EXPR
, cnts
[i
], size_int (1)));
8178 append_to_statement_list_force (tem
, last_body
);
8179 TREE_VALUE (t
) = null_pointer_node
;
8185 gimplify_and_add (last_bind
, pre_p
);
8186 last_bind
= NULL_TREE
;
8188 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8190 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8191 NULL
, is_gimple_val
, fb_rvalue
);
8192 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8194 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8196 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
8197 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8198 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8200 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8201 NULL_TREE
, NULL_TREE
);
8202 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, OMP_CLAUSE_DECL (c
));
8203 gimplify_and_add (tem
, pre_p
);
8204 g
= gimple_build_assign (cnts
[i
], size_binop (PLUS_EXPR
, cnts
[i
],
8206 gimple_seq_add_stmt (pre_p
, g
);
8210 gimplify_and_add (last_bind
, pre_p
);
8211 tree cond
= boolean_false_node
;
8215 cond
= build2_loc (first_loc
, NE_EXPR
, boolean_type_node
, cnts
[0],
8216 size_binop_loc (first_loc
, PLUS_EXPR
, counts
[0],
8219 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8220 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8222 size_binop_loc (first_loc
, PLUS_EXPR
,
8228 tree prev
= size_int (5);
8229 for (i
= 0; i
< 4; i
++)
8233 prev
= size_binop_loc (first_loc
, PLUS_EXPR
, counts
[i
], prev
);
8234 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8235 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8236 cnts
[i
], unshare_expr (prev
)));
8239 tem
= build3_loc (first_loc
, COND_EXPR
, void_type_node
, cond
,
8240 build_call_expr_loc (first_loc
,
8241 builtin_decl_explicit (BUILT_IN_TRAP
),
8243 gimplify_and_add (tem
, pre_p
);
8244 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
8245 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
8246 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
8247 OMP_CLAUSE_CHAIN (c
) = *list_p
;
8252 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8253 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8254 the struct node to insert the new mapping after (when the struct node is
8255 initially created). PREV_NODE is the first of two or three mappings for a
8256 pointer, and is either:
8257 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8259 - not the node before C. This is true when we have a reference-to-pointer
8260 type (with a mapping for the reference and for the pointer), or for
8261 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8262 If SCP is non-null, the new node is inserted before *SCP.
8263 if SCP is null, the new node is inserted before PREV_NODE.
8265 - PREV_NODE, if SCP is non-null.
8266 - The newly-created ALLOC or RELEASE node, if SCP is null.
8267 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8268 reference to a pointer. */
8271 insert_struct_comp_map (enum tree_code code
, tree c
, tree struct_node
,
8272 tree prev_node
, tree
*scp
)
8274 enum gomp_map_kind mkind
8275 = (code
== OMP_TARGET_EXIT_DATA
|| code
== OACC_EXIT_DATA
)
8276 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
8278 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
8279 tree cl
= scp
? prev_node
: c2
;
8280 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8281 OMP_CLAUSE_DECL (c2
) = unshare_expr (OMP_CLAUSE_DECL (c
));
8282 OMP_CLAUSE_CHAIN (c2
) = scp
? *scp
: prev_node
;
8283 if (OMP_CLAUSE_CHAIN (prev_node
) != c
8284 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node
)) == OMP_CLAUSE_MAP
8285 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8286 == GOMP_MAP_TO_PSET
))
8287 OMP_CLAUSE_SIZE (c2
) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node
));
8289 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (ptr_type_node
);
8291 OMP_CLAUSE_CHAIN (struct_node
) = c2
;
8293 /* We might need to create an additional mapping if we have a reference to a
8294 pointer (in C++). Don't do this if we have something other than a
8295 GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET. */
8296 if (OMP_CLAUSE_CHAIN (prev_node
) != c
8297 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node
)) == OMP_CLAUSE_MAP
8298 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8299 == GOMP_MAP_ALWAYS_POINTER
)
8300 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8301 == GOMP_MAP_ATTACH_DETACH
)))
8303 tree c4
= OMP_CLAUSE_CHAIN (prev_node
);
8304 tree c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
8305 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
8306 OMP_CLAUSE_DECL (c3
) = unshare_expr (OMP_CLAUSE_DECL (c4
));
8307 OMP_CLAUSE_SIZE (c3
) = TYPE_SIZE_UNIT (ptr_type_node
);
8308 OMP_CLAUSE_CHAIN (c3
) = prev_node
;
8310 OMP_CLAUSE_CHAIN (c2
) = c3
;
8321 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8322 and set *BITPOSP and *POFFSETP to the bit offset of the access.
8323 If BASE_REF is non-NULL and the containing object is a reference, set
8324 *BASE_REF to that reference before dereferencing the object.
8325 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8326 has array type, else return NULL. */
8329 extract_base_bit_offset (tree base
, tree
*base_ref
, poly_int64
*bitposp
,
8330 poly_offset_int
*poffsetp
)
8333 poly_int64 bitsize
, bitpos
;
8335 int unsignedp
, reversep
, volatilep
= 0;
8336 poly_offset_int poffset
;
8340 *base_ref
= NULL_TREE
;
8342 while (TREE_CODE (base
) == ARRAY_REF
)
8343 base
= TREE_OPERAND (base
, 0);
8345 if (TREE_CODE (base
) == INDIRECT_REF
)
8346 base
= TREE_OPERAND (base
, 0);
8350 if (TREE_CODE (base
) == ARRAY_REF
)
8352 while (TREE_CODE (base
) == ARRAY_REF
)
8353 base
= TREE_OPERAND (base
, 0);
8354 if (TREE_CODE (base
) != COMPONENT_REF
8355 || TREE_CODE (TREE_TYPE (base
)) != ARRAY_TYPE
)
8358 else if (TREE_CODE (base
) == INDIRECT_REF
8359 && TREE_CODE (TREE_OPERAND (base
, 0)) == COMPONENT_REF
8360 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0)))
8362 base
= TREE_OPERAND (base
, 0);
8365 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
, &mode
,
8366 &unsignedp
, &reversep
, &volatilep
);
8368 tree orig_base
= base
;
8370 if ((TREE_CODE (base
) == INDIRECT_REF
8371 || (TREE_CODE (base
) == MEM_REF
8372 && integer_zerop (TREE_OPERAND (base
, 1))))
8373 && DECL_P (TREE_OPERAND (base
, 0))
8374 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0))) == REFERENCE_TYPE
)
8375 base
= TREE_OPERAND (base
, 0);
8377 gcc_assert (offset
== NULL_TREE
|| poly_int_tree_p (offset
));
8380 poffset
= wi::to_poly_offset (offset
);
8384 if (maybe_ne (bitpos
, 0))
8385 poffset
+= bits_to_bytes_round_down (bitpos
);
8388 *poffsetp
= poffset
;
8390 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8391 if (base_ref
&& orig_base
!= base
)
8392 *base_ref
= orig_base
;
8397 /* Returns true if EXPR is or contains (as a sub-component) BASE_PTR. */
8400 is_or_contains_p (tree expr
, tree base_ptr
)
8402 while (expr
!= base_ptr
)
8403 if (TREE_CODE (base_ptr
) == COMPONENT_REF
)
8404 base_ptr
= TREE_OPERAND (base_ptr
, 0);
8407 return expr
== base_ptr
;
8410 /* Implement OpenMP 5.x map ordering rules for target directives. There are
8411 several rules, and with some level of ambiguity, hopefully we can at least
8412 collect the complexity here in one place. */
8415 omp_target_reorder_clauses (tree
*list_p
)
8417 /* Collect refs to alloc/release/delete maps. */
8418 auto_vec
<tree
, 32> ard
;
8420 while (*cp
!= NULL_TREE
)
8421 if (OMP_CLAUSE_CODE (*cp
) == OMP_CLAUSE_MAP
8422 && (OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_ALLOC
8423 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_RELEASE
8424 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_DELETE
))
8426 /* Unlink cp and push to ard. */
8428 tree nc
= OMP_CLAUSE_CHAIN (c
);
8432 /* Any associated pointer type maps should also move along. */
8433 while (*cp
!= NULL_TREE
8434 && OMP_CLAUSE_CODE (*cp
) == OMP_CLAUSE_MAP
8435 && (OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
8436 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8437 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_ATTACH_DETACH
8438 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_POINTER
8439 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_ALWAYS_POINTER
8440 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_TO_PSET
))
8443 nc
= OMP_CLAUSE_CHAIN (c
);
8449 cp
= &OMP_CLAUSE_CHAIN (*cp
);
8451 /* Link alloc/release/delete maps to the end of list. */
8452 for (unsigned int i
= 0; i
< ard
.length (); i
++)
8455 cp
= &OMP_CLAUSE_CHAIN (ard
[i
]);
8459 /* OpenMP 5.0 requires that pointer variables are mapped before
8460 its use as a base-pointer. */
8461 auto_vec
<tree
*, 32> atf
;
8462 for (tree
*cp
= list_p
; *cp
; cp
= &OMP_CLAUSE_CHAIN (*cp
))
8463 if (OMP_CLAUSE_CODE (*cp
) == OMP_CLAUSE_MAP
)
8465 /* Collect alloc, to, from, to/from clause tree pointers. */
8466 gomp_map_kind k
= OMP_CLAUSE_MAP_KIND (*cp
);
8467 if (k
== GOMP_MAP_ALLOC
8469 || k
== GOMP_MAP_FROM
8470 || k
== GOMP_MAP_TOFROM
8471 || k
== GOMP_MAP_ALWAYS_TO
8472 || k
== GOMP_MAP_ALWAYS_FROM
8473 || k
== GOMP_MAP_ALWAYS_TOFROM
)
8477 for (unsigned int i
= 0; i
< atf
.length (); i
++)
8481 tree decl
= OMP_CLAUSE_DECL (*cp
);
8482 if (TREE_CODE (decl
) == INDIRECT_REF
|| TREE_CODE (decl
) == MEM_REF
)
8484 tree base_ptr
= TREE_OPERAND (decl
, 0);
8485 STRIP_TYPE_NOPS (base_ptr
);
8486 for (unsigned int j
= i
+ 1; j
< atf
.length (); j
++)
8489 tree decl2
= OMP_CLAUSE_DECL (*cp2
);
8490 if (is_or_contains_p (decl2
, base_ptr
))
8492 /* Move *cp2 to before *cp. */
8494 *cp2
= OMP_CLAUSE_CHAIN (c
);
8495 OMP_CLAUSE_CHAIN (c
) = *cp
;
8504 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8505 and previous omp contexts. */
8508 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
8509 enum omp_region_type region_type
,
8510 enum tree_code code
)
8512 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
8514 hash_map
<tree
, tree
> *struct_map_to_clause
= NULL
;
8515 hash_set
<tree
> *struct_deref_set
= NULL
;
8516 tree
*prev_list_p
= NULL
, *orig_list_p
= list_p
;
8517 int handled_depend_iterators
= -1;
8520 ctx
= new_omp_context (region_type
);
8522 outer_ctx
= ctx
->outer_context
;
8523 if (code
== OMP_TARGET
)
8525 if (!lang_GNU_Fortran ())
8526 ctx
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
8527 ctx
->defaultmap
[GDMK_SCALAR
] = GOVD_FIRSTPRIVATE
;
8529 if (!lang_GNU_Fortran ())
8533 case OMP_TARGET_DATA
:
8534 case OMP_TARGET_ENTER_DATA
:
8535 case OMP_TARGET_EXIT_DATA
:
8537 case OACC_HOST_DATA
:
8540 ctx
->target_firstprivatize_array_bases
= true;
8545 if (code
== OMP_TARGET
8546 || code
== OMP_TARGET_DATA
8547 || code
== OMP_TARGET_ENTER_DATA
8548 || code
== OMP_TARGET_EXIT_DATA
)
8549 omp_target_reorder_clauses (list_p
);
8551 while ((c
= *list_p
) != NULL
)
8553 bool remove
= false;
8554 bool notice_outer
= true;
8555 const char *check_non_private
= NULL
;
8559 switch (OMP_CLAUSE_CODE (c
))
8561 case OMP_CLAUSE_PRIVATE
:
8562 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
8563 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
8565 flags
|= GOVD_PRIVATE_OUTER_REF
;
8566 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
8569 notice_outer
= false;
8571 case OMP_CLAUSE_SHARED
:
8572 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
8574 case OMP_CLAUSE_FIRSTPRIVATE
:
8575 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8576 check_non_private
= "firstprivate";
8578 case OMP_CLAUSE_LASTPRIVATE
:
8579 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
8582 case OMP_DISTRIBUTE
:
8583 error_at (OMP_CLAUSE_LOCATION (c
),
8584 "conditional %<lastprivate%> clause on "
8585 "%qs construct", "distribute");
8586 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8589 error_at (OMP_CLAUSE_LOCATION (c
),
8590 "conditional %<lastprivate%> clause on "
8591 "%qs construct", "taskloop");
8592 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8597 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
8598 if (code
!= OMP_LOOP
)
8599 check_non_private
= "lastprivate";
8600 decl
= OMP_CLAUSE_DECL (c
);
8601 if (error_operand_p (decl
))
8603 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
8604 && !lang_hooks
.decls
.omp_scalar_p (decl
))
8606 error_at (OMP_CLAUSE_LOCATION (c
),
8607 "non-scalar variable %qD in conditional "
8608 "%<lastprivate%> clause", decl
);
8609 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8611 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
8612 flags
|= GOVD_LASTPRIVATE_CONDITIONAL
;
8614 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
8615 || ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
8616 == ORT_COMBINED_TEAMS
))
8617 && splay_tree_lookup (outer_ctx
->variables
,
8618 (splay_tree_key
) decl
) == NULL
)
8620 omp_add_variable (outer_ctx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
8621 if (outer_ctx
->outer_context
)
8622 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8625 && (outer_ctx
->region_type
& ORT_TASK
) != 0
8626 && outer_ctx
->combined_loop
8627 && splay_tree_lookup (outer_ctx
->variables
,
8628 (splay_tree_key
) decl
) == NULL
)
8630 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
8631 if (outer_ctx
->outer_context
)
8632 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8635 && (outer_ctx
->region_type
== ORT_WORKSHARE
8636 || outer_ctx
->region_type
== ORT_ACC
)
8637 && outer_ctx
->combined_loop
8638 && splay_tree_lookup (outer_ctx
->variables
,
8639 (splay_tree_key
) decl
) == NULL
8640 && !omp_check_private (outer_ctx
, decl
, false))
8642 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
8643 if (outer_ctx
->outer_context
8644 && (outer_ctx
->outer_context
->region_type
8645 == ORT_COMBINED_PARALLEL
)
8646 && splay_tree_lookup (outer_ctx
->outer_context
->variables
,
8647 (splay_tree_key
) decl
) == NULL
)
8649 struct gimplify_omp_ctx
*octx
= outer_ctx
->outer_context
;
8650 omp_add_variable (octx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
8651 if (octx
->outer_context
)
8653 octx
= octx
->outer_context
;
8654 if (octx
->region_type
== ORT_WORKSHARE
8655 && octx
->combined_loop
8656 && splay_tree_lookup (octx
->variables
,
8657 (splay_tree_key
) decl
) == NULL
8658 && !omp_check_private (octx
, decl
, false))
8660 omp_add_variable (octx
, decl
,
8661 GOVD_LASTPRIVATE
| GOVD_SEEN
);
8662 octx
= octx
->outer_context
;
8664 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
8665 == ORT_COMBINED_TEAMS
)
8666 && (splay_tree_lookup (octx
->variables
,
8667 (splay_tree_key
) decl
)
8670 omp_add_variable (octx
, decl
,
8671 GOVD_SHARED
| GOVD_SEEN
);
8672 octx
= octx
->outer_context
;
8676 omp_notice_variable (octx
, decl
, true);
8679 else if (outer_ctx
->outer_context
)
8680 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8683 case OMP_CLAUSE_REDUCTION
:
8684 if (OMP_CLAUSE_REDUCTION_TASK (c
))
8686 if (region_type
== ORT_WORKSHARE
)
8689 nowait
= omp_find_clause (*list_p
,
8690 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8692 && (outer_ctx
== NULL
8693 || outer_ctx
->region_type
!= ORT_COMBINED_PARALLEL
))
8695 error_at (OMP_CLAUSE_LOCATION (c
),
8696 "%<task%> reduction modifier on a construct "
8697 "with a %<nowait%> clause");
8698 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
8701 else if ((region_type
& ORT_PARALLEL
) != ORT_PARALLEL
)
8703 error_at (OMP_CLAUSE_LOCATION (c
),
8704 "invalid %<task%> reduction modifier on construct "
8705 "other than %<parallel%>, %qs or %<sections%>",
8706 lang_GNU_Fortran () ? "do" : "for");
8707 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
8710 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
8714 error_at (OMP_CLAUSE_LOCATION (c
),
8715 "%<inscan%> %<reduction%> clause on "
8716 "%qs construct", "sections");
8717 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8720 error_at (OMP_CLAUSE_LOCATION (c
),
8721 "%<inscan%> %<reduction%> clause on "
8722 "%qs construct", "parallel");
8723 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8726 error_at (OMP_CLAUSE_LOCATION (c
),
8727 "%<inscan%> %<reduction%> clause on "
8728 "%qs construct", "teams");
8729 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8732 error_at (OMP_CLAUSE_LOCATION (c
),
8733 "%<inscan%> %<reduction%> clause on "
8734 "%qs construct", "taskloop");
8735 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8741 case OMP_CLAUSE_IN_REDUCTION
:
8742 case OMP_CLAUSE_TASK_REDUCTION
:
8743 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
8744 /* OpenACC permits reductions on private variables. */
8745 if (!(region_type
& ORT_ACC
)
8746 /* taskgroup is actually not a worksharing region. */
8747 && code
!= OMP_TASKGROUP
)
8748 check_non_private
= omp_clause_code_name
[OMP_CLAUSE_CODE (c
)];
8749 decl
= OMP_CLAUSE_DECL (c
);
8750 if (TREE_CODE (decl
) == MEM_REF
)
8752 tree type
= TREE_TYPE (decl
);
8753 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
8754 NULL
, is_gimple_val
, fb_rvalue
, false)
8760 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8763 omp_firstprivatize_variable (ctx
, v
);
8764 omp_notice_variable (ctx
, v
, true);
8766 decl
= TREE_OPERAND (decl
, 0);
8767 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
8769 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
8770 NULL
, is_gimple_val
, fb_rvalue
, false)
8776 v
= TREE_OPERAND (decl
, 1);
8779 omp_firstprivatize_variable (ctx
, v
);
8780 omp_notice_variable (ctx
, v
, true);
8782 decl
= TREE_OPERAND (decl
, 0);
8784 if (TREE_CODE (decl
) == ADDR_EXPR
8785 || TREE_CODE (decl
) == INDIRECT_REF
)
8786 decl
= TREE_OPERAND (decl
, 0);
8789 case OMP_CLAUSE_LINEAR
:
8790 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
8791 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8798 if (code
== OMP_SIMD
8799 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
8801 struct gimplify_omp_ctx
*octx
= outer_ctx
;
8803 && octx
->region_type
== ORT_WORKSHARE
8804 && octx
->combined_loop
8805 && !octx
->distribute
)
8807 if (octx
->outer_context
8808 && (octx
->outer_context
->region_type
8809 == ORT_COMBINED_PARALLEL
))
8810 octx
= octx
->outer_context
->outer_context
;
8812 octx
= octx
->outer_context
;
8815 && octx
->region_type
== ORT_WORKSHARE
8816 && octx
->combined_loop
8817 && octx
->distribute
)
8819 error_at (OMP_CLAUSE_LOCATION (c
),
8820 "%<linear%> clause for variable other than "
8821 "loop iterator specified on construct "
8822 "combined with %<distribute%>");
8827 /* For combined #pragma omp parallel for simd, need to put
8828 lastprivate and perhaps firstprivate too on the
8829 parallel. Similarly for #pragma omp for simd. */
8830 struct gimplify_omp_ctx
*octx
= outer_ctx
;
8834 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8835 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8837 decl
= OMP_CLAUSE_DECL (c
);
8838 if (error_operand_p (decl
))
8844 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
8845 flags
|= GOVD_FIRSTPRIVATE
;
8846 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8847 flags
|= GOVD_LASTPRIVATE
;
8849 && octx
->region_type
== ORT_WORKSHARE
8850 && octx
->combined_loop
)
8852 if (octx
->outer_context
8853 && (octx
->outer_context
->region_type
8854 == ORT_COMBINED_PARALLEL
))
8855 octx
= octx
->outer_context
;
8856 else if (omp_check_private (octx
, decl
, false))
8860 && (octx
->region_type
& ORT_TASK
) != 0
8861 && octx
->combined_loop
)
8864 && octx
->region_type
== ORT_COMBINED_PARALLEL
8865 && ctx
->region_type
== ORT_WORKSHARE
8866 && octx
== outer_ctx
)
8867 flags
= GOVD_SEEN
| GOVD_SHARED
;
8869 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
8870 == ORT_COMBINED_TEAMS
))
8871 flags
= GOVD_SEEN
| GOVD_SHARED
;
8873 && octx
->region_type
== ORT_COMBINED_TARGET
)
8875 flags
&= ~GOVD_LASTPRIVATE
;
8876 if (flags
== GOVD_SEEN
)
8882 = splay_tree_lookup (octx
->variables
,
8883 (splay_tree_key
) decl
);
8884 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
8889 omp_add_variable (octx
, decl
, flags
);
8890 if (octx
->outer_context
== NULL
)
8892 octx
= octx
->outer_context
;
8897 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8898 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
8899 omp_notice_variable (octx
, decl
, true);
8901 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
8902 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8903 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8905 notice_outer
= false;
8906 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
8910 case OMP_CLAUSE_MAP
:
8911 decl
= OMP_CLAUSE_DECL (c
);
8912 if (error_operand_p (decl
))
8919 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
8922 case OMP_TARGET_DATA
:
8923 case OMP_TARGET_ENTER_DATA
:
8924 case OMP_TARGET_EXIT_DATA
:
8925 case OACC_ENTER_DATA
:
8926 case OACC_EXIT_DATA
:
8927 case OACC_HOST_DATA
:
8928 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8929 || (OMP_CLAUSE_MAP_KIND (c
)
8930 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
8931 /* For target {,enter ,exit }data only the array slice is
8932 mapped, but not the pointer to it. */
8938 /* For Fortran, not only the pointer to the data is mapped but also
8939 the address of the pointer, the array descriptor etc.; for
8940 'exit data' - and in particular for 'delete:' - having an 'alloc:'
8941 does not make sense. Likewise, for 'update' only transferring the
8942 data itself is needed as the rest has been handled in previous
8943 directives. However, for 'exit data', the array descriptor needs
8944 to be delete; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
8946 NOTE: Generally, it is not safe to perform "enter data" operations
8947 on arrays where the data *or the descriptor* may go out of scope
8948 before a corresponding "exit data" operation -- and such a
8949 descriptor may be synthesized temporarily, e.g. to pass an
8950 explicit-shape array to a function expecting an assumed-shape
8951 argument. Performing "enter data" inside the called function
8952 would thus be problematic. */
8953 if (code
== OMP_TARGET_EXIT_DATA
8954 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
)
8955 OMP_CLAUSE_SET_MAP_KIND (c
, OMP_CLAUSE_MAP_KIND (*prev_list_p
)
8957 ? GOMP_MAP_DELETE
: GOMP_MAP_RELEASE
);
8958 else if ((code
== OMP_TARGET_EXIT_DATA
|| code
== OMP_TARGET_UPDATE
)
8959 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
8960 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
))
8965 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
8967 struct gimplify_omp_ctx
*octx
;
8968 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
8970 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
8973 = splay_tree_lookup (octx
->variables
,
8974 (splay_tree_key
) decl
);
8976 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
8977 "declared in enclosing %<host_data%> region",
8981 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
8982 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
8983 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8984 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
8985 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8990 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8991 || (OMP_CLAUSE_MAP_KIND (c
)
8992 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
8993 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
8994 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
8997 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
8999 if ((region_type
& ORT_TARGET
) != 0)
9000 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
9001 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
9007 if (TREE_CODE (d
) == ARRAY_REF
)
9009 while (TREE_CODE (d
) == ARRAY_REF
)
9010 d
= TREE_OPERAND (d
, 0);
9011 if (TREE_CODE (d
) == COMPONENT_REF
9012 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
9015 pd
= &OMP_CLAUSE_DECL (c
);
9017 && TREE_CODE (decl
) == INDIRECT_REF
9018 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
9019 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9022 pd
= &TREE_OPERAND (decl
, 0);
9023 decl
= TREE_OPERAND (decl
, 0);
9025 bool indir_p
= false;
9026 tree orig_decl
= decl
;
9027 tree decl_ref
= NULL_TREE
;
9028 if ((region_type
& (ORT_ACC
| ORT_TARGET
| ORT_TARGET_DATA
)) != 0
9029 && TREE_CODE (*pd
) == COMPONENT_REF
9030 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
9031 && code
!= OACC_UPDATE
)
9033 while (TREE_CODE (decl
) == COMPONENT_REF
)
9035 decl
= TREE_OPERAND (decl
, 0);
9036 if (((TREE_CODE (decl
) == MEM_REF
9037 && integer_zerop (TREE_OPERAND (decl
, 1)))
9038 || INDIRECT_REF_P (decl
))
9039 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9043 decl
= TREE_OPERAND (decl
, 0);
9045 if (TREE_CODE (decl
) == INDIRECT_REF
9046 && DECL_P (TREE_OPERAND (decl
, 0))
9047 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9051 decl
= TREE_OPERAND (decl
, 0);
9055 else if (TREE_CODE (decl
) == COMPONENT_REF
)
9057 while (TREE_CODE (decl
) == COMPONENT_REF
)
9058 decl
= TREE_OPERAND (decl
, 0);
9059 if (TREE_CODE (decl
) == INDIRECT_REF
9060 && DECL_P (TREE_OPERAND (decl
, 0))
9061 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9063 decl
= TREE_OPERAND (decl
, 0);
9065 if (decl
!= orig_decl
&& DECL_P (decl
) && indir_p
)
9068 = ((code
== OACC_EXIT_DATA
|| code
== OMP_TARGET_EXIT_DATA
)
9069 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH
);
9070 /* We have a dereference of a struct member. Make this an
9071 attach/detach operation, and ensure the base pointer is
9072 mapped as a FIRSTPRIVATE_POINTER. */
9073 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
9074 flags
= GOVD_MAP
| GOVD_SEEN
| GOVD_EXPLICIT
;
9075 tree next_clause
= OMP_CLAUSE_CHAIN (c
);
9076 if (k
== GOMP_MAP_ATTACH
9077 && code
!= OACC_ENTER_DATA
9078 && code
!= OMP_TARGET_ENTER_DATA
9080 || (OMP_CLAUSE_CODE (next_clause
) != OMP_CLAUSE_MAP
)
9081 || (OMP_CLAUSE_MAP_KIND (next_clause
)
9082 != GOMP_MAP_POINTER
)
9083 || OMP_CLAUSE_DECL (next_clause
) != decl
)
9084 && (!struct_deref_set
9085 || !struct_deref_set
->contains (decl
)))
9087 if (!struct_deref_set
)
9088 struct_deref_set
= new hash_set
<tree
> ();
9089 /* As well as the attach, we also need a
9090 FIRSTPRIVATE_POINTER clause to properly map the
9091 pointer to the struct base. */
9092 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9094 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ALLOC
);
9095 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2
)
9098 = build_int_cst (build_pointer_type (char_type_node
),
9100 OMP_CLAUSE_DECL (c2
)
9101 = build2 (MEM_REF
, char_type_node
,
9102 decl_ref
? decl_ref
: decl
, charptr_zero
);
9103 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
9104 tree c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9106 OMP_CLAUSE_SET_MAP_KIND (c3
,
9107 GOMP_MAP_FIRSTPRIVATE_POINTER
);
9108 OMP_CLAUSE_DECL (c3
) = decl
;
9109 OMP_CLAUSE_SIZE (c3
) = size_zero_node
;
9110 tree mapgrp
= *prev_list_p
;
9112 OMP_CLAUSE_CHAIN (c3
) = mapgrp
;
9113 OMP_CLAUSE_CHAIN (c2
) = c3
;
9115 struct_deref_set
->add (decl
);
9119 /* An "attach/detach" operation on an update directive should
9120 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
9121 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
9122 depends on the previous mapping. */
9123 if (code
== OACC_UPDATE
9124 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9125 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_ALWAYS_POINTER
);
9127 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
9128 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
9129 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
9130 && code
!= OACC_UPDATE
9131 && code
!= OMP_TARGET_UPDATE
)
9133 if (error_operand_p (decl
))
9139 tree stype
= TREE_TYPE (decl
);
9140 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
9141 stype
= TREE_TYPE (stype
);
9142 if (TYPE_SIZE_UNIT (stype
) == NULL
9143 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
9145 error_at (OMP_CLAUSE_LOCATION (c
),
9146 "mapping field %qE of variable length "
9147 "structure", OMP_CLAUSE_DECL (c
));
9152 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
9153 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9155 /* Error recovery. */
9156 if (prev_list_p
== NULL
)
9161 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
9163 tree ch
= OMP_CLAUSE_CHAIN (*prev_list_p
);
9164 if (ch
== NULL_TREE
|| OMP_CLAUSE_CHAIN (ch
) != c
)
9172 poly_offset_int offset1
;
9177 = extract_base_bit_offset (OMP_CLAUSE_DECL (c
), &base_ref
,
9178 &bitpos1
, &offset1
);
9180 gcc_assert (base
== decl
);
9183 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
9184 bool ptr
= (OMP_CLAUSE_MAP_KIND (c
)
9185 == GOMP_MAP_ALWAYS_POINTER
);
9186 bool attach_detach
= (OMP_CLAUSE_MAP_KIND (c
)
9187 == GOMP_MAP_ATTACH_DETACH
);
9188 bool attach
= OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
9189 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
;
9190 bool has_attachments
= false;
9191 /* For OpenACC, pointers in structs should trigger an
9194 && ((region_type
& (ORT_ACC
| ORT_TARGET
| ORT_TARGET_DATA
))
9195 || code
== OMP_TARGET_ENTER_DATA
9196 || code
== OMP_TARGET_EXIT_DATA
))
9199 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9200 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9201 have detected a case that needs a GOMP_MAP_STRUCT
9204 = ((code
== OACC_EXIT_DATA
|| code
== OMP_TARGET_EXIT_DATA
)
9205 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH
);
9206 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
9207 has_attachments
= true;
9209 if (n
== NULL
|| (n
->value
& GOVD_MAP
) == 0)
9211 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9213 gomp_map_kind k
= attach
? GOMP_MAP_FORCE_PRESENT
9216 OMP_CLAUSE_SET_MAP_KIND (l
, k
);
9218 OMP_CLAUSE_DECL (l
) = unshare_expr (base_ref
);
9220 OMP_CLAUSE_DECL (l
) = decl
;
9224 : DECL_P (OMP_CLAUSE_DECL (l
))
9225 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l
))
9226 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l
))));
9227 if (struct_map_to_clause
== NULL
)
9228 struct_map_to_clause
= new hash_map
<tree
, tree
>;
9229 struct_map_to_clause
->put (decl
, l
);
9230 if (ptr
|| attach_detach
)
9232 insert_struct_comp_map (code
, c
, l
, *prev_list_p
,
9239 OMP_CLAUSE_CHAIN (l
) = c
;
9241 list_p
= &OMP_CLAUSE_CHAIN (l
);
9243 if (base_ref
&& code
== OMP_TARGET
)
9245 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9247 enum gomp_map_kind mkind
9248 = GOMP_MAP_FIRSTPRIVATE_REFERENCE
;
9249 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
9250 OMP_CLAUSE_DECL (c2
) = decl
;
9251 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
9252 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
9253 OMP_CLAUSE_CHAIN (l
) = c2
;
9255 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
9256 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
))
9260 if (has_attachments
)
9261 flags
|= GOVD_MAP_HAS_ATTACHMENTS
;
9264 else if (struct_map_to_clause
)
9266 tree
*osc
= struct_map_to_clause
->get (decl
);
9267 tree
*sc
= NULL
, *scp
= NULL
;
9268 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
))
9271 n
->value
|= GOVD_SEEN
;
9272 sc
= &OMP_CLAUSE_CHAIN (*osc
);
9274 && (OMP_CLAUSE_MAP_KIND (*sc
)
9275 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9276 sc
= &OMP_CLAUSE_CHAIN (*sc
);
9277 /* Here "prev_list_p" is the end of the inserted
9278 alloc/release nodes after the struct node, OSC. */
9279 for (; *sc
!= c
; sc
= &OMP_CLAUSE_CHAIN (*sc
))
9280 if ((ptr
|| attach_detach
) && sc
== prev_list_p
)
9282 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9284 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9286 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9291 tree sc_decl
= OMP_CLAUSE_DECL (*sc
);
9292 poly_offset_int offsetn
;
9295 = extract_base_bit_offset (sc_decl
, NULL
,
9296 &bitposn
, &offsetn
);
9301 if ((region_type
& ORT_ACC
) != 0)
9303 /* This duplicate checking code is currently only
9304 enabled for OpenACC. */
9305 tree d1
= OMP_CLAUSE_DECL (*sc
);
9306 tree d2
= OMP_CLAUSE_DECL (c
);
9307 while (TREE_CODE (d1
) == ARRAY_REF
)
9308 d1
= TREE_OPERAND (d1
, 0);
9309 while (TREE_CODE (d2
) == ARRAY_REF
)
9310 d2
= TREE_OPERAND (d2
, 0);
9311 if (TREE_CODE (d1
) == INDIRECT_REF
)
9312 d1
= TREE_OPERAND (d1
, 0);
9313 if (TREE_CODE (d2
) == INDIRECT_REF
)
9314 d2
= TREE_OPERAND (d2
, 0);
9315 while (TREE_CODE (d1
) == COMPONENT_REF
)
9316 if (TREE_CODE (d2
) == COMPONENT_REF
9317 && TREE_OPERAND (d1
, 1)
9318 == TREE_OPERAND (d2
, 1))
9320 d1
= TREE_OPERAND (d1
, 0);
9321 d2
= TREE_OPERAND (d2
, 0);
9327 error_at (OMP_CLAUSE_LOCATION (c
),
9328 "%qE appears more than once in map "
9329 "clauses", OMP_CLAUSE_DECL (c
));
9334 if (maybe_lt (offset1
, offsetn
)
9335 || (known_eq (offset1
, offsetn
)
9336 && maybe_lt (bitpos1
, bitposn
)))
9338 if (ptr
|| attach_detach
)
9347 OMP_CLAUSE_SIZE (*osc
)
9348 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
),
9350 if (ptr
|| attach_detach
)
9352 tree cl
= insert_struct_comp_map (code
, c
, NULL
,
9354 if (sc
== prev_list_p
)
9361 *prev_list_p
= OMP_CLAUSE_CHAIN (c
);
9362 list_p
= prev_list_p
;
9364 OMP_CLAUSE_CHAIN (c
) = *sc
;
9371 *list_p
= OMP_CLAUSE_CHAIN (c
);
9372 OMP_CLAUSE_CHAIN (c
) = *sc
;
9379 if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
)
9387 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_POINTER
9388 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH_DETACH
9389 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
9390 && OMP_CLAUSE_CHAIN (c
)
9391 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
)) == OMP_CLAUSE_MAP
9392 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
9393 == GOMP_MAP_ALWAYS_POINTER
)
9394 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
9395 == GOMP_MAP_ATTACH_DETACH
)
9396 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
9397 == GOMP_MAP_TO_PSET
)))
9398 prev_list_p
= list_p
;
9404 /* DECL_P (decl) == true */
9406 if (struct_map_to_clause
9407 && (sc
= struct_map_to_clause
->get (decl
)) != NULL
9408 && OMP_CLAUSE_MAP_KIND (*sc
) == GOMP_MAP_STRUCT
9409 && decl
== OMP_CLAUSE_DECL (*sc
))
9411 /* We have found a map of the whole structure after a
9412 leading GOMP_MAP_STRUCT has been created, so refill the
9413 leading clause into a map of the whole structure
9414 variable, and remove the current one.
9415 TODO: we should be able to remove some maps of the
9416 following structure element maps if they are of
9417 compatible TO/FROM/ALLOC type. */
9418 OMP_CLAUSE_SET_MAP_KIND (*sc
, OMP_CLAUSE_MAP_KIND (c
));
9419 OMP_CLAUSE_SIZE (*sc
) = unshare_expr (OMP_CLAUSE_SIZE (c
));
9424 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
9425 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
9426 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
9427 flags
|= GOVD_MAP_ALWAYS_TO
;
9429 if ((code
== OMP_TARGET
9430 || code
== OMP_TARGET_DATA
9431 || code
== OMP_TARGET_ENTER_DATA
9432 || code
== OMP_TARGET_EXIT_DATA
)
9433 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9435 for (struct gimplify_omp_ctx
*octx
= outer_ctx
; octx
;
9436 octx
= octx
->outer_context
)
9439 = splay_tree_lookup (octx
->variables
,
9440 (splay_tree_key
) OMP_CLAUSE_DECL (c
));
9441 /* If this is contained in an outer OpenMP region as a
9442 firstprivate value, remove the attach/detach. */
9443 if (n
&& (n
->value
& GOVD_FIRSTPRIVATE
))
9445 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
9450 enum gomp_map_kind map_kind
= (code
== OMP_TARGET_EXIT_DATA
9453 OMP_CLAUSE_SET_MAP_KIND (c
, map_kind
);
9458 case OMP_CLAUSE_DEPEND
:
9459 if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
9461 tree deps
= OMP_CLAUSE_DECL (c
);
9462 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
9464 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
9465 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
9466 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
9467 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
9468 deps
= TREE_CHAIN (deps
);
9472 else if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
9474 if (handled_depend_iterators
== -1)
9475 handled_depend_iterators
= gimplify_omp_depend (list_p
, pre_p
);
9476 if (handled_depend_iterators
)
9478 if (handled_depend_iterators
== 2)
9482 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
9484 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
9485 NULL
, is_gimple_val
, fb_rvalue
);
9486 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
9488 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
9493 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
9494 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
9495 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9503 case OMP_CLAUSE_FROM
:
9504 case OMP_CLAUSE__CACHE_
:
9505 decl
= OMP_CLAUSE_DECL (c
);
9506 if (error_operand_p (decl
))
9511 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
9512 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
9513 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9514 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
9515 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9522 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
9523 NULL
, is_gimple_lvalue
, fb_lvalue
)
9533 case OMP_CLAUSE_USE_DEVICE_PTR
:
9534 case OMP_CLAUSE_USE_DEVICE_ADDR
:
9535 flags
= GOVD_EXPLICIT
;
9538 case OMP_CLAUSE_IS_DEVICE_PTR
:
9539 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
9543 decl
= OMP_CLAUSE_DECL (c
);
9545 if (error_operand_p (decl
))
9550 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
9552 tree t
= omp_member_access_dummy_var (decl
);
9555 tree v
= DECL_VALUE_EXPR (decl
);
9556 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
9558 omp_notice_variable (outer_ctx
, t
, true);
9561 if (code
== OACC_DATA
9562 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9563 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
9564 flags
|= GOVD_MAP_0LEN_ARRAY
;
9565 omp_add_variable (ctx
, decl
, flags
);
9566 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9567 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
9568 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9569 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9571 omp_add_variable (ctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
9572 GOVD_LOCAL
| GOVD_SEEN
);
9573 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
9574 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
9576 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
9578 omp_add_variable (ctx
,
9579 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
9580 GOVD_LOCAL
| GOVD_SEEN
);
9581 gimplify_omp_ctxp
= ctx
;
9582 push_gimplify_context ();
9584 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9585 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9587 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
9588 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
9589 pop_gimplify_context
9590 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
9591 push_gimplify_context ();
9592 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
9593 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9594 pop_gimplify_context
9595 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
9596 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
9597 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
9599 gimplify_omp_ctxp
= outer_ctx
;
9601 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
9602 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
9604 gimplify_omp_ctxp
= ctx
;
9605 push_gimplify_context ();
9606 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
9608 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
9610 TREE_SIDE_EFFECTS (bind
) = 1;
9611 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
9612 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
9614 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
9615 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
9616 pop_gimplify_context
9617 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
9618 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
9620 gimplify_omp_ctxp
= outer_ctx
;
9622 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
9623 && OMP_CLAUSE_LINEAR_STMT (c
))
9625 gimplify_omp_ctxp
= ctx
;
9626 push_gimplify_context ();
9627 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
9629 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
9631 TREE_SIDE_EFFECTS (bind
) = 1;
9632 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
9633 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
9635 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
9636 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
9637 pop_gimplify_context
9638 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
9639 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
9641 gimplify_omp_ctxp
= outer_ctx
;
9647 case OMP_CLAUSE_COPYIN
:
9648 case OMP_CLAUSE_COPYPRIVATE
:
9649 decl
= OMP_CLAUSE_DECL (c
);
9650 if (error_operand_p (decl
))
9655 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
9657 && !omp_check_private (ctx
, decl
, true))
9660 if (is_global_var (decl
))
9662 if (DECL_THREAD_LOCAL_P (decl
))
9664 else if (DECL_HAS_VALUE_EXPR_P (decl
))
9666 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
9670 && DECL_THREAD_LOCAL_P (value
))
9675 error_at (OMP_CLAUSE_LOCATION (c
),
9676 "copyprivate variable %qE is not threadprivate"
9677 " or private in outer context", DECL_NAME (decl
));
9680 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9681 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
9682 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
9684 && ((region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
9685 || (region_type
== ORT_WORKSHARE
9686 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9687 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
9688 || code
== OMP_LOOP
)))
9689 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
9690 || (code
== OMP_LOOP
9691 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9692 && ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
9693 == ORT_COMBINED_TEAMS
))))
9696 = splay_tree_lookup (outer_ctx
->variables
,
9697 (splay_tree_key
)decl
);
9698 if (on
== NULL
|| (on
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9700 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9701 && TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
9702 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
9703 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
9704 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
9706 omp_firstprivatize_variable (outer_ctx
, decl
);
9709 omp_add_variable (outer_ctx
, decl
,
9710 GOVD_SEEN
| GOVD_SHARED
);
9711 if (outer_ctx
->outer_context
)
9712 omp_notice_variable (outer_ctx
->outer_context
, decl
,
9718 omp_notice_variable (outer_ctx
, decl
, true);
9719 if (check_non_private
9720 && region_type
== ORT_WORKSHARE
9721 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
9722 || decl
== OMP_CLAUSE_DECL (c
)
9723 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
9724 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
9726 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
9727 == POINTER_PLUS_EXPR
9728 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9729 (OMP_CLAUSE_DECL (c
), 0), 0))
9731 && omp_check_private (ctx
, decl
, false))
9733 error ("%s variable %qE is private in outer context",
9734 check_non_private
, DECL_NAME (decl
));
9740 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
9741 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
9744 for (int i
= 0; i
< 2; i
++)
9745 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
9747 case VOID_CST
: p
[i
] = "cancel"; break;
9748 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
9749 case OMP_SIMD
: p
[i
] = "simd"; break;
9750 case OMP_TASK
: p
[i
] = "task"; break;
9751 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
9752 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
9753 case OMP_TARGET
: p
[i
] = "target"; break;
9754 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
9755 case OMP_TARGET_ENTER_DATA
:
9756 p
[i
] = "target enter data"; break;
9757 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
9758 default: gcc_unreachable ();
9760 error_at (OMP_CLAUSE_LOCATION (c
),
9761 "expected %qs %<if%> clause modifier rather than %qs",
9767 case OMP_CLAUSE_FINAL
:
9768 OMP_CLAUSE_OPERAND (c
, 0)
9769 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
9772 case OMP_CLAUSE_SCHEDULE
:
9773 case OMP_CLAUSE_NUM_THREADS
:
9774 case OMP_CLAUSE_NUM_TEAMS
:
9775 case OMP_CLAUSE_THREAD_LIMIT
:
9776 case OMP_CLAUSE_DIST_SCHEDULE
:
9777 case OMP_CLAUSE_DEVICE
:
9778 case OMP_CLAUSE_PRIORITY
:
9779 case OMP_CLAUSE_GRAINSIZE
:
9780 case OMP_CLAUSE_NUM_TASKS
:
9781 case OMP_CLAUSE_HINT
:
9782 case OMP_CLAUSE_ASYNC
:
9783 case OMP_CLAUSE_WAIT
:
9784 case OMP_CLAUSE_NUM_GANGS
:
9785 case OMP_CLAUSE_NUM_WORKERS
:
9786 case OMP_CLAUSE_VECTOR_LENGTH
:
9787 case OMP_CLAUSE_WORKER
:
9788 case OMP_CLAUSE_VECTOR
:
9789 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
9790 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9794 case OMP_CLAUSE_GANG
:
9795 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
9796 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9798 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
9799 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9803 case OMP_CLAUSE_NOWAIT
:
9807 case OMP_CLAUSE_ORDERED
:
9808 case OMP_CLAUSE_UNTIED
:
9809 case OMP_CLAUSE_COLLAPSE
:
9810 case OMP_CLAUSE_TILE
:
9811 case OMP_CLAUSE_AUTO
:
9812 case OMP_CLAUSE_SEQ
:
9813 case OMP_CLAUSE_INDEPENDENT
:
9814 case OMP_CLAUSE_MERGEABLE
:
9815 case OMP_CLAUSE_PROC_BIND
:
9816 case OMP_CLAUSE_SAFELEN
:
9817 case OMP_CLAUSE_SIMDLEN
:
9818 case OMP_CLAUSE_NOGROUP
:
9819 case OMP_CLAUSE_THREADS
:
9820 case OMP_CLAUSE_SIMD
:
9821 case OMP_CLAUSE_BIND
:
9822 case OMP_CLAUSE_IF_PRESENT
:
9823 case OMP_CLAUSE_FINALIZE
:
9826 case OMP_CLAUSE_ORDER
:
9827 ctx
->order_concurrent
= true;
9830 case OMP_CLAUSE_DEFAULTMAP
:
9831 enum gimplify_defaultmap_kind gdmkmin
, gdmkmax
;
9832 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c
))
9834 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
:
9835 gdmkmin
= GDMK_SCALAR
;
9836 gdmkmax
= GDMK_POINTER
;
9838 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
:
9839 gdmkmin
= gdmkmax
= GDMK_SCALAR
;
9841 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
:
9842 gdmkmin
= gdmkmax
= GDMK_AGGREGATE
;
9844 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
:
9845 gdmkmin
= gdmkmax
= GDMK_ALLOCATABLE
;
9847 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
:
9848 gdmkmin
= gdmkmax
= GDMK_POINTER
;
9853 for (int gdmk
= gdmkmin
; gdmk
<= gdmkmax
; gdmk
++)
9854 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c
))
9856 case OMP_CLAUSE_DEFAULTMAP_ALLOC
:
9857 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_ALLOC_ONLY
;
9859 case OMP_CLAUSE_DEFAULTMAP_TO
:
9860 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_TO_ONLY
;
9862 case OMP_CLAUSE_DEFAULTMAP_FROM
:
9863 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_FROM_ONLY
;
9865 case OMP_CLAUSE_DEFAULTMAP_TOFROM
:
9866 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
9868 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
:
9869 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
9871 case OMP_CLAUSE_DEFAULTMAP_NONE
:
9872 ctx
->defaultmap
[gdmk
] = 0;
9874 case OMP_CLAUSE_DEFAULTMAP_DEFAULT
:
9878 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
9880 case GDMK_AGGREGATE
:
9881 case GDMK_ALLOCATABLE
:
9882 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
9885 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
9896 case OMP_CLAUSE_ALIGNED
:
9897 decl
= OMP_CLAUSE_DECL (c
);
9898 if (error_operand_p (decl
))
9903 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
9904 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9909 if (!is_global_var (decl
)
9910 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
9911 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
9914 case OMP_CLAUSE_NONTEMPORAL
:
9915 decl
= OMP_CLAUSE_DECL (c
);
9916 if (error_operand_p (decl
))
9921 omp_add_variable (ctx
, decl
, GOVD_NONTEMPORAL
);
9924 case OMP_CLAUSE_ALLOCATE
:
9925 decl
= OMP_CLAUSE_DECL (c
);
9926 if (error_operand_p (decl
))
9931 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
), pre_p
, NULL
,
9932 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9937 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
9938 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
9941 else if (code
== OMP_TASKLOOP
9942 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
9943 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
9944 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
9945 pre_p
, NULL
, false);
9948 case OMP_CLAUSE_DEFAULT
:
9949 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
9952 case OMP_CLAUSE_INCLUSIVE
:
9953 case OMP_CLAUSE_EXCLUSIVE
:
9954 decl
= OMP_CLAUSE_DECL (c
);
9956 splay_tree_node n
= splay_tree_lookup (outer_ctx
->variables
,
9957 (splay_tree_key
) decl
);
9958 if (n
== NULL
|| (n
->value
& GOVD_REDUCTION
) == 0)
9960 error_at (OMP_CLAUSE_LOCATION (c
),
9961 "%qD specified in %qs clause but not in %<inscan%> "
9962 "%<reduction%> clause on the containing construct",
9963 decl
, omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
9968 n
->value
|= GOVD_REDUCTION_INSCAN
;
9969 if (outer_ctx
->region_type
== ORT_SIMD
9970 && outer_ctx
->outer_context
9971 && outer_ctx
->outer_context
->region_type
== ORT_WORKSHARE
)
9973 n
= splay_tree_lookup (outer_ctx
->outer_context
->variables
,
9974 (splay_tree_key
) decl
);
9975 if (n
&& (n
->value
& GOVD_REDUCTION
) != 0)
9976 n
->value
|= GOVD_REDUCTION_INSCAN
;
9986 if (code
== OACC_DATA
9987 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9988 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9989 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9992 *list_p
= OMP_CLAUSE_CHAIN (c
);
9994 list_p
= &OMP_CLAUSE_CHAIN (c
);
9997 ctx
->clauses
= *orig_list_p
;
9998 gimplify_omp_ctxp
= ctx
;
9999 if (struct_map_to_clause
)
10000 delete struct_map_to_clause
;
10001 if (struct_deref_set
)
10002 delete struct_deref_set
;
10005 /* Return true if DECL is a candidate for shared to firstprivate
10006 optimization. We only consider non-addressable scalars, not
10007 too big, and not references. */
10010 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
10012 if (TREE_ADDRESSABLE (decl
))
10014 tree type
= TREE_TYPE (decl
);
10015 if (!is_gimple_reg_type (type
)
10016 || TREE_CODE (type
) == REFERENCE_TYPE
10017 || TREE_ADDRESSABLE (type
))
10019 /* Don't optimize too large decls, as each thread/task will have
10021 HOST_WIDE_INT len
= int_size_in_bytes (type
);
10022 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
10024 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
10029 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
10030 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
10031 GOVD_WRITTEN in outer contexts. */
10034 omp_mark_stores (struct gimplify_omp_ctx
*ctx
, tree decl
)
10036 for (; ctx
; ctx
= ctx
->outer_context
)
10038 splay_tree_node n
= splay_tree_lookup (ctx
->variables
,
10039 (splay_tree_key
) decl
);
10042 else if (n
->value
& GOVD_SHARED
)
10044 n
->value
|= GOVD_WRITTEN
;
10047 else if (n
->value
& GOVD_DATA_SHARE_CLASS
)
10052 /* Helper callback for walk_gimple_seq to discover possible stores
10053 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
10054 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
10058 omp_find_stores_op (tree
*tp
, int *walk_subtrees
, void *data
)
10060 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
10062 *walk_subtrees
= 0;
10069 if (handled_component_p (op
))
10070 op
= TREE_OPERAND (op
, 0);
10071 else if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
10072 && TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
10073 op
= TREE_OPERAND (TREE_OPERAND (op
, 0), 0);
10078 if (!DECL_P (op
) || !omp_shared_to_firstprivate_optimizable_decl_p (op
))
10081 omp_mark_stores (gimplify_omp_ctxp
, op
);
10085 /* Helper callback for walk_gimple_seq to discover possible stores
10086 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
10087 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
10091 omp_find_stores_stmt (gimple_stmt_iterator
*gsi_p
,
10092 bool *handled_ops_p
,
10093 struct walk_stmt_info
*wi
)
10095 gimple
*stmt
= gsi_stmt (*gsi_p
);
10096 switch (gimple_code (stmt
))
10098 /* Don't recurse on OpenMP constructs for which
10099 gimplify_adjust_omp_clauses already handled the bodies,
10100 except handle gimple_omp_for_pre_body. */
10101 case GIMPLE_OMP_FOR
:
10102 *handled_ops_p
= true;
10103 if (gimple_omp_for_pre_body (stmt
))
10104 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
10105 omp_find_stores_stmt
, omp_find_stores_op
, wi
);
10107 case GIMPLE_OMP_PARALLEL
:
10108 case GIMPLE_OMP_TASK
:
10109 case GIMPLE_OMP_SECTIONS
:
10110 case GIMPLE_OMP_SINGLE
:
10111 case GIMPLE_OMP_TARGET
:
10112 case GIMPLE_OMP_TEAMS
:
10113 case GIMPLE_OMP_CRITICAL
:
10114 *handled_ops_p
= true;
10122 struct gimplify_adjust_omp_clauses_data
10128 /* For all variables that were not actually used within the context,
10129 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
10132 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
10134 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
10136 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
10137 tree decl
= (tree
) n
->key
;
10138 unsigned flags
= n
->value
;
10139 enum omp_clause_code code
;
10141 bool private_debug
;
10143 if (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
10144 && (flags
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0)
10145 flags
= GOVD_SHARED
| GOVD_SEEN
| GOVD_WRITTEN
;
10146 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
10148 if ((flags
& GOVD_SEEN
) == 0)
10150 if ((flags
& GOVD_MAP_HAS_ATTACHMENTS
) != 0)
10152 if (flags
& GOVD_DEBUG_PRIVATE
)
10154 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_SHARED
);
10155 private_debug
= true;
10157 else if (flags
& GOVD_MAP
)
10158 private_debug
= false;
10161 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
10162 !!(flags
& GOVD_SHARED
));
10164 code
= OMP_CLAUSE_PRIVATE
;
10165 else if (flags
& GOVD_MAP
)
10167 code
= OMP_CLAUSE_MAP
;
10168 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
10169 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
10171 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl
);
10175 && DECL_IN_CONSTANT_POOL (decl
)
10176 && !lookup_attribute ("omp declare target",
10177 DECL_ATTRIBUTES (decl
)))
10179 tree id
= get_identifier ("omp declare target");
10180 DECL_ATTRIBUTES (decl
)
10181 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
10182 varpool_node
*node
= varpool_node::get (decl
);
10185 node
->offloadable
= 1;
10186 if (ENABLE_OFFLOADING
)
10187 g
->have_offload
= true;
10191 else if (flags
& GOVD_SHARED
)
10193 if (is_global_var (decl
))
10195 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
10196 while (ctx
!= NULL
)
10199 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10200 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
10201 | GOVD_PRIVATE
| GOVD_REDUCTION
10202 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
10204 ctx
= ctx
->outer_context
;
10209 code
= OMP_CLAUSE_SHARED
;
10211 else if (flags
& GOVD_PRIVATE
)
10212 code
= OMP_CLAUSE_PRIVATE
;
10213 else if (flags
& GOVD_FIRSTPRIVATE
)
10215 code
= OMP_CLAUSE_FIRSTPRIVATE
;
10216 if ((gimplify_omp_ctxp
->region_type
& ORT_TARGET
)
10217 && (gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
10218 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
10220 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
10221 "%<target%> construct", decl
);
10225 else if (flags
& GOVD_LASTPRIVATE
)
10226 code
= OMP_CLAUSE_LASTPRIVATE
;
10227 else if (flags
& (GOVD_ALIGNED
| GOVD_NONTEMPORAL
))
10229 else if (flags
& GOVD_CONDTEMP
)
10231 code
= OMP_CLAUSE__CONDTEMP_
;
10232 gimple_add_tmp_var (decl
);
10235 gcc_unreachable ();
10237 if (((flags
& GOVD_LASTPRIVATE
)
10238 || (code
== OMP_CLAUSE_SHARED
&& (flags
& GOVD_WRITTEN
)))
10239 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10240 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10242 tree chain
= *list_p
;
10243 clause
= build_omp_clause (input_location
, code
);
10244 OMP_CLAUSE_DECL (clause
) = decl
;
10245 OMP_CLAUSE_CHAIN (clause
) = chain
;
10247 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
10248 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
10249 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
10250 else if (code
== OMP_CLAUSE_SHARED
10251 && (flags
& GOVD_WRITTEN
) == 0
10252 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10253 OMP_CLAUSE_SHARED_READONLY (clause
) = 1;
10254 else if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_EXPLICIT
) == 0)
10255 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause
) = 1;
10256 else if (code
== OMP_CLAUSE_MAP
&& (flags
& GOVD_MAP_0LEN_ARRAY
) != 0)
10258 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
10259 OMP_CLAUSE_DECL (nc
) = decl
;
10260 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
10261 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
10262 OMP_CLAUSE_DECL (clause
)
10263 = build_simple_mem_ref_loc (input_location
, decl
);
10264 OMP_CLAUSE_DECL (clause
)
10265 = build2 (MEM_REF
, char_type_node
, OMP_CLAUSE_DECL (clause
),
10266 build_int_cst (build_pointer_type (char_type_node
), 0));
10267 OMP_CLAUSE_SIZE (clause
) = size_zero_node
;
10268 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10269 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_ALLOC
);
10270 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause
) = 1;
10271 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
10272 OMP_CLAUSE_CHAIN (nc
) = chain
;
10273 OMP_CLAUSE_CHAIN (clause
) = nc
;
10274 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10275 gimplify_omp_ctxp
= ctx
->outer_context
;
10276 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0),
10277 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
10278 gimplify_omp_ctxp
= ctx
;
10280 else if (code
== OMP_CLAUSE_MAP
)
10283 /* Not all combinations of these GOVD_MAP flags are actually valid. */
10284 switch (flags
& (GOVD_MAP_TO_ONLY
10286 | GOVD_MAP_FORCE_PRESENT
10287 | GOVD_MAP_ALLOC_ONLY
10288 | GOVD_MAP_FROM_ONLY
))
10291 kind
= GOMP_MAP_TOFROM
;
10293 case GOVD_MAP_FORCE
:
10294 kind
= GOMP_MAP_TOFROM
| GOMP_MAP_FLAG_FORCE
;
10296 case GOVD_MAP_TO_ONLY
:
10297 kind
= GOMP_MAP_TO
;
10299 case GOVD_MAP_FROM_ONLY
:
10300 kind
= GOMP_MAP_FROM
;
10302 case GOVD_MAP_ALLOC_ONLY
:
10303 kind
= GOMP_MAP_ALLOC
;
10305 case GOVD_MAP_TO_ONLY
| GOVD_MAP_FORCE
:
10306 kind
= GOMP_MAP_TO
| GOMP_MAP_FLAG_FORCE
;
10308 case GOVD_MAP_FORCE_PRESENT
:
10309 kind
= GOMP_MAP_FORCE_PRESENT
;
10312 gcc_unreachable ();
10314 OMP_CLAUSE_SET_MAP_KIND (clause
, kind
);
10315 if (DECL_SIZE (decl
)
10316 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
10318 tree decl2
= DECL_VALUE_EXPR (decl
);
10319 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10320 decl2
= TREE_OPERAND (decl2
, 0);
10321 gcc_assert (DECL_P (decl2
));
10322 tree mem
= build_simple_mem_ref (decl2
);
10323 OMP_CLAUSE_DECL (clause
) = mem
;
10324 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10325 if (gimplify_omp_ctxp
->outer_context
)
10327 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
10328 omp_notice_variable (ctx
, decl2
, true);
10329 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
10331 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
10333 OMP_CLAUSE_DECL (nc
) = decl
;
10334 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10335 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
)
10336 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
10338 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
10339 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
10340 OMP_CLAUSE_CHAIN (clause
) = nc
;
10342 else if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
10343 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
10345 OMP_CLAUSE_DECL (clause
) = build_simple_mem_ref (decl
);
10346 OMP_CLAUSE_SIZE (clause
)
10347 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))));
10348 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10349 gimplify_omp_ctxp
= ctx
->outer_context
;
10350 gimplify_expr (&OMP_CLAUSE_SIZE (clause
),
10351 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
10352 gimplify_omp_ctxp
= ctx
;
10353 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
10355 OMP_CLAUSE_DECL (nc
) = decl
;
10356 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10357 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
10358 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
10359 OMP_CLAUSE_CHAIN (clause
) = nc
;
10362 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
10364 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
10366 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
10367 OMP_CLAUSE_DECL (nc
) = decl
;
10368 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
10369 OMP_CLAUSE_CHAIN (nc
) = chain
;
10370 OMP_CLAUSE_CHAIN (clause
) = nc
;
10371 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10372 gimplify_omp_ctxp
= ctx
->outer_context
;
10373 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
10374 (ctx
->region_type
& ORT_ACC
) != 0);
10375 gimplify_omp_ctxp
= ctx
;
10378 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10379 gimplify_omp_ctxp
= ctx
->outer_context
;
10380 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
,
10381 (ctx
->region_type
& ORT_ACC
) != 0);
10382 if (gimplify_omp_ctxp
)
10383 for (; clause
!= chain
; clause
= OMP_CLAUSE_CHAIN (clause
))
10384 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
10385 && DECL_P (OMP_CLAUSE_SIZE (clause
)))
10386 omp_notice_variable (gimplify_omp_ctxp
, OMP_CLAUSE_SIZE (clause
),
10388 gimplify_omp_ctxp
= ctx
;
10393 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, gimple_seq body
, tree
*list_p
,
10394 enum tree_code code
)
10396 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10397 tree
*orig_list_p
= list_p
;
10399 bool has_inscan_reductions
= false;
10403 struct gimplify_omp_ctx
*octx
;
10404 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
10405 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TASK
| ORT_TEAMS
)) != 0)
10409 struct walk_stmt_info wi
;
10410 memset (&wi
, 0, sizeof (wi
));
10411 walk_gimple_seq (body
, omp_find_stores_stmt
,
10412 omp_find_stores_op
, &wi
);
10416 if (ctx
->add_safelen1
)
10418 /* If there are VLAs in the body of simd loop, prevent
10420 gcc_assert (ctx
->region_type
== ORT_SIMD
);
10421 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
10422 OMP_CLAUSE_SAFELEN_EXPR (c
) = integer_one_node
;
10423 OMP_CLAUSE_CHAIN (c
) = *list_p
;
10425 list_p
= &OMP_CLAUSE_CHAIN (c
);
10428 if (ctx
->region_type
== ORT_WORKSHARE
10429 && ctx
->outer_context
10430 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
)
10432 for (c
= ctx
->outer_context
->clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10433 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
10434 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
10436 decl
= OMP_CLAUSE_DECL (c
);
10438 = splay_tree_lookup (ctx
->outer_context
->variables
,
10439 (splay_tree_key
) decl
);
10440 gcc_checking_assert (!splay_tree_lookup (ctx
->variables
,
10441 (splay_tree_key
) decl
));
10442 omp_add_variable (ctx
, decl
, n
->value
);
10443 tree c2
= copy_node (c
);
10444 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
10446 if ((n
->value
& GOVD_FIRSTPRIVATE
) == 0)
10448 c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10449 OMP_CLAUSE_FIRSTPRIVATE
);
10450 OMP_CLAUSE_DECL (c2
) = decl
;
10451 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
10455 while ((c
= *list_p
) != NULL
)
10458 bool remove
= false;
10460 switch (OMP_CLAUSE_CODE (c
))
10462 case OMP_CLAUSE_FIRSTPRIVATE
:
10463 if ((ctx
->region_type
& ORT_TARGET
)
10464 && (ctx
->region_type
& ORT_ACC
) == 0
10465 && TYPE_ATOMIC (strip_array_types
10466 (TREE_TYPE (OMP_CLAUSE_DECL (c
)))))
10468 error_at (OMP_CLAUSE_LOCATION (c
),
10469 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10470 "%<target%> construct", OMP_CLAUSE_DECL (c
));
10475 case OMP_CLAUSE_PRIVATE
:
10476 case OMP_CLAUSE_SHARED
:
10477 case OMP_CLAUSE_LINEAR
:
10478 decl
= OMP_CLAUSE_DECL (c
);
10479 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10480 remove
= !(n
->value
& GOVD_SEEN
);
10481 if ((n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0
10482 && code
== OMP_PARALLEL
10483 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10487 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
10488 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
10489 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
10491 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
10492 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
10494 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
10495 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
10497 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
10498 && (n
->value
& GOVD_WRITTEN
) == 0
10500 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10501 OMP_CLAUSE_SHARED_READONLY (c
) = 1;
10502 else if (DECL_P (decl
)
10503 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
10504 && (n
->value
& GOVD_WRITTEN
) != 0)
10505 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10506 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
10507 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10508 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10511 n
->value
&= ~GOVD_EXPLICIT
;
10514 case OMP_CLAUSE_LASTPRIVATE
:
10515 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10516 accurately reflect the presence of a FIRSTPRIVATE clause. */
10517 decl
= OMP_CLAUSE_DECL (c
);
10518 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10519 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
10520 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
10521 if (code
== OMP_DISTRIBUTE
10522 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
10525 error_at (OMP_CLAUSE_LOCATION (c
),
10526 "same variable used in %<firstprivate%> and "
10527 "%<lastprivate%> clauses on %<distribute%> "
10531 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
10533 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10534 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10535 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) && code
== OMP_PARALLEL
)
10539 case OMP_CLAUSE_ALIGNED
:
10540 decl
= OMP_CLAUSE_DECL (c
);
10541 if (!is_global_var (decl
))
10543 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10544 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
10545 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
10547 struct gimplify_omp_ctx
*octx
;
10549 && (n
->value
& (GOVD_DATA_SHARE_CLASS
10550 & ~GOVD_FIRSTPRIVATE
)))
10553 for (octx
= ctx
->outer_context
; octx
;
10554 octx
= octx
->outer_context
)
10556 n
= splay_tree_lookup (octx
->variables
,
10557 (splay_tree_key
) decl
);
10560 if (n
->value
& GOVD_LOCAL
)
10562 /* We have to avoid assigning a shared variable
10563 to itself when trying to add
10564 __builtin_assume_aligned. */
10565 if (n
->value
& GOVD_SHARED
)
10573 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
10575 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10576 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
10581 case OMP_CLAUSE_NONTEMPORAL
:
10582 decl
= OMP_CLAUSE_DECL (c
);
10583 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10584 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
10587 case OMP_CLAUSE_MAP
:
10588 if (code
== OMP_TARGET_EXIT_DATA
10589 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
10594 decl
= OMP_CLAUSE_DECL (c
);
10595 /* Data clauses associated with reductions must be
10596 compatible with present_or_copy. Warn and adjust the clause
10597 if that is not the case. */
10598 if (ctx
->region_type
== ORT_ACC_PARALLEL
10599 || ctx
->region_type
== ORT_ACC_SERIAL
)
10601 tree t
= DECL_P (decl
) ? decl
: TREE_OPERAND (decl
, 0);
10605 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
10607 if (n
&& (n
->value
& GOVD_REDUCTION
))
10609 enum gomp_map_kind kind
= OMP_CLAUSE_MAP_KIND (c
);
10611 OMP_CLAUSE_MAP_IN_REDUCTION (c
) = 1;
10612 if ((kind
& GOMP_MAP_TOFROM
) != GOMP_MAP_TOFROM
10613 && kind
!= GOMP_MAP_FORCE_PRESENT
10614 && kind
!= GOMP_MAP_POINTER
)
10616 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
10617 "incompatible data clause with reduction "
10618 "on %qE; promoting to %<present_or_copy%>",
10620 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
10624 if (!DECL_P (decl
))
10626 if ((ctx
->region_type
& ORT_TARGET
) != 0
10627 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
10629 if (TREE_CODE (decl
) == INDIRECT_REF
10630 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
10631 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
10632 == REFERENCE_TYPE
))
10633 decl
= TREE_OPERAND (decl
, 0);
10634 if (TREE_CODE (decl
) == COMPONENT_REF
)
10636 while (TREE_CODE (decl
) == COMPONENT_REF
)
10637 decl
= TREE_OPERAND (decl
, 0);
10640 n
= splay_tree_lookup (ctx
->variables
,
10641 (splay_tree_key
) decl
);
10642 if (!(n
->value
& GOVD_SEEN
))
10649 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10650 if ((ctx
->region_type
& ORT_TARGET
) != 0
10651 && !(n
->value
& GOVD_SEEN
)
10652 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) == 0
10653 && (!is_global_var (decl
)
10654 || !lookup_attribute ("omp declare target link",
10655 DECL_ATTRIBUTES (decl
))))
10658 /* For struct element mapping, if struct is never referenced
10659 in target block and none of the mapping has always modifier,
10660 remove all the struct element mappings, which immediately
10661 follow the GOMP_MAP_STRUCT map clause. */
10662 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
10664 HOST_WIDE_INT cnt
= tree_to_shwi (OMP_CLAUSE_SIZE (c
));
10666 OMP_CLAUSE_CHAIN (c
)
10667 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c
));
10670 else if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
10671 && (code
== OMP_TARGET_EXIT_DATA
10672 || code
== OACC_EXIT_DATA
))
10674 else if (DECL_SIZE (decl
)
10675 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
10676 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
10677 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
10678 && (OMP_CLAUSE_MAP_KIND (c
)
10679 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
10681 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10682 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10684 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
10686 tree decl2
= DECL_VALUE_EXPR (decl
);
10687 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10688 decl2
= TREE_OPERAND (decl2
, 0);
10689 gcc_assert (DECL_P (decl2
));
10690 tree mem
= build_simple_mem_ref (decl2
);
10691 OMP_CLAUSE_DECL (c
) = mem
;
10692 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10693 if (ctx
->outer_context
)
10695 omp_notice_variable (ctx
->outer_context
, decl2
, true);
10696 omp_notice_variable (ctx
->outer_context
,
10697 OMP_CLAUSE_SIZE (c
), true);
10699 if (((ctx
->region_type
& ORT_TARGET
) != 0
10700 || !ctx
->target_firstprivatize_array_bases
)
10701 && ((n
->value
& GOVD_SEEN
) == 0
10702 || (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
)) == 0))
10704 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10706 OMP_CLAUSE_DECL (nc
) = decl
;
10707 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10708 if (ctx
->target_firstprivatize_array_bases
)
10709 OMP_CLAUSE_SET_MAP_KIND (nc
,
10710 GOMP_MAP_FIRSTPRIVATE_POINTER
);
10712 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
10713 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
10714 OMP_CLAUSE_CHAIN (c
) = nc
;
10720 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
10721 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
10722 gcc_assert ((n
->value
& GOVD_SEEN
) == 0
10723 || ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
10728 case OMP_CLAUSE_TO
:
10729 case OMP_CLAUSE_FROM
:
10730 case OMP_CLAUSE__CACHE_
:
10731 decl
= OMP_CLAUSE_DECL (c
);
10732 if (!DECL_P (decl
))
10734 if (DECL_SIZE (decl
)
10735 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
10737 tree decl2
= DECL_VALUE_EXPR (decl
);
10738 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10739 decl2
= TREE_OPERAND (decl2
, 0);
10740 gcc_assert (DECL_P (decl2
));
10741 tree mem
= build_simple_mem_ref (decl2
);
10742 OMP_CLAUSE_DECL (c
) = mem
;
10743 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10744 if (ctx
->outer_context
)
10746 omp_notice_variable (ctx
->outer_context
, decl2
, true);
10747 omp_notice_variable (ctx
->outer_context
,
10748 OMP_CLAUSE_SIZE (c
), true);
10751 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
10752 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
10755 case OMP_CLAUSE_REDUCTION
:
10756 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
10758 decl
= OMP_CLAUSE_DECL (c
);
10759 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10760 if ((n
->value
& GOVD_REDUCTION_INSCAN
) == 0)
10763 error_at (OMP_CLAUSE_LOCATION (c
),
10764 "%qD specified in %<inscan%> %<reduction%> clause "
10765 "but not in %<scan%> directive clause", decl
);
10768 has_inscan_reductions
= true;
10771 case OMP_CLAUSE_IN_REDUCTION
:
10772 case OMP_CLAUSE_TASK_REDUCTION
:
10773 decl
= OMP_CLAUSE_DECL (c
);
10774 /* OpenACC reductions need a present_or_copy data clause.
10775 Add one if necessary. Emit error when the reduction is private. */
10776 if (ctx
->region_type
== ORT_ACC_PARALLEL
10777 || ctx
->region_type
== ORT_ACC_SERIAL
)
10779 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10780 if (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
10783 error_at (OMP_CLAUSE_LOCATION (c
), "invalid private "
10784 "reduction on %qE", DECL_NAME (decl
));
10786 else if ((n
->value
& GOVD_MAP
) == 0)
10788 tree next
= OMP_CLAUSE_CHAIN (c
);
10789 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_MAP
);
10790 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_TOFROM
);
10791 OMP_CLAUSE_DECL (nc
) = decl
;
10792 OMP_CLAUSE_CHAIN (c
) = nc
;
10793 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
10798 OMP_CLAUSE_MAP_IN_REDUCTION (nc
) = 1;
10799 if (OMP_CLAUSE_CHAIN (nc
) == NULL
)
10801 nc
= OMP_CLAUSE_CHAIN (nc
);
10803 OMP_CLAUSE_CHAIN (nc
) = next
;
10804 n
->value
|= GOVD_MAP
;
10808 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10809 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10812 case OMP_CLAUSE_ALLOCATE
:
10813 decl
= OMP_CLAUSE_DECL (c
);
10814 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10815 if (n
!= NULL
&& !(n
->value
& GOVD_SEEN
))
10817 if ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
| GOVD_LINEAR
))
10819 && (n
->value
& (GOVD_REDUCTION
| GOVD_LASTPRIVATE
)) == 0)
10823 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
10824 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)) != INTEGER_CST
10825 && ((ctx
->region_type
& (ORT_PARALLEL
| ORT_TARGET
)) != 0
10826 || (ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASK
10827 || (ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
))
10829 tree allocator
= OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
10830 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) allocator
);
10833 enum omp_clause_default_kind default_kind
10834 = ctx
->default_kind
;
10835 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
10836 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
10838 ctx
->default_kind
= default_kind
;
10841 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
10846 case OMP_CLAUSE_COPYIN
:
10847 case OMP_CLAUSE_COPYPRIVATE
:
10848 case OMP_CLAUSE_IF
:
10849 case OMP_CLAUSE_NUM_THREADS
:
10850 case OMP_CLAUSE_NUM_TEAMS
:
10851 case OMP_CLAUSE_THREAD_LIMIT
:
10852 case OMP_CLAUSE_DIST_SCHEDULE
:
10853 case OMP_CLAUSE_DEVICE
:
10854 case OMP_CLAUSE_SCHEDULE
:
10855 case OMP_CLAUSE_NOWAIT
:
10856 case OMP_CLAUSE_ORDERED
:
10857 case OMP_CLAUSE_DEFAULT
:
10858 case OMP_CLAUSE_UNTIED
:
10859 case OMP_CLAUSE_COLLAPSE
:
10860 case OMP_CLAUSE_FINAL
:
10861 case OMP_CLAUSE_MERGEABLE
:
10862 case OMP_CLAUSE_PROC_BIND
:
10863 case OMP_CLAUSE_SAFELEN
:
10864 case OMP_CLAUSE_SIMDLEN
:
10865 case OMP_CLAUSE_DEPEND
:
10866 case OMP_CLAUSE_PRIORITY
:
10867 case OMP_CLAUSE_GRAINSIZE
:
10868 case OMP_CLAUSE_NUM_TASKS
:
10869 case OMP_CLAUSE_NOGROUP
:
10870 case OMP_CLAUSE_THREADS
:
10871 case OMP_CLAUSE_SIMD
:
10872 case OMP_CLAUSE_HINT
:
10873 case OMP_CLAUSE_DEFAULTMAP
:
10874 case OMP_CLAUSE_ORDER
:
10875 case OMP_CLAUSE_BIND
:
10876 case OMP_CLAUSE_USE_DEVICE_PTR
:
10877 case OMP_CLAUSE_USE_DEVICE_ADDR
:
10878 case OMP_CLAUSE_IS_DEVICE_PTR
:
10879 case OMP_CLAUSE_ASYNC
:
10880 case OMP_CLAUSE_WAIT
:
10881 case OMP_CLAUSE_INDEPENDENT
:
10882 case OMP_CLAUSE_NUM_GANGS
:
10883 case OMP_CLAUSE_NUM_WORKERS
:
10884 case OMP_CLAUSE_VECTOR_LENGTH
:
10885 case OMP_CLAUSE_GANG
:
10886 case OMP_CLAUSE_WORKER
:
10887 case OMP_CLAUSE_VECTOR
:
10888 case OMP_CLAUSE_AUTO
:
10889 case OMP_CLAUSE_SEQ
:
10890 case OMP_CLAUSE_TILE
:
10891 case OMP_CLAUSE_IF_PRESENT
:
10892 case OMP_CLAUSE_FINALIZE
:
10893 case OMP_CLAUSE_INCLUSIVE
:
10894 case OMP_CLAUSE_EXCLUSIVE
:
10898 gcc_unreachable ();
10902 *list_p
= OMP_CLAUSE_CHAIN (c
);
10904 list_p
= &OMP_CLAUSE_CHAIN (c
);
10907 /* Add in any implicit data sharing. */
10908 struct gimplify_adjust_omp_clauses_data data
;
10909 data
.list_p
= list_p
;
10910 data
.pre_p
= pre_p
;
10911 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
10913 if (has_inscan_reductions
)
10914 for (c
= *orig_list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10915 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10916 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10918 error_at (OMP_CLAUSE_LOCATION (c
),
10919 "%<inscan%> %<reduction%> clause used together with "
10920 "%<linear%> clause for a variable other than loop "
10925 gimplify_omp_ctxp
= ctx
->outer_context
;
10926 delete_omp_context (ctx
);
10929 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
10930 -1 if unknown yet (simd is involved, won't be known until vectorization)
10931 and 1 if they do. If SCORES is non-NULL, it should point to an array
10932 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
10933 of the CONSTRUCTS (position -1 if it will never match) followed by
10934 number of constructs in the OpenMP context construct trait. If the
10935 score depends on whether it will be in a declare simd clone or not,
10936 the function returns 2 and there will be two sets of the scores, the first
10937 one for the case that it is not in a declare simd clone, the other
10938 that it is in a declare simd clone. */
10941 omp_construct_selector_matches (enum tree_code
*constructs
, int nconstructs
,
10944 int matched
= 0, cnt
= 0;
10945 bool simd_seen
= false;
10946 bool target_seen
= false;
10947 int declare_simd_cnt
= -1;
10948 auto_vec
<enum tree_code
, 16> codes
;
10949 for (struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
; ctx
;)
10951 if (((ctx
->region_type
& ORT_PARALLEL
) && ctx
->code
== OMP_PARALLEL
)
10952 || ((ctx
->region_type
& (ORT_TARGET
| ORT_IMPLICIT_TARGET
| ORT_ACC
))
10953 == ORT_TARGET
&& ctx
->code
== OMP_TARGET
)
10954 || ((ctx
->region_type
& ORT_TEAMS
) && ctx
->code
== OMP_TEAMS
)
10955 || (ctx
->region_type
== ORT_WORKSHARE
&& ctx
->code
== OMP_FOR
)
10956 || (ctx
->region_type
== ORT_SIMD
10957 && ctx
->code
== OMP_SIMD
10958 && !omp_find_clause (ctx
->clauses
, OMP_CLAUSE_BIND
)))
10962 codes
.safe_push (ctx
->code
);
10963 else if (matched
< nconstructs
&& ctx
->code
== constructs
[matched
])
10965 if (ctx
->code
== OMP_SIMD
)
10973 if (ctx
->code
== OMP_TARGET
)
10975 if (scores
== NULL
)
10976 return matched
< nconstructs
? 0 : simd_seen
? -1 : 1;
10977 target_seen
= true;
10981 else if (ctx
->region_type
== ORT_WORKSHARE
10982 && ctx
->code
== OMP_LOOP
10983 && ctx
->outer_context
10984 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
10985 && ctx
->outer_context
->outer_context
10986 && ctx
->outer_context
->outer_context
->code
== OMP_LOOP
10987 && ctx
->outer_context
->outer_context
->distribute
)
10988 ctx
= ctx
->outer_context
->outer_context
;
10989 ctx
= ctx
->outer_context
;
10992 && lookup_attribute ("omp declare simd",
10993 DECL_ATTRIBUTES (current_function_decl
)))
10995 /* Declare simd is a maybe case, it is supposed to be added only to the
10996 omp-simd-clone.c added clones and not to the base function. */
10997 declare_simd_cnt
= cnt
++;
10999 codes
.safe_push (OMP_SIMD
);
11001 && constructs
[0] == OMP_SIMD
)
11003 gcc_assert (matched
== 0);
11005 if (++matched
== nconstructs
)
11009 if (tree attr
= lookup_attribute ("omp declare variant variant",
11010 DECL_ATTRIBUTES (current_function_decl
)))
11012 enum tree_code variant_constructs
[5];
11013 int variant_nconstructs
= 0;
11015 variant_nconstructs
11016 = omp_constructor_traits_to_codes (TREE_VALUE (attr
),
11017 variant_constructs
);
11018 for (int i
= 0; i
< variant_nconstructs
; i
++)
11022 codes
.safe_push (variant_constructs
[i
]);
11023 else if (matched
< nconstructs
11024 && variant_constructs
[i
] == constructs
[matched
])
11026 if (variant_constructs
[i
] == OMP_SIMD
)
11037 && lookup_attribute ("omp declare target block",
11038 DECL_ATTRIBUTES (current_function_decl
)))
11041 codes
.safe_push (OMP_TARGET
);
11042 else if (matched
< nconstructs
&& constructs
[matched
] == OMP_TARGET
)
11047 for (int pass
= 0; pass
< (declare_simd_cnt
== -1 ? 1 : 2); pass
++)
11049 int j
= codes
.length () - 1;
11050 for (int i
= nconstructs
- 1; i
>= 0; i
--)
11053 && (pass
!= 0 || declare_simd_cnt
!= j
)
11054 && constructs
[i
] != codes
[j
])
11056 if (pass
== 0 && declare_simd_cnt
!= -1 && j
> declare_simd_cnt
)
11061 *scores
++ = ((pass
== 0 && declare_simd_cnt
!= -1)
11062 ? codes
.length () - 1 : codes
.length ());
11064 return declare_simd_cnt
== -1 ? 1 : 2;
11066 if (matched
== nconstructs
)
11067 return simd_seen
? -1 : 1;
11071 /* Gimplify OACC_CACHE. */
11074 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
11076 tree expr
= *expr_p
;
11078 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_ACC
,
11080 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OACC_CACHE_CLAUSES (expr
),
11083 /* TODO: Do something sensible with this information. */
11085 *expr_p
= NULL_TREE
;
11088 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
11089 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
11090 kind. The entry kind will replace the one in CLAUSE, while the exit
11091 kind will be used in a new omp_clause and returned to the caller. */
11094 gimplify_oacc_declare_1 (tree clause
)
11096 HOST_WIDE_INT kind
, new_op
;
11100 kind
= OMP_CLAUSE_MAP_KIND (clause
);
11104 case GOMP_MAP_ALLOC
:
11105 new_op
= GOMP_MAP_RELEASE
;
11109 case GOMP_MAP_FROM
:
11110 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
11111 new_op
= GOMP_MAP_FROM
;
11115 case GOMP_MAP_TOFROM
:
11116 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_TO
);
11117 new_op
= GOMP_MAP_FROM
;
11121 case GOMP_MAP_DEVICE_RESIDENT
:
11122 case GOMP_MAP_FORCE_DEVICEPTR
:
11123 case GOMP_MAP_FORCE_PRESENT
:
11124 case GOMP_MAP_LINK
:
11125 case GOMP_MAP_POINTER
:
11130 gcc_unreachable ();
11136 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
), OMP_CLAUSE_MAP
);
11137 OMP_CLAUSE_SET_MAP_KIND (c
, new_op
);
11138 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clause
);
11144 /* Gimplify OACC_DECLARE. */
11147 gimplify_oacc_declare (tree
*expr_p
, gimple_seq
*pre_p
)
11149 tree expr
= *expr_p
;
11151 tree clauses
, t
, decl
;
11153 clauses
= OACC_DECLARE_CLAUSES (expr
);
11155 gimplify_scan_omp_clauses (&clauses
, pre_p
, ORT_TARGET_DATA
, OACC_DECLARE
);
11156 gimplify_adjust_omp_clauses (pre_p
, NULL
, &clauses
, OACC_DECLARE
);
11158 for (t
= clauses
; t
; t
= OMP_CLAUSE_CHAIN (t
))
11160 decl
= OMP_CLAUSE_DECL (t
);
11162 if (TREE_CODE (decl
) == MEM_REF
)
11163 decl
= TREE_OPERAND (decl
, 0);
11165 if (VAR_P (decl
) && !is_oacc_declared (decl
))
11167 tree attr
= get_identifier ("oacc declare target");
11168 DECL_ATTRIBUTES (decl
) = tree_cons (attr
, NULL_TREE
,
11169 DECL_ATTRIBUTES (decl
));
11173 && !is_global_var (decl
)
11174 && DECL_CONTEXT (decl
) == current_function_decl
)
11176 tree c
= gimplify_oacc_declare_1 (t
);
11179 if (oacc_declare_returns
== NULL
)
11180 oacc_declare_returns
= new hash_map
<tree
, tree
>;
11182 oacc_declare_returns
->put (decl
, c
);
11186 if (gimplify_omp_ctxp
)
11187 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_SEEN
);
11190 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
11193 gimplify_seq_add_stmt (pre_p
, stmt
);
11195 *expr_p
= NULL_TREE
;
11198 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
11199 gimplification of the body, as well as scanning the body for used
11200 variables. We need to do this scan now, because variable-sized
11201 decls will be decomposed during gimplification. */
11204 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
11206 tree expr
= *expr_p
;
11208 gimple_seq body
= NULL
;
11210 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
11211 OMP_PARALLEL_COMBINED (expr
)
11212 ? ORT_COMBINED_PARALLEL
11213 : ORT_PARALLEL
, OMP_PARALLEL
);
11215 push_gimplify_context ();
11217 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
11218 if (gimple_code (g
) == GIMPLE_BIND
)
11219 pop_gimplify_context (g
);
11221 pop_gimplify_context (NULL
);
11223 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_PARALLEL_CLAUSES (expr
),
11226 g
= gimple_build_omp_parallel (body
,
11227 OMP_PARALLEL_CLAUSES (expr
),
11228 NULL_TREE
, NULL_TREE
);
11229 if (OMP_PARALLEL_COMBINED (expr
))
11230 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
11231 gimplify_seq_add_stmt (pre_p
, g
);
11232 *expr_p
= NULL_TREE
;
11235 /* Gimplify the contents of an OMP_TASK statement. This involves
11236 gimplification of the body, as well as scanning the body for used
11237 variables. We need to do this scan now, because variable-sized
11238 decls will be decomposed during gimplification. */
11241 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
11243 tree expr
= *expr_p
;
11245 gimple_seq body
= NULL
;
11247 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
11248 for (tree c
= OMP_TASK_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11249 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
11250 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET
)
11252 error_at (OMP_CLAUSE_LOCATION (c
),
11253 "%<mutexinoutset%> kind in %<depend%> clause on a "
11254 "%<taskwait%> construct");
11258 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
11259 omp_find_clause (OMP_TASK_CLAUSES (expr
),
11261 ? ORT_UNTIED_TASK
: ORT_TASK
, OMP_TASK
);
11263 if (OMP_TASK_BODY (expr
))
11265 push_gimplify_context ();
11267 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
11268 if (gimple_code (g
) == GIMPLE_BIND
)
11269 pop_gimplify_context (g
);
11271 pop_gimplify_context (NULL
);
11274 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_TASK_CLAUSES (expr
),
11277 g
= gimple_build_omp_task (body
,
11278 OMP_TASK_CLAUSES (expr
),
11279 NULL_TREE
, NULL_TREE
,
11280 NULL_TREE
, NULL_TREE
, NULL_TREE
);
11281 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
11282 gimple_omp_task_set_taskwait_p (g
, true);
11283 gimplify_seq_add_stmt (pre_p
, g
);
11284 *expr_p
= NULL_TREE
;
11287 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
11288 force it into a temporary initialized in PRE_P and add firstprivate clause
11289 to ORIG_FOR_STMT. */
11292 gimplify_omp_taskloop_expr (tree type
, tree
*tp
, gimple_seq
*pre_p
,
11293 tree orig_for_stmt
)
11295 if (*tp
== NULL
|| is_gimple_constant (*tp
))
11298 *tp
= get_initialized_tmp_var (*tp
, pre_p
, NULL
, false);
11299 /* Reference to pointer conversion is considered useless,
11300 but is significant for firstprivate clause. Force it
11303 && TREE_CODE (type
) == POINTER_TYPE
11304 && TREE_CODE (TREE_TYPE (*tp
)) == REFERENCE_TYPE
)
11306 tree v
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
11307 tree m
= build2 (INIT_EXPR
, TREE_TYPE (v
), v
, *tp
);
11308 gimplify_and_add (m
, pre_p
);
11312 tree c
= build_omp_clause (input_location
, OMP_CLAUSE_FIRSTPRIVATE
);
11313 OMP_CLAUSE_DECL (c
) = *tp
;
11314 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
11315 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
11318 /* Gimplify the gross structure of an OMP_FOR statement. */
11320 static enum gimplify_status
11321 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
11323 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
11324 enum gimplify_status ret
= GS_ALL_DONE
;
11325 enum gimplify_status tret
;
11327 gimple_seq for_body
, for_pre_body
;
11329 bitmap has_decl_expr
= NULL
;
11330 enum omp_region_type ort
= ORT_WORKSHARE
;
11331 bool openacc
= TREE_CODE (*expr_p
) == OACC_LOOP
;
11333 orig_for_stmt
= for_stmt
= *expr_p
;
11335 bool loop_p
= (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_BIND
)
11337 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
11339 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
11340 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
11341 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
11342 find_combined_omp_for
, data
, NULL
);
11343 if (inner_for_stmt
== NULL_TREE
)
11345 gcc_assert (seen_error ());
11346 *expr_p
= NULL_TREE
;
11349 if (data
[2] && OMP_FOR_PRE_BODY (*data
[2]))
11351 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data
[2]),
11352 &OMP_FOR_PRE_BODY (for_stmt
));
11353 OMP_FOR_PRE_BODY (*data
[2]) = NULL_TREE
;
11355 if (OMP_FOR_PRE_BODY (inner_for_stmt
))
11357 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt
),
11358 &OMP_FOR_PRE_BODY (for_stmt
));
11359 OMP_FOR_PRE_BODY (inner_for_stmt
) = NULL_TREE
;
11364 /* We have some statements or variable declarations in between
11365 the composite construct directives. Move them around the
11368 for (i
= 0; i
< 3; i
++)
11372 if (i
< 2 && data
[i
+ 1] == &OMP_BODY (t
))
11373 data
[i
+ 1] = data
[i
];
11374 *data
[i
] = OMP_BODY (t
);
11375 tree body
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
11376 NULL_TREE
, make_node (BLOCK
));
11377 OMP_BODY (t
) = body
;
11378 append_to_statement_list_force (inner_for_stmt
,
11379 &BIND_EXPR_BODY (body
));
11381 data
[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body
)));
11382 gcc_assert (*data
[3] == inner_for_stmt
);
11387 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
11389 && OMP_FOR_ORIG_DECLS (inner_for_stmt
)
11390 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11392 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11395 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
11396 /* Class iterators aren't allowed on OMP_SIMD, so the only
11397 case we need to solve is distribute parallel for. They are
11398 allowed on the loop construct, but that is already handled
11399 in gimplify_omp_loop. */
11400 gcc_assert (TREE_CODE (inner_for_stmt
) == OMP_FOR
11401 && TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
11403 tree orig_decl
= TREE_PURPOSE (orig
);
11404 tree last
= TREE_VALUE (orig
);
11406 for (pc
= &OMP_FOR_CLAUSES (inner_for_stmt
);
11407 *pc
; pc
= &OMP_CLAUSE_CHAIN (*pc
))
11408 if ((OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
11409 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LASTPRIVATE
)
11410 && OMP_CLAUSE_DECL (*pc
) == orig_decl
)
11412 if (*pc
== NULL_TREE
)
11415 for (spc
= &OMP_PARALLEL_CLAUSES (*data
[1]);
11416 *spc
; spc
= &OMP_CLAUSE_CHAIN (*spc
))
11417 if (OMP_CLAUSE_CODE (*spc
) == OMP_CLAUSE_PRIVATE
11418 && OMP_CLAUSE_DECL (*spc
) == orig_decl
)
11423 *spc
= OMP_CLAUSE_CHAIN (c
);
11424 OMP_CLAUSE_CHAIN (c
) = NULL_TREE
;
11428 if (*pc
== NULL_TREE
)
11430 else if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
)
11432 /* private clause will appear only on inner_for_stmt.
11433 Change it into firstprivate, and add private clause
11435 tree c
= copy_node (*pc
);
11436 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
11437 OMP_FOR_CLAUSES (for_stmt
) = c
;
11438 OMP_CLAUSE_CODE (*pc
) = OMP_CLAUSE_FIRSTPRIVATE
;
11439 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
11443 /* lastprivate clause will appear on both inner_for_stmt
11444 and for_stmt. Add firstprivate clause to
11446 tree c
= build_omp_clause (OMP_CLAUSE_LOCATION (*pc
),
11447 OMP_CLAUSE_FIRSTPRIVATE
);
11448 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (*pc
);
11449 OMP_CLAUSE_CHAIN (c
) = *pc
;
11451 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
11453 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
11454 OMP_CLAUSE_FIRSTPRIVATE
);
11455 OMP_CLAUSE_DECL (c
) = last
;
11456 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11457 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11458 c
= build_omp_clause (UNKNOWN_LOCATION
,
11459 *pc
? OMP_CLAUSE_SHARED
11460 : OMP_CLAUSE_FIRSTPRIVATE
);
11461 OMP_CLAUSE_DECL (c
) = orig_decl
;
11462 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11463 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11465 /* Similarly, take care of C++ range for temporaries, those should
11466 be firstprivate on OMP_PARALLEL if any. */
11468 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
11469 if (OMP_FOR_ORIG_DECLS (inner_for_stmt
)
11470 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11472 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11476 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
11477 tree v
= TREE_CHAIN (orig
);
11478 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
11479 OMP_CLAUSE_FIRSTPRIVATE
);
11480 /* First add firstprivate clause for the __for_end artificial
11482 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 1);
11483 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
11485 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
11486 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11487 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11488 if (TREE_VEC_ELT (v
, 0))
11490 /* And now the same for __for_range artificial decl if it
11492 c
= build_omp_clause (UNKNOWN_LOCATION
,
11493 OMP_CLAUSE_FIRSTPRIVATE
);
11494 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 0);
11495 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
11497 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
11498 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11499 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11504 switch (TREE_CODE (for_stmt
))
11507 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
))
11509 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
11510 OMP_CLAUSE_SCHEDULE
))
11511 error_at (EXPR_LOCATION (for_stmt
),
11512 "%qs clause may not appear on non-rectangular %qs",
11513 "schedule", "for");
11514 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
))
11515 error_at (EXPR_LOCATION (for_stmt
),
11516 "%qs clause may not appear on non-rectangular %qs",
11520 case OMP_DISTRIBUTE
:
11521 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
)
11522 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
11523 OMP_CLAUSE_DIST_SCHEDULE
))
11524 error_at (EXPR_LOCATION (for_stmt
),
11525 "%qs clause may not appear on non-rectangular %qs",
11526 "dist_schedule", "distribute");
11532 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_UNTIED
))
11533 ort
= ORT_UNTIED_TASKLOOP
;
11535 ort
= ORT_TASKLOOP
;
11541 gcc_unreachable ();
11544 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11545 clause for the IV. */
11546 if (ort
== ORT_SIMD
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
11548 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
11549 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
11550 decl
= TREE_OPERAND (t
, 0);
11551 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11552 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11553 && OMP_CLAUSE_DECL (c
) == decl
)
11555 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
11560 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
11561 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
11562 loop_p
&& TREE_CODE (for_stmt
) != OMP_SIMD
11563 ? OMP_LOOP
: TREE_CODE (for_stmt
));
11565 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
11566 gimplify_omp_ctxp
->distribute
= true;
11568 /* Handle OMP_FOR_INIT. */
11569 for_pre_body
= NULL
;
11570 if ((ort
== ORT_SIMD
11571 || (inner_for_stmt
&& TREE_CODE (inner_for_stmt
) == OMP_SIMD
))
11572 && OMP_FOR_PRE_BODY (for_stmt
))
11574 has_decl_expr
= BITMAP_ALLOC (NULL
);
11575 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
11576 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
)))
11579 t
= OMP_FOR_PRE_BODY (for_stmt
);
11580 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
11582 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
11584 tree_stmt_iterator si
;
11585 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
11589 if (TREE_CODE (t
) == DECL_EXPR
11590 && TREE_CODE (DECL_EXPR_DECL (t
)) == VAR_DECL
)
11591 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
11595 if (OMP_FOR_PRE_BODY (for_stmt
))
11597 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
11598 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
11601 struct gimplify_omp_ctx ctx
;
11602 memset (&ctx
, 0, sizeof (ctx
));
11603 ctx
.region_type
= ORT_NONE
;
11604 gimplify_omp_ctxp
= &ctx
;
11605 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
11606 gimplify_omp_ctxp
= NULL
;
11609 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
11611 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
11612 for_stmt
= inner_for_stmt
;
11614 /* For taskloop, need to gimplify the start, end and step before the
11615 taskloop, outside of the taskloop omp context. */
11616 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
11618 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11620 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11621 gimple_seq
*for_pre_p
= (gimple_seq_empty_p (for_pre_body
)
11622 ? pre_p
: &for_pre_body
);
11623 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
11624 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
11626 tree v
= TREE_OPERAND (t
, 1);
11627 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
11628 for_pre_p
, orig_for_stmt
);
11629 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
11630 for_pre_p
, orig_for_stmt
);
11633 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
11636 /* Handle OMP_FOR_COND. */
11637 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
11638 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
11640 tree v
= TREE_OPERAND (t
, 1);
11641 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
11642 for_pre_p
, orig_for_stmt
);
11643 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
11644 for_pre_p
, orig_for_stmt
);
11647 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
11650 /* Handle OMP_FOR_INCR. */
11651 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11652 if (TREE_CODE (t
) == MODIFY_EXPR
)
11654 decl
= TREE_OPERAND (t
, 0);
11655 t
= TREE_OPERAND (t
, 1);
11656 tree
*tp
= &TREE_OPERAND (t
, 1);
11657 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
11658 tp
= &TREE_OPERAND (t
, 0);
11660 gimplify_omp_taskloop_expr (NULL_TREE
, tp
, for_pre_p
,
11665 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
11669 if (orig_for_stmt
!= for_stmt
)
11670 gimplify_omp_ctxp
->combined_loop
= true;
11673 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
11674 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
11675 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
11676 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
11678 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
11679 bool is_doacross
= false;
11680 if (c
&& OMP_CLAUSE_ORDERED_EXPR (c
))
11682 is_doacross
= true;
11683 gimplify_omp_ctxp
->loop_iter_var
.create (TREE_VEC_LENGTH
11684 (OMP_FOR_INIT (for_stmt
))
11687 int collapse
= 1, tile
= 0;
11688 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
11690 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
11691 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
11693 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
11694 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ALLOCATE
);
11695 hash_set
<tree
> *allocate_uids
= NULL
;
11698 allocate_uids
= new hash_set
<tree
>;
11699 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
11700 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
)
11701 allocate_uids
->add (OMP_CLAUSE_DECL (c
));
11703 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11705 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11706 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
11707 decl
= TREE_OPERAND (t
, 0);
11708 gcc_assert (DECL_P (decl
));
11709 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
11710 || POINTER_TYPE_P (TREE_TYPE (decl
)));
11713 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
11715 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
11716 if (TREE_CODE (orig_decl
) == TREE_LIST
)
11718 orig_decl
= TREE_PURPOSE (orig_decl
);
11722 gimplify_omp_ctxp
->loop_iter_var
.quick_push (orig_decl
);
11725 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
11726 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
11729 /* Make sure the iteration variable is private. */
11730 tree c
= NULL_TREE
;
11731 tree c2
= NULL_TREE
;
11732 if (orig_for_stmt
!= for_stmt
)
11734 /* Preserve this information until we gimplify the inner simd. */
11736 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
11737 TREE_PRIVATE (t
) = 1;
11739 else if (ort
== ORT_SIMD
)
11741 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
11742 (splay_tree_key
) decl
);
11743 omp_is_private (gimplify_omp_ctxp
, decl
,
11744 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
11746 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
11748 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
11749 if (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
)
11750 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
11751 OMP_CLAUSE_LASTPRIVATE
);
11752 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
11753 OMP_CLAUSE_LASTPRIVATE
))
11754 if (OMP_CLAUSE_DECL (c3
) == decl
)
11756 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
11757 "conditional %<lastprivate%> on loop "
11758 "iterator %qD ignored", decl
);
11759 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
11760 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
11763 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1 && !loop_p
)
11765 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
11766 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
11767 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
11769 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
11770 || TREE_PRIVATE (t
))
11772 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
11773 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11775 struct gimplify_omp_ctx
*outer
11776 = gimplify_omp_ctxp
->outer_context
;
11777 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11779 if (outer
->region_type
== ORT_WORKSHARE
11780 && outer
->combined_loop
)
11782 n
= splay_tree_lookup (outer
->variables
,
11783 (splay_tree_key
)decl
);
11784 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11786 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
11787 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11791 struct gimplify_omp_ctx
*octx
= outer
->outer_context
;
11793 && octx
->region_type
== ORT_COMBINED_PARALLEL
11794 && octx
->outer_context
11795 && (octx
->outer_context
->region_type
11797 && octx
->outer_context
->combined_loop
)
11799 octx
= octx
->outer_context
;
11800 n
= splay_tree_lookup (octx
->variables
,
11801 (splay_tree_key
)decl
);
11802 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11804 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
11805 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11812 OMP_CLAUSE_DECL (c
) = decl
;
11813 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
11814 OMP_FOR_CLAUSES (for_stmt
) = c
;
11815 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
11816 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11818 if (outer
->region_type
== ORT_WORKSHARE
11819 && outer
->combined_loop
)
11821 if (outer
->outer_context
11822 && (outer
->outer_context
->region_type
11823 == ORT_COMBINED_PARALLEL
))
11824 outer
= outer
->outer_context
;
11825 else if (omp_check_private (outer
, decl
, false))
11828 else if (((outer
->region_type
& ORT_TASKLOOP
)
11830 && outer
->combined_loop
11831 && !omp_check_private (gimplify_omp_ctxp
,
11834 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
11836 omp_notice_variable (outer
, decl
, true);
11841 n
= splay_tree_lookup (outer
->variables
,
11842 (splay_tree_key
)decl
);
11843 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11845 omp_add_variable (outer
, decl
,
11846 GOVD_LASTPRIVATE
| GOVD_SEEN
);
11847 if (outer
->region_type
== ORT_COMBINED_PARALLEL
11848 && outer
->outer_context
11849 && (outer
->outer_context
->region_type
11851 && outer
->outer_context
->combined_loop
)
11853 outer
= outer
->outer_context
;
11854 n
= splay_tree_lookup (outer
->variables
,
11855 (splay_tree_key
)decl
);
11856 if (omp_check_private (outer
, decl
, false))
11859 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
11861 omp_add_variable (outer
, decl
,
11867 if (outer
&& outer
->outer_context
11868 && ((outer
->outer_context
->region_type
11869 & ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
11870 || (((outer
->region_type
& ORT_TASKLOOP
)
11872 && (outer
->outer_context
->region_type
11873 == ORT_COMBINED_PARALLEL
))))
11875 outer
= outer
->outer_context
;
11876 n
= splay_tree_lookup (outer
->variables
,
11877 (splay_tree_key
)decl
);
11879 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11880 omp_add_variable (outer
, decl
,
11881 GOVD_SHARED
| GOVD_SEEN
);
11885 if (outer
&& outer
->outer_context
)
11886 omp_notice_variable (outer
->outer_context
, decl
,
11896 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)));
11897 if (TREE_PRIVATE (t
))
11898 lastprivate
= false;
11899 if (loop_p
&& OMP_FOR_ORIG_DECLS (for_stmt
))
11901 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
11902 if (TREE_CODE (elt
) == TREE_LIST
&& TREE_PURPOSE (elt
))
11903 lastprivate
= false;
11906 struct gimplify_omp_ctx
*outer
11907 = gimplify_omp_ctxp
->outer_context
;
11908 if (outer
&& lastprivate
)
11910 if (outer
->region_type
== ORT_WORKSHARE
11911 && outer
->combined_loop
)
11913 n
= splay_tree_lookup (outer
->variables
,
11914 (splay_tree_key
)decl
);
11915 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11917 lastprivate
= false;
11920 else if (outer
->outer_context
11921 && (outer
->outer_context
->region_type
11922 == ORT_COMBINED_PARALLEL
))
11923 outer
= outer
->outer_context
;
11924 else if (omp_check_private (outer
, decl
, false))
11927 else if (((outer
->region_type
& ORT_TASKLOOP
)
11929 && outer
->combined_loop
11930 && !omp_check_private (gimplify_omp_ctxp
,
11933 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
11935 omp_notice_variable (outer
, decl
, true);
11940 n
= splay_tree_lookup (outer
->variables
,
11941 (splay_tree_key
)decl
);
11942 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11944 omp_add_variable (outer
, decl
,
11945 GOVD_LASTPRIVATE
| GOVD_SEEN
);
11946 if (outer
->region_type
== ORT_COMBINED_PARALLEL
11947 && outer
->outer_context
11948 && (outer
->outer_context
->region_type
11950 && outer
->outer_context
->combined_loop
)
11952 outer
= outer
->outer_context
;
11953 n
= splay_tree_lookup (outer
->variables
,
11954 (splay_tree_key
)decl
);
11955 if (omp_check_private (outer
, decl
, false))
11958 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
11960 omp_add_variable (outer
, decl
,
11966 if (outer
&& outer
->outer_context
11967 && ((outer
->outer_context
->region_type
11968 & ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
11969 || (((outer
->region_type
& ORT_TASKLOOP
)
11971 && (outer
->outer_context
->region_type
11972 == ORT_COMBINED_PARALLEL
))))
11974 outer
= outer
->outer_context
;
11975 n
= splay_tree_lookup (outer
->variables
,
11976 (splay_tree_key
)decl
);
11978 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11979 omp_add_variable (outer
, decl
,
11980 GOVD_SHARED
| GOVD_SEEN
);
11984 if (outer
&& outer
->outer_context
)
11985 omp_notice_variable (outer
->outer_context
, decl
,
11991 c
= build_omp_clause (input_location
,
11992 lastprivate
? OMP_CLAUSE_LASTPRIVATE
11993 : OMP_CLAUSE_PRIVATE
);
11994 OMP_CLAUSE_DECL (c
) = decl
;
11995 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
11996 OMP_FOR_CLAUSES (for_stmt
) = c
;
11997 omp_add_variable (gimplify_omp_ctxp
, decl
,
11998 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
11999 | GOVD_EXPLICIT
| GOVD_SEEN
);
12003 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
12005 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
12006 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
12007 (splay_tree_key
) decl
);
12008 if (n
&& (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
))
12009 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
12010 OMP_CLAUSE_LASTPRIVATE
);
12011 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
12012 OMP_CLAUSE_LASTPRIVATE
))
12013 if (OMP_CLAUSE_DECL (c3
) == decl
)
12015 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
12016 "conditional %<lastprivate%> on loop "
12017 "iterator %qD ignored", decl
);
12018 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
12019 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
12023 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
12025 /* If DECL is not a gimple register, create a temporary variable to act
12026 as an iteration counter. This is valid, since DECL cannot be
12027 modified in the body of the loop. Similarly for any iteration vars
12028 in simd with collapse > 1 where the iterator vars must be
12029 lastprivate. And similarly for vars mentioned in allocate clauses. */
12030 if (orig_for_stmt
!= for_stmt
)
12032 else if (!is_gimple_reg (decl
)
12033 || (ort
== ORT_SIMD
12034 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1)
12035 || (allocate_uids
&& allocate_uids
->contains (decl
)))
12037 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12038 /* Make sure omp_add_variable is not called on it prematurely.
12039 We call it ourselves a few lines later. */
12040 gimplify_omp_ctxp
= NULL
;
12041 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
12042 gimplify_omp_ctxp
= ctx
;
12043 TREE_OPERAND (t
, 0) = var
;
12045 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
12047 if (ort
== ORT_SIMD
12048 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
12050 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
12051 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
12052 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
12053 OMP_CLAUSE_DECL (c2
) = var
;
12054 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
12055 OMP_FOR_CLAUSES (for_stmt
) = c2
;
12056 omp_add_variable (gimplify_omp_ctxp
, var
,
12057 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
12058 if (c
== NULL_TREE
)
12065 omp_add_variable (gimplify_omp_ctxp
, var
,
12066 GOVD_PRIVATE
| GOVD_SEEN
);
12071 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
12073 tree lb
= TREE_OPERAND (t
, 1);
12074 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 1), &for_pre_body
, NULL
,
12075 is_gimple_val
, fb_rvalue
, false);
12076 ret
= MIN (ret
, tret
);
12077 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 2), &for_pre_body
, NULL
,
12078 is_gimple_val
, fb_rvalue
, false);
12081 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
12082 is_gimple_val
, fb_rvalue
, false);
12083 ret
= MIN (ret
, tret
);
12084 if (ret
== GS_ERROR
)
12087 /* Handle OMP_FOR_COND. */
12088 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
12089 gcc_assert (COMPARISON_CLASS_P (t
));
12090 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
12092 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
12094 tree ub
= TREE_OPERAND (t
, 1);
12095 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 1), &for_pre_body
, NULL
,
12096 is_gimple_val
, fb_rvalue
, false);
12097 ret
= MIN (ret
, tret
);
12098 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 2), &for_pre_body
, NULL
,
12099 is_gimple_val
, fb_rvalue
, false);
12102 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
12103 is_gimple_val
, fb_rvalue
, false);
12104 ret
= MIN (ret
, tret
);
12106 /* Handle OMP_FOR_INCR. */
12107 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12108 switch (TREE_CODE (t
))
12110 case PREINCREMENT_EXPR
:
12111 case POSTINCREMENT_EXPR
:
12113 tree decl
= TREE_OPERAND (t
, 0);
12114 /* c_omp_for_incr_canonicalize_ptr() should have been
12115 called to massage things appropriately. */
12116 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
12118 if (orig_for_stmt
!= for_stmt
)
12120 t
= build_int_cst (TREE_TYPE (decl
), 1);
12122 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
12123 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
12124 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
12125 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
12129 case PREDECREMENT_EXPR
:
12130 case POSTDECREMENT_EXPR
:
12131 /* c_omp_for_incr_canonicalize_ptr() should have been
12132 called to massage things appropriately. */
12133 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
12134 if (orig_for_stmt
!= for_stmt
)
12136 t
= build_int_cst (TREE_TYPE (decl
), -1);
12138 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
12139 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
12140 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
12141 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
12145 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
12146 TREE_OPERAND (t
, 0) = var
;
12148 t
= TREE_OPERAND (t
, 1);
12149 switch (TREE_CODE (t
))
12152 if (TREE_OPERAND (t
, 1) == decl
)
12154 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
12155 TREE_OPERAND (t
, 0) = var
;
12161 case POINTER_PLUS_EXPR
:
12162 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
12163 TREE_OPERAND (t
, 0) = var
;
12166 gcc_unreachable ();
12169 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
12170 is_gimple_val
, fb_rvalue
, false);
12171 ret
= MIN (ret
, tret
);
12174 tree step
= TREE_OPERAND (t
, 1);
12175 tree stept
= TREE_TYPE (decl
);
12176 if (POINTER_TYPE_P (stept
))
12178 step
= fold_convert (stept
, step
);
12179 if (TREE_CODE (t
) == MINUS_EXPR
)
12180 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
12181 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
12182 if (step
!= TREE_OPERAND (t
, 1))
12184 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
12185 &for_pre_body
, NULL
,
12186 is_gimple_val
, fb_rvalue
, false);
12187 ret
= MIN (ret
, tret
);
12193 gcc_unreachable ();
12199 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
12202 if ((var
!= decl
|| collapse
> 1 || tile
) && orig_for_stmt
== for_stmt
)
12204 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12205 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12206 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
12207 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
12208 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
12209 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
12210 && OMP_CLAUSE_DECL (c
) == decl
)
12212 if (is_doacross
&& (collapse
== 1 || i
>= collapse
))
12216 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12217 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12218 gcc_assert (TREE_OPERAND (t
, 0) == var
);
12219 t
= TREE_OPERAND (t
, 1);
12220 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
12221 || TREE_CODE (t
) == MINUS_EXPR
12222 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
12223 gcc_assert (TREE_OPERAND (t
, 0) == var
);
12224 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
),
12225 is_doacross
? var
: decl
,
12226 TREE_OPERAND (t
, 1));
12229 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
12230 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
12232 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
12233 push_gimplify_context ();
12234 gimplify_assign (decl
, t
, seq
);
12235 gimple
*bind
= NULL
;
12236 if (gimplify_ctxp
->temps
)
12238 bind
= gimple_build_bind (NULL_TREE
, *seq
, NULL_TREE
);
12240 gimplify_seq_add_stmt (seq
, bind
);
12242 pop_gimplify_context (bind
);
12245 if (OMP_FOR_NON_RECTANGULAR (for_stmt
) && var
!= decl
)
12246 for (int j
= i
+ 1; j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
12248 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
12249 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12250 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
12251 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
12252 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
12253 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
12254 gcc_assert (COMPARISON_CLASS_P (t
));
12255 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
12256 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
12257 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
12261 BITMAP_FREE (has_decl_expr
);
12262 delete allocate_uids
;
12264 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
12265 || (loop_p
&& orig_for_stmt
== for_stmt
))
12267 push_gimplify_context ();
12268 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt
)) != BIND_EXPR
)
12270 OMP_FOR_BODY (orig_for_stmt
)
12271 = build3 (BIND_EXPR
, void_type_node
, NULL
,
12272 OMP_FOR_BODY (orig_for_stmt
), NULL
);
12273 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt
)) = 1;
12277 gimple
*g
= gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt
),
12280 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
12281 || (loop_p
&& orig_for_stmt
== for_stmt
))
12283 if (gimple_code (g
) == GIMPLE_BIND
)
12284 pop_gimplify_context (g
);
12286 pop_gimplify_context (NULL
);
12289 if (orig_for_stmt
!= for_stmt
)
12290 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12292 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
12293 decl
= TREE_OPERAND (t
, 0);
12294 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12295 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
12296 gimplify_omp_ctxp
= ctx
->outer_context
;
12297 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
12298 gimplify_omp_ctxp
= ctx
;
12299 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
12300 TREE_OPERAND (t
, 0) = var
;
12301 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12302 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
12303 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
12304 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
12305 for (int j
= i
+ 1;
12306 j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
12308 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
12309 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12310 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
12311 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
12313 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
12314 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
12316 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
12317 gcc_assert (COMPARISON_CLASS_P (t
));
12318 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
12319 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
12321 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
12322 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
12327 gimplify_adjust_omp_clauses (pre_p
, for_body
,
12328 &OMP_FOR_CLAUSES (orig_for_stmt
),
12329 TREE_CODE (orig_for_stmt
));
12332 switch (TREE_CODE (orig_for_stmt
))
12334 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
12335 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
12336 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
12337 case OMP_TASKLOOP
: kind
= GF_OMP_FOR_KIND_TASKLOOP
; break;
12338 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
12340 gcc_unreachable ();
12342 if (loop_p
&& kind
== GF_OMP_FOR_KIND_SIMD
)
12344 gimplify_seq_add_seq (pre_p
, for_pre_body
);
12345 for_pre_body
= NULL
;
12347 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
12348 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
12350 if (orig_for_stmt
!= for_stmt
)
12351 gimple_omp_for_set_combined_p (gfor
, true);
12352 if (gimplify_omp_ctxp
12353 && (gimplify_omp_ctxp
->combined_loop
12354 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
12355 && gimplify_omp_ctxp
->outer_context
12356 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
12358 gimple_omp_for_set_combined_into_p (gfor
, true);
12359 if (gimplify_omp_ctxp
->combined_loop
)
12360 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
12362 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
12365 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12367 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
12368 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
12369 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
12370 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
12371 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
12372 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
12373 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12374 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
12377 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
12378 constructs with GIMPLE_OMP_TASK sandwiched in between them.
12379 The outer taskloop stands for computing the number of iterations,
12380 counts for collapsed loops and holding taskloop specific clauses.
12381 The task construct stands for the effect of data sharing on the
12382 explicit task it creates and the inner taskloop stands for expansion
12383 of the static loop inside of the explicit task construct. */
12384 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
12386 tree
*gfor_clauses_ptr
= gimple_omp_for_clauses_ptr (gfor
);
12387 tree task_clauses
= NULL_TREE
;
12388 tree c
= *gfor_clauses_ptr
;
12389 tree
*gtask_clauses_ptr
= &task_clauses
;
12390 tree outer_for_clauses
= NULL_TREE
;
12391 tree
*gforo_clauses_ptr
= &outer_for_clauses
;
12392 bitmap lastprivate_uids
= NULL
;
12393 if (omp_find_clause (c
, OMP_CLAUSE_ALLOCATE
))
12395 c
= omp_find_clause (c
, OMP_CLAUSE_LASTPRIVATE
);
12398 lastprivate_uids
= BITMAP_ALLOC (NULL
);
12399 for (; c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
12400 OMP_CLAUSE_LASTPRIVATE
))
12401 bitmap_set_bit (lastprivate_uids
,
12402 DECL_UID (OMP_CLAUSE_DECL (c
)));
12404 c
= *gfor_clauses_ptr
;
12406 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
12407 switch (OMP_CLAUSE_CODE (c
))
12409 /* These clauses are allowed on task, move them there. */
12410 case OMP_CLAUSE_SHARED
:
12411 case OMP_CLAUSE_FIRSTPRIVATE
:
12412 case OMP_CLAUSE_DEFAULT
:
12413 case OMP_CLAUSE_IF
:
12414 case OMP_CLAUSE_UNTIED
:
12415 case OMP_CLAUSE_FINAL
:
12416 case OMP_CLAUSE_MERGEABLE
:
12417 case OMP_CLAUSE_PRIORITY
:
12418 case OMP_CLAUSE_REDUCTION
:
12419 case OMP_CLAUSE_IN_REDUCTION
:
12420 *gtask_clauses_ptr
= c
;
12421 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12423 case OMP_CLAUSE_PRIVATE
:
12424 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c
))
12426 /* We want private on outer for and firstprivate
12429 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12430 OMP_CLAUSE_FIRSTPRIVATE
);
12431 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12432 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
12434 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12435 *gforo_clauses_ptr
= c
;
12436 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12440 *gtask_clauses_ptr
= c
;
12441 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12444 /* These clauses go into outer taskloop clauses. */
12445 case OMP_CLAUSE_GRAINSIZE
:
12446 case OMP_CLAUSE_NUM_TASKS
:
12447 case OMP_CLAUSE_NOGROUP
:
12448 *gforo_clauses_ptr
= c
;
12449 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12451 /* Collapse clause we duplicate on both taskloops. */
12452 case OMP_CLAUSE_COLLAPSE
:
12453 *gfor_clauses_ptr
= c
;
12454 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12455 *gforo_clauses_ptr
= copy_node (c
);
12456 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
12458 /* For lastprivate, keep the clause on inner taskloop, and add
12459 a shared clause on task. If the same decl is also firstprivate,
12460 add also firstprivate clause on the inner taskloop. */
12461 case OMP_CLAUSE_LASTPRIVATE
:
12462 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
12464 /* For taskloop C++ lastprivate IVs, we want:
12465 1) private on outer taskloop
12466 2) firstprivate and shared on task
12467 3) lastprivate on inner taskloop */
12469 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12470 OMP_CLAUSE_FIRSTPRIVATE
);
12471 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12472 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
12474 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12475 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
) = 1;
12476 *gforo_clauses_ptr
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12477 OMP_CLAUSE_PRIVATE
);
12478 OMP_CLAUSE_DECL (*gforo_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12479 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr
) = 1;
12480 TREE_TYPE (*gforo_clauses_ptr
) = TREE_TYPE (c
);
12481 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
12483 *gfor_clauses_ptr
= c
;
12484 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12486 = build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_SHARED
);
12487 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12488 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
12489 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr
) = 1;
12491 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12493 /* Allocate clause we duplicate on task and inner taskloop
12494 if the decl is lastprivate, otherwise just put on task. */
12495 case OMP_CLAUSE_ALLOCATE
:
12496 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
12497 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
12499 /* Additionally, put firstprivate clause on task
12500 for the allocator if it is not constant. */
12502 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12503 OMP_CLAUSE_FIRSTPRIVATE
);
12504 OMP_CLAUSE_DECL (*gtask_clauses_ptr
)
12505 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
12506 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12508 if (lastprivate_uids
12509 && bitmap_bit_p (lastprivate_uids
,
12510 DECL_UID (OMP_CLAUSE_DECL (c
))))
12512 *gfor_clauses_ptr
= c
;
12513 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12514 *gtask_clauses_ptr
= copy_node (c
);
12515 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12519 *gtask_clauses_ptr
= c
;
12520 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12524 gcc_unreachable ();
12526 *gfor_clauses_ptr
= NULL_TREE
;
12527 *gtask_clauses_ptr
= NULL_TREE
;
12528 *gforo_clauses_ptr
= NULL_TREE
;
12529 BITMAP_FREE (lastprivate_uids
);
12530 g
= gimple_build_bind (NULL_TREE
, gfor
, NULL_TREE
);
12531 g
= gimple_build_omp_task (g
, task_clauses
, NULL_TREE
, NULL_TREE
,
12532 NULL_TREE
, NULL_TREE
, NULL_TREE
);
12533 gimple_omp_task_set_taskloop_p (g
, true);
12534 g
= gimple_build_bind (NULL_TREE
, g
, NULL_TREE
);
12536 = gimple_build_omp_for (g
, GF_OMP_FOR_KIND_TASKLOOP
, outer_for_clauses
,
12537 gimple_omp_for_collapse (gfor
),
12538 gimple_omp_for_pre_body (gfor
));
12539 gimple_omp_for_set_pre_body (gfor
, NULL
);
12540 gimple_omp_for_set_combined_p (gforo
, true);
12541 gimple_omp_for_set_combined_into_p (gfor
, true);
12542 for (i
= 0; i
< (int) gimple_omp_for_collapse (gfor
); i
++)
12544 tree type
= TREE_TYPE (gimple_omp_for_index (gfor
, i
));
12545 tree v
= create_tmp_var (type
);
12546 gimple_omp_for_set_index (gforo
, i
, v
);
12547 t
= unshare_expr (gimple_omp_for_initial (gfor
, i
));
12548 gimple_omp_for_set_initial (gforo
, i
, t
);
12549 gimple_omp_for_set_cond (gforo
, i
,
12550 gimple_omp_for_cond (gfor
, i
));
12551 t
= unshare_expr (gimple_omp_for_final (gfor
, i
));
12552 gimple_omp_for_set_final (gforo
, i
, t
);
12553 t
= unshare_expr (gimple_omp_for_incr (gfor
, i
));
12554 gcc_assert (TREE_OPERAND (t
, 0) == gimple_omp_for_index (gfor
, i
));
12555 TREE_OPERAND (t
, 0) = v
;
12556 gimple_omp_for_set_incr (gforo
, i
, t
);
12557 t
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
12558 OMP_CLAUSE_DECL (t
) = v
;
12559 OMP_CLAUSE_CHAIN (t
) = gimple_omp_for_clauses (gforo
);
12560 gimple_omp_for_set_clauses (gforo
, t
);
12561 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
12563 tree
*p1
= NULL
, *p2
= NULL
;
12564 t
= gimple_omp_for_initial (gforo
, i
);
12565 if (TREE_CODE (t
) == TREE_VEC
)
12566 p1
= &TREE_VEC_ELT (t
, 0);
12567 t
= gimple_omp_for_final (gforo
, i
);
12568 if (TREE_CODE (t
) == TREE_VEC
)
12571 p2
= &TREE_VEC_ELT (t
, 0);
12573 p1
= &TREE_VEC_ELT (t
, 0);
12578 for (j
= 0; j
< i
; j
++)
12579 if (*p1
== gimple_omp_for_index (gfor
, j
))
12581 *p1
= gimple_omp_for_index (gforo
, j
);
12586 gcc_assert (j
< i
);
12590 gimplify_seq_add_stmt (pre_p
, gforo
);
12593 gimplify_seq_add_stmt (pre_p
, gfor
);
12595 if (TREE_CODE (orig_for_stmt
) == OMP_FOR
)
12597 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12598 unsigned lastprivate_conditional
= 0;
12600 && (ctx
->region_type
== ORT_TARGET_DATA
12601 || ctx
->region_type
== ORT_TASKGROUP
))
12602 ctx
= ctx
->outer_context
;
12603 if (ctx
&& (ctx
->region_type
& ORT_PARALLEL
) != 0)
12604 for (tree c
= gimple_omp_for_clauses (gfor
);
12605 c
; c
= OMP_CLAUSE_CHAIN (c
))
12606 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12607 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
12608 ++lastprivate_conditional
;
12609 if (lastprivate_conditional
)
12611 struct omp_for_data fd
;
12612 omp_extract_for_data (gfor
, &fd
, NULL
);
12613 tree type
= build_array_type_nelts (unsigned_type_for (fd
.iter_type
),
12614 lastprivate_conditional
);
12615 tree var
= create_tmp_var_raw (type
);
12616 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
12617 OMP_CLAUSE_DECL (c
) = var
;
12618 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
12619 gimple_omp_for_set_clauses (gfor
, c
);
12620 omp_add_variable (ctx
, var
, GOVD_CONDTEMP
| GOVD_SEEN
);
12623 else if (TREE_CODE (orig_for_stmt
) == OMP_SIMD
)
12625 unsigned lastprivate_conditional
= 0;
12626 for (tree c
= gimple_omp_for_clauses (gfor
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12627 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12628 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
12629 ++lastprivate_conditional
;
12630 if (lastprivate_conditional
)
12632 struct omp_for_data fd
;
12633 omp_extract_for_data (gfor
, &fd
, NULL
);
12634 tree type
= unsigned_type_for (fd
.iter_type
);
12635 while (lastprivate_conditional
--)
12637 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
12638 OMP_CLAUSE__CONDTEMP_
);
12639 OMP_CLAUSE_DECL (c
) = create_tmp_var (type
);
12640 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
12641 gimple_omp_for_set_clauses (gfor
, c
);
12646 if (ret
!= GS_ALL_DONE
)
12648 *expr_p
= NULL_TREE
;
12649 return GS_ALL_DONE
;
12652 /* Helper for gimplify_omp_loop, called through walk_tree. */
12655 replace_reduction_placeholders (tree
*tp
, int *walk_subtrees
, void *data
)
12659 tree
*d
= (tree
*) data
;
12660 if (*tp
== OMP_CLAUSE_REDUCTION_PLACEHOLDER (d
[0]))
12662 *tp
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (d
[1]);
12663 *walk_subtrees
= 0;
12665 else if (*tp
== OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d
[0]))
12667 *tp
= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d
[1]);
12668 *walk_subtrees
= 0;
12674 /* Gimplify the gross structure of an OMP_LOOP statement. */
12676 static enum gimplify_status
12677 gimplify_omp_loop (tree
*expr_p
, gimple_seq
*pre_p
)
12679 tree for_stmt
= *expr_p
;
12680 tree clauses
= OMP_FOR_CLAUSES (for_stmt
);
12681 struct gimplify_omp_ctx
*octx
= gimplify_omp_ctxp
;
12682 enum omp_clause_bind_kind kind
= OMP_CLAUSE_BIND_THREAD
;
12685 /* If order is not present, the behavior is as if order(concurrent)
12687 tree order
= omp_find_clause (clauses
, OMP_CLAUSE_ORDER
);
12688 if (order
== NULL_TREE
)
12690 order
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_ORDER
);
12691 OMP_CLAUSE_CHAIN (order
) = clauses
;
12692 OMP_FOR_CLAUSES (for_stmt
) = clauses
= order
;
12695 tree bind
= omp_find_clause (clauses
, OMP_CLAUSE_BIND
);
12696 if (bind
== NULL_TREE
)
12698 if (!flag_openmp
) /* flag_openmp_simd */
12700 else if (octx
&& (octx
->region_type
& ORT_TEAMS
) != 0)
12701 kind
= OMP_CLAUSE_BIND_TEAMS
;
12702 else if (octx
&& (octx
->region_type
& ORT_PARALLEL
) != 0)
12703 kind
= OMP_CLAUSE_BIND_PARALLEL
;
12706 for (; octx
; octx
= octx
->outer_context
)
12708 if ((octx
->region_type
& ORT_ACC
) != 0
12709 || octx
->region_type
== ORT_NONE
12710 || octx
->region_type
== ORT_IMPLICIT_TARGET
)
12714 if (octx
== NULL
&& !in_omp_construct
)
12715 error_at (EXPR_LOCATION (for_stmt
),
12716 "%<bind%> clause not specified on a %<loop%> "
12717 "construct not nested inside another OpenMP construct");
12719 bind
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_BIND
);
12720 OMP_CLAUSE_CHAIN (bind
) = clauses
;
12721 OMP_CLAUSE_BIND_KIND (bind
) = kind
;
12722 OMP_FOR_CLAUSES (for_stmt
) = bind
;
12725 switch (OMP_CLAUSE_BIND_KIND (bind
))
12727 case OMP_CLAUSE_BIND_THREAD
:
12729 case OMP_CLAUSE_BIND_PARALLEL
:
12730 if (!flag_openmp
) /* flag_openmp_simd */
12732 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12735 for (; octx
; octx
= octx
->outer_context
)
12736 if (octx
->region_type
== ORT_SIMD
12737 && omp_find_clause (octx
->clauses
, OMP_CLAUSE_BIND
) == NULL_TREE
)
12739 error_at (EXPR_LOCATION (for_stmt
),
12740 "%<bind(parallel)%> on a %<loop%> construct nested "
12741 "inside %<simd%> construct");
12742 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12745 kind
= OMP_CLAUSE_BIND_PARALLEL
;
12747 case OMP_CLAUSE_BIND_TEAMS
:
12748 if (!flag_openmp
) /* flag_openmp_simd */
12750 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12754 && octx
->region_type
!= ORT_IMPLICIT_TARGET
12755 && octx
->region_type
!= ORT_NONE
12756 && (octx
->region_type
& ORT_TEAMS
) == 0)
12757 || in_omp_construct
)
12759 error_at (EXPR_LOCATION (for_stmt
),
12760 "%<bind(teams)%> on a %<loop%> region not strictly "
12761 "nested inside of a %<teams%> region");
12762 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12765 kind
= OMP_CLAUSE_BIND_TEAMS
;
12768 gcc_unreachable ();
12771 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
12772 switch (OMP_CLAUSE_CODE (*pc
))
12774 case OMP_CLAUSE_REDUCTION
:
12775 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc
))
12777 error_at (OMP_CLAUSE_LOCATION (*pc
),
12778 "%<inscan%> %<reduction%> clause on "
12779 "%qs construct", "loop");
12780 OMP_CLAUSE_REDUCTION_INSCAN (*pc
) = 0;
12782 if (OMP_CLAUSE_REDUCTION_TASK (*pc
))
12784 error_at (OMP_CLAUSE_LOCATION (*pc
),
12785 "invalid %<task%> reduction modifier on construct "
12786 "other than %<parallel%>, %qs or %<sections%>",
12787 lang_GNU_Fortran () ? "do" : "for");
12788 OMP_CLAUSE_REDUCTION_TASK (*pc
) = 0;
12790 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12792 case OMP_CLAUSE_LASTPRIVATE
:
12793 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12795 tree t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
12796 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12797 if (OMP_CLAUSE_DECL (*pc
) == TREE_OPERAND (t
, 0))
12799 if (OMP_FOR_ORIG_DECLS (for_stmt
)
12800 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
12802 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
12805 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
12806 if (OMP_CLAUSE_DECL (*pc
) == TREE_PURPOSE (orig
))
12810 if (i
== TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)))
12812 error_at (OMP_CLAUSE_LOCATION (*pc
),
12813 "%<lastprivate%> clause on a %<loop%> construct refers "
12814 "to a variable %qD which is not the loop iterator",
12815 OMP_CLAUSE_DECL (*pc
));
12816 *pc
= OMP_CLAUSE_CHAIN (*pc
);
12819 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12822 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12826 TREE_SET_CODE (for_stmt
, OMP_SIMD
);
12831 case OMP_CLAUSE_BIND_THREAD
: last
= 0; break;
12832 case OMP_CLAUSE_BIND_PARALLEL
: last
= 1; break;
12833 case OMP_CLAUSE_BIND_TEAMS
: last
= 2; break;
12835 for (int pass
= 1; pass
<= last
; pass
++)
12839 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
12840 append_to_statement_list (*expr_p
, &BIND_EXPR_BODY (bind
));
12841 *expr_p
= make_node (OMP_PARALLEL
);
12842 TREE_TYPE (*expr_p
) = void_type_node
;
12843 OMP_PARALLEL_BODY (*expr_p
) = bind
;
12844 OMP_PARALLEL_COMBINED (*expr_p
) = 1;
12845 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (for_stmt
));
12846 tree
*pc
= &OMP_PARALLEL_CLAUSES (*expr_p
);
12847 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12848 if (OMP_FOR_ORIG_DECLS (for_stmt
)
12849 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
))
12852 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
12853 if (TREE_PURPOSE (elt
) && TREE_VALUE (elt
))
12855 *pc
= build_omp_clause (UNKNOWN_LOCATION
,
12856 OMP_CLAUSE_FIRSTPRIVATE
);
12857 OMP_CLAUSE_DECL (*pc
) = TREE_VALUE (elt
);
12858 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12862 tree t
= make_node (pass
== 2 ? OMP_DISTRIBUTE
: OMP_FOR
);
12863 tree
*pc
= &OMP_FOR_CLAUSES (t
);
12864 TREE_TYPE (t
) = void_type_node
;
12865 OMP_FOR_BODY (t
) = *expr_p
;
12866 SET_EXPR_LOCATION (t
, EXPR_LOCATION (for_stmt
));
12867 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12868 switch (OMP_CLAUSE_CODE (c
))
12870 case OMP_CLAUSE_BIND
:
12871 case OMP_CLAUSE_ORDER
:
12872 case OMP_CLAUSE_COLLAPSE
:
12873 *pc
= copy_node (c
);
12874 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12876 case OMP_CLAUSE_PRIVATE
:
12877 case OMP_CLAUSE_FIRSTPRIVATE
:
12878 /* Only needed on innermost. */
12880 case OMP_CLAUSE_LASTPRIVATE
:
12881 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
) && pass
!= last
)
12883 *pc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12884 OMP_CLAUSE_FIRSTPRIVATE
);
12885 OMP_CLAUSE_DECL (*pc
) = OMP_CLAUSE_DECL (c
);
12886 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
12887 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12889 *pc
= copy_node (c
);
12890 OMP_CLAUSE_LASTPRIVATE_STMT (*pc
) = NULL_TREE
;
12891 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
12892 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
12895 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc
) = 1;
12897 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
12898 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc
) = 0;
12900 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12902 case OMP_CLAUSE_REDUCTION
:
12903 *pc
= copy_node (c
);
12904 OMP_CLAUSE_DECL (*pc
) = unshare_expr (OMP_CLAUSE_DECL (c
));
12905 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
12906 OMP_CLAUSE_REDUCTION_INIT (*pc
)
12907 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c
));
12908 OMP_CLAUSE_REDUCTION_MERGE (*pc
)
12909 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c
));
12910 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
))
12912 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
)
12913 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
));
12914 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
))
12915 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
)
12916 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
));
12918 tree data
[2] = { c
, nc
};
12919 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc
),
12920 replace_reduction_placeholders
,
12922 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc
),
12923 replace_reduction_placeholders
,
12926 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12929 gcc_unreachable ();
12934 return gimplify_omp_for (expr_p
, pre_p
);
12938 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
12939 of OMP_TARGET's body. */
12942 find_omp_teams (tree
*tp
, int *walk_subtrees
, void *)
12944 *walk_subtrees
= 0;
12945 switch (TREE_CODE (*tp
))
12950 case STATEMENT_LIST
:
12951 *walk_subtrees
= 1;
12959 /* Helper function of optimize_target_teams, determine if the expression
12960 can be computed safely before the target construct on the host. */
12963 computable_teams_clause (tree
*tp
, int *walk_subtrees
, void *)
12969 *walk_subtrees
= 0;
12972 switch (TREE_CODE (*tp
))
12977 *walk_subtrees
= 0;
12978 if (error_operand_p (*tp
)
12979 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
12980 || DECL_HAS_VALUE_EXPR_P (*tp
)
12981 || DECL_THREAD_LOCAL_P (*tp
)
12982 || TREE_SIDE_EFFECTS (*tp
)
12983 || TREE_THIS_VOLATILE (*tp
))
12985 if (is_global_var (*tp
)
12986 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp
))
12987 || lookup_attribute ("omp declare target link",
12988 DECL_ATTRIBUTES (*tp
))))
12991 && !DECL_SEEN_IN_BIND_EXPR_P (*tp
)
12992 && !is_global_var (*tp
)
12993 && decl_function_context (*tp
) == current_function_decl
)
12995 n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
12996 (splay_tree_key
) *tp
);
12999 if (gimplify_omp_ctxp
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
13003 else if (n
->value
& GOVD_LOCAL
)
13005 else if (n
->value
& GOVD_FIRSTPRIVATE
)
13007 else if ((n
->value
& (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
13008 == (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
13012 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
13016 if (TARGET_EXPR_INITIAL (*tp
)
13017 || TREE_CODE (TARGET_EXPR_SLOT (*tp
)) != VAR_DECL
)
13019 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp
),
13020 walk_subtrees
, NULL
);
13021 /* Allow some reasonable subset of integral arithmetics. */
13025 case TRUNC_DIV_EXPR
:
13026 case CEIL_DIV_EXPR
:
13027 case FLOOR_DIV_EXPR
:
13028 case ROUND_DIV_EXPR
:
13029 case TRUNC_MOD_EXPR
:
13030 case CEIL_MOD_EXPR
:
13031 case FLOOR_MOD_EXPR
:
13032 case ROUND_MOD_EXPR
:
13034 case EXACT_DIV_EXPR
:
13045 case NON_LVALUE_EXPR
:
13047 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
13050 /* And disallow anything else, except for comparisons. */
13052 if (COMPARISON_CLASS_P (*tp
))
13058 /* Try to determine if the num_teams and/or thread_limit expressions
13059 can have their values determined already before entering the
13061 INTEGER_CSTs trivially are,
13062 integral decls that are firstprivate (explicitly or implicitly)
13063 or explicitly map(always, to:) or map(always, tofrom:) on the target
13064 region too, and expressions involving simple arithmetics on those
13065 too, function calls are not ok, dereferencing something neither etc.
13066 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
13067 EXPR based on what we find:
13068 0 stands for clause not specified at all, use implementation default
13069 -1 stands for value that can't be determined easily before entering
13070 the target construct.
13071 If teams construct is not present at all, use 1 for num_teams
13072 and 0 for thread_limit (only one team is involved, and the thread
13073 limit is implementation defined. */
13076 optimize_target_teams (tree target
, gimple_seq
*pre_p
)
13078 tree body
= OMP_BODY (target
);
13079 tree teams
= walk_tree (&body
, find_omp_teams
, NULL
, NULL
);
13080 tree num_teams
= integer_zero_node
;
13081 tree thread_limit
= integer_zero_node
;
13082 location_t num_teams_loc
= EXPR_LOCATION (target
);
13083 location_t thread_limit_loc
= EXPR_LOCATION (target
);
13085 struct gimplify_omp_ctx
*target_ctx
= gimplify_omp_ctxp
;
13087 if (teams
== NULL_TREE
)
13088 num_teams
= integer_one_node
;
13090 for (c
= OMP_TEAMS_CLAUSES (teams
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13092 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
)
13095 num_teams_loc
= OMP_CLAUSE_LOCATION (c
);
13097 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREAD_LIMIT
)
13100 thread_limit_loc
= OMP_CLAUSE_LOCATION (c
);
13104 expr
= OMP_CLAUSE_OPERAND (c
, 0);
13105 if (TREE_CODE (expr
) == INTEGER_CST
)
13110 if (walk_tree (&expr
, computable_teams_clause
, NULL
, NULL
))
13112 *p
= integer_minus_one_node
;
13116 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
13117 if (gimplify_expr (p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
, false)
13120 gimplify_omp_ctxp
= target_ctx
;
13121 *p
= integer_minus_one_node
;
13124 gimplify_omp_ctxp
= target_ctx
;
13125 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
13126 OMP_CLAUSE_OPERAND (c
, 0) = *p
;
13128 c
= build_omp_clause (thread_limit_loc
, OMP_CLAUSE_THREAD_LIMIT
);
13129 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
13130 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
13131 OMP_TARGET_CLAUSES (target
) = c
;
13132 c
= build_omp_clause (num_teams_loc
, OMP_CLAUSE_NUM_TEAMS
);
13133 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
13134 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
13135 OMP_TARGET_CLAUSES (target
) = c
;
13138 /* Gimplify the gross structure of several OMP constructs. */
13141 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
13143 tree expr
= *expr_p
;
13145 gimple_seq body
= NULL
;
13146 enum omp_region_type ort
;
13148 switch (TREE_CODE (expr
))
13152 ort
= ORT_WORKSHARE
;
13155 ort
= OMP_TARGET_COMBINED (expr
) ? ORT_COMBINED_TARGET
: ORT_TARGET
;
13158 ort
= ORT_ACC_KERNELS
;
13160 case OACC_PARALLEL
:
13161 ort
= ORT_ACC_PARALLEL
;
13164 ort
= ORT_ACC_SERIAL
;
13167 ort
= ORT_ACC_DATA
;
13169 case OMP_TARGET_DATA
:
13170 ort
= ORT_TARGET_DATA
;
13173 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
13174 if (gimplify_omp_ctxp
== NULL
13175 || gimplify_omp_ctxp
->region_type
== ORT_IMPLICIT_TARGET
)
13176 ort
= (enum omp_region_type
) (ort
| ORT_HOST_TEAMS
);
13178 case OACC_HOST_DATA
:
13179 ort
= ORT_ACC_HOST_DATA
;
13182 gcc_unreachable ();
13185 bool save_in_omp_construct
= in_omp_construct
;
13186 if ((ort
& ORT_ACC
) == 0)
13187 in_omp_construct
= false;
13188 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
,
13190 if (TREE_CODE (expr
) == OMP_TARGET
)
13191 optimize_target_teams (expr
, pre_p
);
13192 if ((ort
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
13193 || (ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
13195 push_gimplify_context ();
13196 gimple
*g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
13197 if (gimple_code (g
) == GIMPLE_BIND
)
13198 pop_gimplify_context (g
);
13200 pop_gimplify_context (NULL
);
13201 if ((ort
& ORT_TARGET_DATA
) != 0)
13203 enum built_in_function end_ix
;
13204 switch (TREE_CODE (expr
))
13207 case OACC_HOST_DATA
:
13208 end_ix
= BUILT_IN_GOACC_DATA_END
;
13210 case OMP_TARGET_DATA
:
13211 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
13214 gcc_unreachable ();
13216 tree fn
= builtin_decl_explicit (end_ix
);
13217 g
= gimple_build_call (fn
, 0);
13218 gimple_seq cleanup
= NULL
;
13219 gimple_seq_add_stmt (&cleanup
, g
);
13220 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
13222 gimple_seq_add_stmt (&body
, g
);
13226 gimplify_and_add (OMP_BODY (expr
), &body
);
13227 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_CLAUSES (expr
),
13229 in_omp_construct
= save_in_omp_construct
;
13231 switch (TREE_CODE (expr
))
13234 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
13235 OMP_CLAUSES (expr
));
13237 case OACC_HOST_DATA
:
13238 if (omp_find_clause (OMP_CLAUSES (expr
), OMP_CLAUSE_IF_PRESENT
))
13240 for (tree c
= OMP_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13241 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
13242 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
) = 1;
13245 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_HOST_DATA
,
13246 OMP_CLAUSES (expr
));
13249 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
13250 OMP_CLAUSES (expr
));
13252 case OACC_PARALLEL
:
13253 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
13254 OMP_CLAUSES (expr
));
13257 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_SERIAL
,
13258 OMP_CLAUSES (expr
));
13261 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
13264 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
13267 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
13268 OMP_CLAUSES (expr
));
13270 case OMP_TARGET_DATA
:
13271 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
13272 to be evaluated before the use_device_{ptr,addr} clauses if they
13273 refer to the same variables. */
13275 tree use_device_clauses
;
13276 tree
*pc
, *uc
= &use_device_clauses
;
13277 for (pc
= &OMP_CLAUSES (expr
); *pc
; )
13278 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_PTR
13279 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_ADDR
)
13282 *pc
= OMP_CLAUSE_CHAIN (*pc
);
13283 uc
= &OMP_CLAUSE_CHAIN (*uc
);
13286 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13288 *pc
= use_device_clauses
;
13289 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
13290 OMP_CLAUSES (expr
));
13294 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
13295 if ((ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
13296 gimple_omp_teams_set_host (as_a
<gomp_teams
*> (stmt
), true);
13299 gcc_unreachable ();
13302 gimplify_seq_add_stmt (pre_p
, stmt
);
13303 *expr_p
= NULL_TREE
;
13306 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
13307 target update constructs. */
13310 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
13312 tree expr
= *expr_p
;
13315 enum omp_region_type ort
= ORT_WORKSHARE
;
13317 switch (TREE_CODE (expr
))
13319 case OACC_ENTER_DATA
:
13320 case OACC_EXIT_DATA
:
13321 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
;
13325 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
13328 case OMP_TARGET_UPDATE
:
13329 kind
= GF_OMP_TARGET_KIND_UPDATE
;
13331 case OMP_TARGET_ENTER_DATA
:
13332 kind
= GF_OMP_TARGET_KIND_ENTER_DATA
;
13334 case OMP_TARGET_EXIT_DATA
:
13335 kind
= GF_OMP_TARGET_KIND_EXIT_DATA
;
13338 gcc_unreachable ();
13340 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
13341 ort
, TREE_CODE (expr
));
13342 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OMP_STANDALONE_CLAUSES (expr
),
13344 if (TREE_CODE (expr
) == OACC_UPDATE
13345 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
13346 OMP_CLAUSE_IF_PRESENT
))
13348 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
13350 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13351 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
13352 switch (OMP_CLAUSE_MAP_KIND (c
))
13354 case GOMP_MAP_FORCE_TO
:
13355 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
13357 case GOMP_MAP_FORCE_FROM
:
13358 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FROM
);
13364 else if (TREE_CODE (expr
) == OACC_EXIT_DATA
13365 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
13366 OMP_CLAUSE_FINALIZE
))
13368 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
13370 bool have_clause
= false;
13371 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13372 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
13373 switch (OMP_CLAUSE_MAP_KIND (c
))
13375 case GOMP_MAP_FROM
:
13376 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_FROM
);
13377 have_clause
= true;
13379 case GOMP_MAP_RELEASE
:
13380 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_DELETE
);
13381 have_clause
= true;
13383 case GOMP_MAP_TO_PSET
:
13384 /* Fortran arrays with descriptors must map that descriptor when
13385 doing standalone "attach" operations (in OpenACC). In that
13386 case GOMP_MAP_TO_PSET appears by itself with no preceding
13387 clause (see trans-openmp.c:gfc_trans_omp_clauses). */
13389 case GOMP_MAP_POINTER
:
13390 /* TODO PR92929: we may see these here, but they'll always follow
13391 one of the clauses above, and will be handled by libgomp as
13392 one group, so no handling required here. */
13393 gcc_assert (have_clause
);
13395 case GOMP_MAP_DETACH
:
13396 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_DETACH
);
13397 have_clause
= false;
13399 case GOMP_MAP_STRUCT
:
13400 have_clause
= false;
13403 gcc_unreachable ();
13406 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
13408 gimplify_seq_add_stmt (pre_p
, stmt
);
13409 *expr_p
= NULL_TREE
;
13412 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
13413 stabilized the lhs of the atomic operation as *ADDR. Return true if
13414 EXPR is this stabilized form. */
13417 goa_lhs_expr_p (tree expr
, tree addr
)
13419 /* Also include casts to other type variants. The C front end is fond
13420 of adding these for e.g. volatile variables. This is like
13421 STRIP_TYPE_NOPS but includes the main variant lookup. */
13422 STRIP_USELESS_TYPE_CONVERSION (expr
);
13424 if (TREE_CODE (expr
) == INDIRECT_REF
)
13426 expr
= TREE_OPERAND (expr
, 0);
13427 while (expr
!= addr
13428 && (CONVERT_EXPR_P (expr
)
13429 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
13430 && TREE_CODE (expr
) == TREE_CODE (addr
)
13431 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
13433 expr
= TREE_OPERAND (expr
, 0);
13434 addr
= TREE_OPERAND (addr
, 0);
13438 return (TREE_CODE (addr
) == ADDR_EXPR
13439 && TREE_CODE (expr
) == ADDR_EXPR
13440 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
13442 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
13447 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
13448 expression does not involve the lhs, evaluate it into a temporary.
13449 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
13450 or -1 if an error was encountered. */
13453 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
13456 tree expr
= *expr_p
;
13459 if (goa_lhs_expr_p (expr
, lhs_addr
))
13464 if (is_gimple_val (expr
))
13468 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
13471 case tcc_comparison
:
13472 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
13476 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
13479 case tcc_expression
:
13480 switch (TREE_CODE (expr
))
13482 case TRUTH_ANDIF_EXPR
:
13483 case TRUTH_ORIF_EXPR
:
13484 case TRUTH_AND_EXPR
:
13485 case TRUTH_OR_EXPR
:
13486 case TRUTH_XOR_EXPR
:
13487 case BIT_INSERT_EXPR
:
13488 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
13489 lhs_addr
, lhs_var
);
13491 case TRUTH_NOT_EXPR
:
13492 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
13493 lhs_addr
, lhs_var
);
13495 case COMPOUND_EXPR
:
13496 /* Break out any preevaluations from cp_build_modify_expr. */
13497 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
13498 expr
= TREE_OPERAND (expr
, 1))
13499 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
13501 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
);
13506 case tcc_reference
:
13507 if (TREE_CODE (expr
) == BIT_FIELD_REF
)
13508 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
13509 lhs_addr
, lhs_var
);
13517 enum gimplify_status gs
;
13518 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
13519 if (gs
!= GS_ALL_DONE
)
13526 /* Gimplify an OMP_ATOMIC statement. */
13528 static enum gimplify_status
13529 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
13531 tree addr
= TREE_OPERAND (*expr_p
, 0);
13532 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
13533 ? NULL
: TREE_OPERAND (*expr_p
, 1);
13534 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
13536 gomp_atomic_load
*loadstmt
;
13537 gomp_atomic_store
*storestmt
;
13539 tmp_load
= create_tmp_reg (type
);
13540 if (rhs
&& goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
) < 0)
13543 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
13547 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
,
13548 OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
13549 gimplify_seq_add_stmt (pre_p
, loadstmt
);
13552 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13553 representatives. Use BIT_FIELD_REF on the lhs instead. */
13554 if (TREE_CODE (rhs
) == BIT_INSERT_EXPR
13555 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load
)))
13557 tree bitpos
= TREE_OPERAND (rhs
, 2);
13558 tree op1
= TREE_OPERAND (rhs
, 1);
13560 tree tmp_store
= tmp_load
;
13561 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_CAPTURE_OLD
)
13562 tmp_store
= get_initialized_tmp_var (tmp_load
, pre_p
);
13563 if (INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
13564 bitsize
= bitsize_int (TYPE_PRECISION (TREE_TYPE (op1
)));
13566 bitsize
= TYPE_SIZE (TREE_TYPE (op1
));
13567 gcc_assert (TREE_OPERAND (rhs
, 0) == tmp_load
);
13568 tree t
= build2_loc (EXPR_LOCATION (rhs
),
13569 MODIFY_EXPR
, void_type_node
,
13570 build3_loc (EXPR_LOCATION (rhs
), BIT_FIELD_REF
,
13571 TREE_TYPE (op1
), tmp_store
, bitsize
,
13573 gimplify_and_add (t
, pre_p
);
13576 if (gimplify_expr (&rhs
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
13581 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
13584 = gimple_build_omp_atomic_store (rhs
, OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
13585 gimplify_seq_add_stmt (pre_p
, storestmt
);
13586 switch (TREE_CODE (*expr_p
))
13588 case OMP_ATOMIC_READ
:
13589 case OMP_ATOMIC_CAPTURE_OLD
:
13590 *expr_p
= tmp_load
;
13591 gimple_omp_atomic_set_need_value (loadstmt
);
13593 case OMP_ATOMIC_CAPTURE_NEW
:
13595 gimple_omp_atomic_set_need_value (storestmt
);
13602 return GS_ALL_DONE
;
13605 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13606 body, and adding some EH bits. */
13608 static enum gimplify_status
13609 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
13611 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
13613 gtransaction
*trans_stmt
;
13614 gimple_seq body
= NULL
;
13617 /* Wrap the transaction body in a BIND_EXPR so we have a context
13618 where to put decls for OMP. */
13619 if (TREE_CODE (tbody
) != BIND_EXPR
)
13621 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
13622 TREE_SIDE_EFFECTS (bind
) = 1;
13623 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
13624 TRANSACTION_EXPR_BODY (expr
) = bind
;
13627 push_gimplify_context ();
13628 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
13630 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
13631 pop_gimplify_context (body_stmt
);
13633 trans_stmt
= gimple_build_transaction (body
);
13634 if (TRANSACTION_EXPR_OUTER (expr
))
13635 subcode
= GTMA_IS_OUTER
;
13636 else if (TRANSACTION_EXPR_RELAXED (expr
))
13637 subcode
= GTMA_IS_RELAXED
;
13638 gimple_transaction_set_subcode (trans_stmt
, subcode
);
13640 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
13648 *expr_p
= NULL_TREE
;
13649 return GS_ALL_DONE
;
13652 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13653 is the OMP_BODY of the original EXPR (which has already been
13654 gimplified so it's not present in the EXPR).
13656 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
13659 gimplify_omp_ordered (tree expr
, gimple_seq body
)
13664 tree source_c
= NULL_TREE
;
13665 tree sink_c
= NULL_TREE
;
13667 if (gimplify_omp_ctxp
)
13669 for (c
= OMP_ORDERED_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13670 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
13671 && gimplify_omp_ctxp
->loop_iter_var
.is_empty ()
13672 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
13673 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
))
13675 error_at (OMP_CLAUSE_LOCATION (c
),
13676 "%<ordered%> construct with %<depend%> clause must be "
13677 "closely nested inside a loop with %<ordered%> clause "
13678 "with a parameter");
13681 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
13682 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
13685 for (decls
= OMP_CLAUSE_DECL (c
), i
= 0;
13686 decls
&& TREE_CODE (decls
) == TREE_LIST
;
13687 decls
= TREE_CHAIN (decls
), ++i
)
13688 if (i
>= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
13690 else if (TREE_VALUE (decls
)
13691 != gimplify_omp_ctxp
->loop_iter_var
[2 * i
])
13693 error_at (OMP_CLAUSE_LOCATION (c
),
13694 "variable %qE is not an iteration "
13695 "of outermost loop %d, expected %qE",
13696 TREE_VALUE (decls
), i
+ 1,
13697 gimplify_omp_ctxp
->loop_iter_var
[2 * i
]);
13703 = gimplify_omp_ctxp
->loop_iter_var
[2 * i
+ 1];
13704 if (!fail
&& i
!= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
13706 error_at (OMP_CLAUSE_LOCATION (c
),
13707 "number of variables in %<depend%> clause with "
13708 "%<sink%> modifier does not match number of "
13709 "iteration variables");
13714 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
13715 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
13719 error_at (OMP_CLAUSE_LOCATION (c
),
13720 "more than one %<depend%> clause with %<source%> "
13721 "modifier on an %<ordered%> construct");
13728 if (source_c
&& sink_c
)
13730 error_at (OMP_CLAUSE_LOCATION (source_c
),
13731 "%<depend%> clause with %<source%> modifier specified "
13732 "together with %<depend%> clauses with %<sink%> modifier "
13733 "on the same construct");
13738 return gimple_build_nop ();
13739 return gimple_build_omp_ordered (body
, OMP_ORDERED_CLAUSES (expr
));
13742 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
13743 expression produces a value to be used as an operand inside a GIMPLE
13744 statement, the value will be stored back in *EXPR_P. This value will
13745 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13746 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13747 emitted in PRE_P and POST_P.
13749 Additionally, this process may overwrite parts of the input
13750 expression during gimplification. Ideally, it should be
13751 possible to do non-destructive gimplification.
13753 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13754 the expression needs to evaluate to a value to be used as
13755 an operand in a GIMPLE statement, this value will be stored in
13756 *EXPR_P on exit. This happens when the caller specifies one
13757 of fb_lvalue or fb_rvalue fallback flags.
13759 PRE_P will contain the sequence of GIMPLE statements corresponding
13760 to the evaluation of EXPR and all the side-effects that must
13761 be executed before the main expression. On exit, the last
13762 statement of PRE_P is the core statement being gimplified. For
13763 instance, when gimplifying 'if (++a)' the last statement in
13764 PRE_P will be 'if (t.1)' where t.1 is the result of
13765 pre-incrementing 'a'.
13767 POST_P will contain the sequence of GIMPLE statements corresponding
13768 to the evaluation of all the side-effects that must be executed
13769 after the main expression. If this is NULL, the post
13770 side-effects are stored at the end of PRE_P.
13772 The reason why the output is split in two is to handle post
13773 side-effects explicitly. In some cases, an expression may have
13774 inner and outer post side-effects which need to be emitted in
13775 an order different from the one given by the recursive
13776 traversal. For instance, for the expression (*p--)++ the post
13777 side-effects of '--' must actually occur *after* the post
13778 side-effects of '++'. However, gimplification will first visit
13779 the inner expression, so if a separate POST sequence was not
13780 used, the resulting sequence would be:
13787 However, the post-decrement operation in line #2 must not be
13788 evaluated until after the store to *p at line #4, so the
13789 correct sequence should be:
13796 So, by specifying a separate post queue, it is possible
13797 to emit the post side-effects in the correct order.
13798 If POST_P is NULL, an internal queue will be used. Before
13799 returning to the caller, the sequence POST_P is appended to
13800 the main output sequence PRE_P.
13802 GIMPLE_TEST_F points to a function that takes a tree T and
13803 returns nonzero if T is in the GIMPLE form requested by the
13804 caller. The GIMPLE predicates are in gimple.c.
13806 FALLBACK tells the function what sort of a temporary we want if
13807 gimplification cannot produce an expression that complies with
13810 fb_none means that no temporary should be generated
13811 fb_rvalue means that an rvalue is OK to generate
13812 fb_lvalue means that an lvalue is OK to generate
13813 fb_either means that either is OK, but an lvalue is preferable.
13814 fb_mayfail means that gimplification may fail (in which case
13815 GS_ERROR will be returned)
13817 The return value is either GS_ERROR or GS_ALL_DONE, since this
13818 function iterates until EXPR is completely gimplified or an error
13821 enum gimplify_status
13822 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
13823 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
13826 gimple_seq internal_pre
= NULL
;
13827 gimple_seq internal_post
= NULL
;
13830 location_t saved_location
;
13831 enum gimplify_status ret
;
13832 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
13835 save_expr
= *expr_p
;
13836 if (save_expr
== NULL_TREE
)
13837 return GS_ALL_DONE
;
13839 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
13840 is_statement
= gimple_test_f
== is_gimple_stmt
;
13842 gcc_assert (pre_p
);
13844 /* Consistency checks. */
13845 if (gimple_test_f
== is_gimple_reg
)
13846 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
13847 else if (gimple_test_f
== is_gimple_val
13848 || gimple_test_f
== is_gimple_call_addr
13849 || gimple_test_f
== is_gimple_condexpr
13850 || gimple_test_f
== is_gimple_condexpr_for_cond
13851 || gimple_test_f
== is_gimple_mem_rhs
13852 || gimple_test_f
== is_gimple_mem_rhs_or_call
13853 || gimple_test_f
== is_gimple_reg_rhs
13854 || gimple_test_f
== is_gimple_reg_rhs_or_call
13855 || gimple_test_f
== is_gimple_asm_val
13856 || gimple_test_f
== is_gimple_mem_ref_addr
)
13857 gcc_assert (fallback
& fb_rvalue
);
13858 else if (gimple_test_f
== is_gimple_min_lval
13859 || gimple_test_f
== is_gimple_lvalue
)
13860 gcc_assert (fallback
& fb_lvalue
);
13861 else if (gimple_test_f
== is_gimple_addressable
)
13862 gcc_assert (fallback
& fb_either
);
13863 else if (gimple_test_f
== is_gimple_stmt
)
13864 gcc_assert (fallback
== fb_none
);
13867 /* We should have recognized the GIMPLE_TEST_F predicate to
13868 know what kind of fallback to use in case a temporary is
13869 needed to hold the value or address of *EXPR_P. */
13870 gcc_unreachable ();
13873 /* We used to check the predicate here and return immediately if it
13874 succeeds. This is wrong; the design is for gimplification to be
13875 idempotent, and for the predicates to only test for valid forms, not
13876 whether they are fully simplified. */
13878 pre_p
= &internal_pre
;
13880 if (post_p
== NULL
)
13881 post_p
= &internal_post
;
13883 /* Remember the last statements added to PRE_P and POST_P. Every
13884 new statement added by the gimplification helpers needs to be
13885 annotated with location information. To centralize the
13886 responsibility, we remember the last statement that had been
13887 added to both queues before gimplifying *EXPR_P. If
13888 gimplification produces new statements in PRE_P and POST_P, those
13889 statements will be annotated with the same location information
13891 pre_last_gsi
= gsi_last (*pre_p
);
13892 post_last_gsi
= gsi_last (*post_p
);
13894 saved_location
= input_location
;
13895 if (save_expr
!= error_mark_node
13896 && EXPR_HAS_LOCATION (*expr_p
))
13897 input_location
= EXPR_LOCATION (*expr_p
);
13899 /* Loop over the specific gimplifiers until the toplevel node
13900 remains the same. */
13903 /* Strip away as many useless type conversions as possible
13904 at the toplevel. */
13905 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
13907 /* Remember the expr. */
13908 save_expr
= *expr_p
;
13910 /* Die, die, die, my darling. */
13911 if (error_operand_p (save_expr
))
13917 /* Do any language-specific gimplification. */
13918 ret
= ((enum gimplify_status
)
13919 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
13922 if (*expr_p
== NULL_TREE
)
13924 if (*expr_p
!= save_expr
)
13927 else if (ret
!= GS_UNHANDLED
)
13930 /* Make sure that all the cases set 'ret' appropriately. */
13931 ret
= GS_UNHANDLED
;
13932 switch (TREE_CODE (*expr_p
))
13934 /* First deal with the special cases. */
13936 case POSTINCREMENT_EXPR
:
13937 case POSTDECREMENT_EXPR
:
13938 case PREINCREMENT_EXPR
:
13939 case PREDECREMENT_EXPR
:
13940 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
13941 fallback
!= fb_none
,
13942 TREE_TYPE (*expr_p
));
13945 case VIEW_CONVERT_EXPR
:
13946 if ((fallback
& fb_rvalue
)
13947 && is_gimple_reg_type (TREE_TYPE (*expr_p
))
13948 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
13950 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13951 post_p
, is_gimple_val
, fb_rvalue
);
13952 recalculate_side_effects (*expr_p
);
13958 case ARRAY_RANGE_REF
:
13959 case REALPART_EXPR
:
13960 case IMAGPART_EXPR
:
13961 case COMPONENT_REF
:
13962 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
13963 fallback
? fallback
: fb_rvalue
);
13967 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
13969 /* C99 code may assign to an array in a structure value of a
13970 conditional expression, and this has undefined behavior
13971 only on execution, so create a temporary if an lvalue is
13973 if (fallback
== fb_lvalue
)
13975 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
13976 mark_addressable (*expr_p
);
13982 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
13984 /* C99 code may assign to an array in a structure returned
13985 from a function, and this has undefined behavior only on
13986 execution, so create a temporary if an lvalue is
13988 if (fallback
== fb_lvalue
)
13990 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
13991 mark_addressable (*expr_p
);
13997 gcc_unreachable ();
13999 case COMPOUND_EXPR
:
14000 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
14003 case COMPOUND_LITERAL_EXPR
:
14004 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
14005 gimple_test_f
, fallback
);
14010 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
14011 fallback
!= fb_none
);
14014 case TRUTH_ANDIF_EXPR
:
14015 case TRUTH_ORIF_EXPR
:
14017 /* Preserve the original type of the expression and the
14018 source location of the outer expression. */
14019 tree org_type
= TREE_TYPE (*expr_p
);
14020 *expr_p
= gimple_boolify (*expr_p
);
14021 *expr_p
= build3_loc (input_location
, COND_EXPR
,
14025 org_type
, boolean_true_node
),
14028 org_type
, boolean_false_node
));
14033 case TRUTH_NOT_EXPR
:
14035 tree type
= TREE_TYPE (*expr_p
);
14036 /* The parsers are careful to generate TRUTH_NOT_EXPR
14037 only with operands that are always zero or one.
14038 We do not fold here but handle the only interesting case
14039 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
14040 *expr_p
= gimple_boolify (*expr_p
);
14041 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
14042 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
14043 TREE_TYPE (*expr_p
),
14044 TREE_OPERAND (*expr_p
, 0));
14046 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
14047 TREE_TYPE (*expr_p
),
14048 TREE_OPERAND (*expr_p
, 0),
14049 build_int_cst (TREE_TYPE (*expr_p
), 1));
14050 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
14051 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
14057 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
14060 case ANNOTATE_EXPR
:
14062 tree cond
= TREE_OPERAND (*expr_p
, 0);
14063 tree kind
= TREE_OPERAND (*expr_p
, 1);
14064 tree data
= TREE_OPERAND (*expr_p
, 2);
14065 tree type
= TREE_TYPE (cond
);
14066 if (!INTEGRAL_TYPE_P (type
))
14072 tree tmp
= create_tmp_var (type
);
14073 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
14075 = gimple_build_call_internal (IFN_ANNOTATE
, 3, cond
, kind
, data
);
14076 gimple_call_set_lhs (call
, tmp
);
14077 gimplify_seq_add_stmt (pre_p
, call
);
14084 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
14088 if (IS_EMPTY_STMT (*expr_p
))
14094 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
14095 || fallback
== fb_none
)
14097 /* Just strip a conversion to void (or in void context) and
14099 *expr_p
= TREE_OPERAND (*expr_p
, 0);
14104 ret
= gimplify_conversion (expr_p
);
14105 if (ret
== GS_ERROR
)
14107 if (*expr_p
!= save_expr
)
14111 case FIX_TRUNC_EXPR
:
14112 /* unary_expr: ... | '(' cast ')' val | ... */
14113 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14114 is_gimple_val
, fb_rvalue
);
14115 recalculate_side_effects (*expr_p
);
14120 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
14121 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
14122 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
14124 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
14125 if (*expr_p
!= save_expr
)
14131 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14132 is_gimple_reg
, fb_rvalue
);
14133 if (ret
== GS_ERROR
)
14136 recalculate_side_effects (*expr_p
);
14137 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
14138 TREE_TYPE (*expr_p
),
14139 TREE_OPERAND (*expr_p
, 0),
14140 build_int_cst (saved_ptr_type
, 0));
14141 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
14142 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
14147 /* We arrive here through the various re-gimplifcation paths. */
14149 /* First try re-folding the whole thing. */
14150 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
14151 TREE_OPERAND (*expr_p
, 0),
14152 TREE_OPERAND (*expr_p
, 1));
14155 REF_REVERSE_STORAGE_ORDER (tmp
)
14156 = REF_REVERSE_STORAGE_ORDER (*expr_p
);
14158 recalculate_side_effects (*expr_p
);
14162 /* Avoid re-gimplifying the address operand if it is already
14163 in suitable form. Re-gimplifying would mark the address
14164 operand addressable. Always gimplify when not in SSA form
14165 as we still may have to gimplify decls with value-exprs. */
14166 if (!gimplify_ctxp
|| !gimple_in_ssa_p (cfun
)
14167 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
14169 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14170 is_gimple_mem_ref_addr
, fb_rvalue
);
14171 if (ret
== GS_ERROR
)
14174 recalculate_side_effects (*expr_p
);
14178 /* Constants need not be gimplified. */
14185 /* Drop the overflow flag on constants, we do not want
14186 that in the GIMPLE IL. */
14187 if (TREE_OVERFLOW_P (*expr_p
))
14188 *expr_p
= drop_tree_overflow (*expr_p
);
14193 /* If we require an lvalue, such as for ADDR_EXPR, retain the
14194 CONST_DECL node. Otherwise the decl is replaceable by its
14196 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
14197 if (fallback
& fb_lvalue
)
14201 *expr_p
= DECL_INITIAL (*expr_p
);
14207 ret
= gimplify_decl_expr (expr_p
, pre_p
);
14211 ret
= gimplify_bind_expr (expr_p
, pre_p
);
14215 ret
= gimplify_loop_expr (expr_p
, pre_p
);
14219 ret
= gimplify_switch_expr (expr_p
, pre_p
);
14223 ret
= gimplify_exit_expr (expr_p
);
14227 /* If the target is not LABEL, then it is a computed jump
14228 and the target needs to be gimplified. */
14229 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
14231 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
14232 NULL
, is_gimple_val
, fb_rvalue
);
14233 if (ret
== GS_ERROR
)
14236 gimplify_seq_add_stmt (pre_p
,
14237 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
14242 gimplify_seq_add_stmt (pre_p
,
14243 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
14244 PREDICT_EXPR_OUTCOME (*expr_p
)));
14249 ret
= gimplify_label_expr (expr_p
, pre_p
);
14250 label
= LABEL_EXPR_LABEL (*expr_p
);
14251 gcc_assert (decl_function_context (label
) == current_function_decl
);
14253 /* If the label is used in a goto statement, or address of the label
14254 is taken, we need to unpoison all variables that were seen so far.
14255 Doing so would prevent us from reporting a false positives. */
14256 if (asan_poisoned_variables
14257 && asan_used_labels
!= NULL
14258 && asan_used_labels
->contains (label
))
14259 asan_poison_variables (asan_poisoned_variables
, false, pre_p
);
14262 case CASE_LABEL_EXPR
:
14263 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
14265 if (gimplify_ctxp
->live_switch_vars
)
14266 asan_poison_variables (gimplify_ctxp
->live_switch_vars
, false,
14271 ret
= gimplify_return_expr (*expr_p
, pre_p
);
14275 /* Don't reduce this in place; let gimplify_init_constructor work its
14276 magic. Buf if we're just elaborating this for side effects, just
14277 gimplify any element that has side-effects. */
14278 if (fallback
== fb_none
)
14280 unsigned HOST_WIDE_INT ix
;
14282 tree temp
= NULL_TREE
;
14283 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
14284 if (TREE_SIDE_EFFECTS (val
))
14285 append_to_statement_list (val
, &temp
);
14288 ret
= temp
? GS_OK
: GS_ALL_DONE
;
14290 /* C99 code may assign to an array in a constructed
14291 structure or union, and this has undefined behavior only
14292 on execution, so create a temporary if an lvalue is
14294 else if (fallback
== fb_lvalue
)
14296 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
14297 mark_addressable (*expr_p
);
14304 /* The following are special cases that are not handled by the
14305 original GIMPLE grammar. */
14307 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
14310 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
14313 case BIT_FIELD_REF
:
14314 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14315 post_p
, is_gimple_lvalue
, fb_either
);
14316 recalculate_side_effects (*expr_p
);
14319 case TARGET_MEM_REF
:
14321 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
14323 if (TMR_BASE (*expr_p
))
14324 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
14325 post_p
, is_gimple_mem_ref_addr
, fb_either
);
14326 if (TMR_INDEX (*expr_p
))
14327 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
14328 post_p
, is_gimple_val
, fb_rvalue
);
14329 if (TMR_INDEX2 (*expr_p
))
14330 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
14331 post_p
, is_gimple_val
, fb_rvalue
);
14332 /* TMR_STEP and TMR_OFFSET are always integer constants. */
14333 ret
= MIN (r0
, r1
);
14337 case NON_LVALUE_EXPR
:
14338 /* This should have been stripped above. */
14339 gcc_unreachable ();
14342 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
14345 case TRY_FINALLY_EXPR
:
14346 case TRY_CATCH_EXPR
:
14348 gimple_seq eval
, cleanup
;
14351 /* Calls to destructors are generated automatically in FINALLY/CATCH
14352 block. They should have location as UNKNOWN_LOCATION. However,
14353 gimplify_call_expr will reset these call stmts to input_location
14354 if it finds stmt's location is unknown. To prevent resetting for
14355 destructors, we set the input_location to unknown.
14356 Note that this only affects the destructor calls in FINALLY/CATCH
14357 block, and will automatically reset to its original value by the
14358 end of gimplify_expr. */
14359 input_location
= UNKNOWN_LOCATION
;
14360 eval
= cleanup
= NULL
;
14361 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
14362 if (TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
14363 && TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == EH_ELSE_EXPR
)
14365 gimple_seq n
= NULL
, e
= NULL
;
14366 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
14368 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
14370 if (!gimple_seq_empty_p (n
) && !gimple_seq_empty_p (e
))
14372 geh_else
*stmt
= gimple_build_eh_else (n
, e
);
14373 gimple_seq_add_stmt (&cleanup
, stmt
);
14377 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
14378 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
14379 if (gimple_seq_empty_p (cleanup
))
14381 gimple_seq_add_seq (pre_p
, eval
);
14385 try_
= gimple_build_try (eval
, cleanup
,
14386 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
14387 ? GIMPLE_TRY_FINALLY
14388 : GIMPLE_TRY_CATCH
);
14389 if (EXPR_HAS_LOCATION (save_expr
))
14390 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
14391 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
14392 gimple_set_location (try_
, saved_location
);
14393 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
14394 gimple_try_set_catch_is_cleanup (try_
,
14395 TRY_CATCH_IS_CLEANUP (*expr_p
));
14396 gimplify_seq_add_stmt (pre_p
, try_
);
14401 case CLEANUP_POINT_EXPR
:
14402 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
14406 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
14412 gimple_seq handler
= NULL
;
14413 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
14414 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
14415 gimplify_seq_add_stmt (pre_p
, c
);
14420 case EH_FILTER_EXPR
:
14423 gimple_seq failure
= NULL
;
14425 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
14426 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
14427 gimple_set_no_warning (ehf
, TREE_NO_WARNING (*expr_p
));
14428 gimplify_seq_add_stmt (pre_p
, ehf
);
14435 enum gimplify_status r0
, r1
;
14436 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
14437 post_p
, is_gimple_val
, fb_rvalue
);
14438 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
14439 post_p
, is_gimple_val
, fb_rvalue
);
14440 TREE_SIDE_EFFECTS (*expr_p
) = 0;
14441 ret
= MIN (r0
, r1
);
14446 /* We get here when taking the address of a label. We mark
14447 the label as "forced"; meaning it can never be removed and
14448 it is a potential target for any computed goto. */
14449 FORCED_LABEL (*expr_p
) = 1;
14453 case STATEMENT_LIST
:
14454 ret
= gimplify_statement_list (expr_p
, pre_p
);
14457 case WITH_SIZE_EXPR
:
14459 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14460 post_p
== &internal_post
? NULL
: post_p
,
14461 gimple_test_f
, fallback
);
14462 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
14463 is_gimple_val
, fb_rvalue
);
14470 ret
= gimplify_var_or_parm_decl (expr_p
);
14474 /* When within an OMP context, notice uses of variables. */
14475 if (gimplify_omp_ctxp
)
14476 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
14480 case DEBUG_EXPR_DECL
:
14481 gcc_unreachable ();
14483 case DEBUG_BEGIN_STMT
:
14484 gimplify_seq_add_stmt (pre_p
,
14485 gimple_build_debug_begin_stmt
14486 (TREE_BLOCK (*expr_p
),
14487 EXPR_LOCATION (*expr_p
)));
14493 /* Allow callbacks into the gimplifier during optimization. */
14498 gimplify_omp_parallel (expr_p
, pre_p
);
14503 gimplify_omp_task (expr_p
, pre_p
);
14509 case OMP_DISTRIBUTE
:
14512 ret
= gimplify_omp_for (expr_p
, pre_p
);
14516 ret
= gimplify_omp_loop (expr_p
, pre_p
);
14520 gimplify_oacc_cache (expr_p
, pre_p
);
14525 gimplify_oacc_declare (expr_p
, pre_p
);
14529 case OACC_HOST_DATA
:
14532 case OACC_PARALLEL
:
14537 case OMP_TARGET_DATA
:
14539 gimplify_omp_workshare (expr_p
, pre_p
);
14543 case OACC_ENTER_DATA
:
14544 case OACC_EXIT_DATA
:
14546 case OMP_TARGET_UPDATE
:
14547 case OMP_TARGET_ENTER_DATA
:
14548 case OMP_TARGET_EXIT_DATA
:
14549 gimplify_omp_target_update (expr_p
, pre_p
);
14559 gimple_seq body
= NULL
;
14561 bool saved_in_omp_construct
= in_omp_construct
;
14563 in_omp_construct
= true;
14564 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
14565 in_omp_construct
= saved_in_omp_construct
;
14566 switch (TREE_CODE (*expr_p
))
14569 g
= gimple_build_omp_section (body
);
14572 g
= gimple_build_omp_master (body
);
14575 g
= gimplify_omp_ordered (*expr_p
, body
);
14578 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p
),
14579 pre_p
, ORT_WORKSHARE
, OMP_CRITICAL
);
14580 gimplify_adjust_omp_clauses (pre_p
, body
,
14581 &OMP_CRITICAL_CLAUSES (*expr_p
),
14583 g
= gimple_build_omp_critical (body
,
14584 OMP_CRITICAL_NAME (*expr_p
),
14585 OMP_CRITICAL_CLAUSES (*expr_p
));
14588 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p
),
14589 pre_p
, ORT_WORKSHARE
, OMP_SCAN
);
14590 gimplify_adjust_omp_clauses (pre_p
, body
,
14591 &OMP_SCAN_CLAUSES (*expr_p
),
14593 g
= gimple_build_omp_scan (body
, OMP_SCAN_CLAUSES (*expr_p
));
14596 gcc_unreachable ();
14598 gimplify_seq_add_stmt (pre_p
, g
);
14603 case OMP_TASKGROUP
:
14605 gimple_seq body
= NULL
;
14607 tree
*pclauses
= &OMP_TASKGROUP_CLAUSES (*expr_p
);
14608 bool saved_in_omp_construct
= in_omp_construct
;
14609 gimplify_scan_omp_clauses (pclauses
, pre_p
, ORT_TASKGROUP
,
14611 gimplify_adjust_omp_clauses (pre_p
, NULL
, pclauses
, OMP_TASKGROUP
);
14613 in_omp_construct
= true;
14614 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
14615 in_omp_construct
= saved_in_omp_construct
;
14616 gimple_seq cleanup
= NULL
;
14617 tree fn
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
14618 gimple
*g
= gimple_build_call (fn
, 0);
14619 gimple_seq_add_stmt (&cleanup
, g
);
14620 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
14622 gimple_seq_add_stmt (&body
, g
);
14623 g
= gimple_build_omp_taskgroup (body
, *pclauses
);
14624 gimplify_seq_add_stmt (pre_p
, g
);
14630 case OMP_ATOMIC_READ
:
14631 case OMP_ATOMIC_CAPTURE_OLD
:
14632 case OMP_ATOMIC_CAPTURE_NEW
:
14633 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
14636 case TRANSACTION_EXPR
:
14637 ret
= gimplify_transaction (expr_p
, pre_p
);
14640 case TRUTH_AND_EXPR
:
14641 case TRUTH_OR_EXPR
:
14642 case TRUTH_XOR_EXPR
:
14644 tree orig_type
= TREE_TYPE (*expr_p
);
14645 tree new_type
, xop0
, xop1
;
14646 *expr_p
= gimple_boolify (*expr_p
);
14647 new_type
= TREE_TYPE (*expr_p
);
14648 if (!useless_type_conversion_p (orig_type
, new_type
))
14650 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
14655 /* Boolified binary truth expressions are semantically equivalent
14656 to bitwise binary expressions. Canonicalize them to the
14657 bitwise variant. */
14658 switch (TREE_CODE (*expr_p
))
14660 case TRUTH_AND_EXPR
:
14661 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
14663 case TRUTH_OR_EXPR
:
14664 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
14666 case TRUTH_XOR_EXPR
:
14667 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
14672 /* Now make sure that operands have compatible type to
14673 expression's new_type. */
14674 xop0
= TREE_OPERAND (*expr_p
, 0);
14675 xop1
= TREE_OPERAND (*expr_p
, 1);
14676 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
14677 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
14680 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
14681 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
14684 /* Continue classified as tcc_binary. */
14688 case VEC_COND_EXPR
:
14691 case VEC_PERM_EXPR
:
14692 /* Classified as tcc_expression. */
14695 case BIT_INSERT_EXPR
:
14696 /* Argument 3 is a constant. */
14699 case POINTER_PLUS_EXPR
:
14701 enum gimplify_status r0
, r1
;
14702 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14703 post_p
, is_gimple_val
, fb_rvalue
);
14704 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
14705 post_p
, is_gimple_val
, fb_rvalue
);
14706 recalculate_side_effects (*expr_p
);
14707 ret
= MIN (r0
, r1
);
14712 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
14714 case tcc_comparison
:
14715 /* Handle comparison of objects of non scalar mode aggregates
14716 with a call to memcmp. It would be nice to only have to do
14717 this for variable-sized objects, but then we'd have to allow
14718 the same nest of reference nodes we allow for MODIFY_EXPR and
14719 that's too complex.
14721 Compare scalar mode aggregates as scalar mode values. Using
14722 memcmp for them would be very inefficient at best, and is
14723 plain wrong if bitfields are involved. */
14725 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
14727 /* Vector comparisons need no boolification. */
14728 if (TREE_CODE (type
) == VECTOR_TYPE
)
14730 else if (!AGGREGATE_TYPE_P (type
))
14732 tree org_type
= TREE_TYPE (*expr_p
);
14733 *expr_p
= gimple_boolify (*expr_p
);
14734 if (!useless_type_conversion_p (org_type
,
14735 TREE_TYPE (*expr_p
)))
14737 *expr_p
= fold_convert_loc (input_location
,
14738 org_type
, *expr_p
);
14744 else if (TYPE_MODE (type
) != BLKmode
)
14745 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
14747 ret
= gimplify_variable_sized_compare (expr_p
);
14752 /* If *EXPR_P does not need to be special-cased, handle it
14753 according to its class. */
14755 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14756 post_p
, is_gimple_val
, fb_rvalue
);
14762 enum gimplify_status r0
, r1
;
14764 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14765 post_p
, is_gimple_val
, fb_rvalue
);
14766 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
14767 post_p
, is_gimple_val
, fb_rvalue
);
14769 ret
= MIN (r0
, r1
);
14775 enum gimplify_status r0
, r1
, r2
;
14777 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14778 post_p
, is_gimple_val
, fb_rvalue
);
14779 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
14780 post_p
, is_gimple_val
, fb_rvalue
);
14781 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
14782 post_p
, is_gimple_val
, fb_rvalue
);
14784 ret
= MIN (MIN (r0
, r1
), r2
);
14788 case tcc_declaration
:
14791 goto dont_recalculate
;
14794 gcc_unreachable ();
14797 recalculate_side_effects (*expr_p
);
14803 gcc_assert (*expr_p
|| ret
!= GS_OK
);
14805 while (ret
== GS_OK
);
14807 /* If we encountered an error_mark somewhere nested inside, either
14808 stub out the statement or propagate the error back out. */
14809 if (ret
== GS_ERROR
)
14816 /* This was only valid as a return value from the langhook, which
14817 we handled. Make sure it doesn't escape from any other context. */
14818 gcc_assert (ret
!= GS_UNHANDLED
);
14820 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
14822 /* We aren't looking for a value, and we don't have a valid
14823 statement. If it doesn't have side-effects, throw it away.
14824 We can also get here with code such as "*&&L;", where L is
14825 a LABEL_DECL that is marked as FORCED_LABEL. */
14826 if (TREE_CODE (*expr_p
) == LABEL_DECL
14827 || !TREE_SIDE_EFFECTS (*expr_p
))
14829 else if (!TREE_THIS_VOLATILE (*expr_p
))
14831 /* This is probably a _REF that contains something nested that
14832 has side effects. Recurse through the operands to find it. */
14833 enum tree_code code
= TREE_CODE (*expr_p
);
14837 case COMPONENT_REF
:
14838 case REALPART_EXPR
:
14839 case IMAGPART_EXPR
:
14840 case VIEW_CONVERT_EXPR
:
14841 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14842 gimple_test_f
, fallback
);
14846 case ARRAY_RANGE_REF
:
14847 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14848 gimple_test_f
, fallback
);
14849 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
14850 gimple_test_f
, fallback
);
14854 /* Anything else with side-effects must be converted to
14855 a valid statement before we get here. */
14856 gcc_unreachable ();
14861 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
14862 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
)
14864 /* Historically, the compiler has treated a bare reference
14865 to a non-BLKmode volatile lvalue as forcing a load. */
14866 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
14868 /* Normally, we do not want to create a temporary for a
14869 TREE_ADDRESSABLE type because such a type should not be
14870 copied by bitwise-assignment. However, we make an
14871 exception here, as all we are doing here is ensuring that
14872 we read the bytes that make up the type. We use
14873 create_tmp_var_raw because create_tmp_var will abort when
14874 given a TREE_ADDRESSABLE type. */
14875 tree tmp
= create_tmp_var_raw (type
, "vol");
14876 gimple_add_tmp_var (tmp
);
14877 gimplify_assign (tmp
, *expr_p
, pre_p
);
14881 /* We can't do anything useful with a volatile reference to
14882 an incomplete type, so just throw it away. Likewise for
14883 a BLKmode type, since any implicit inner load should
14884 already have been turned into an explicit one by the
14885 gimplification process. */
14889 /* If we are gimplifying at the statement level, we're done. Tack
14890 everything together and return. */
14891 if (fallback
== fb_none
|| is_statement
)
14893 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14894 it out for GC to reclaim it. */
14895 *expr_p
= NULL_TREE
;
14897 if (!gimple_seq_empty_p (internal_pre
)
14898 || !gimple_seq_empty_p (internal_post
))
14900 gimplify_seq_add_seq (&internal_pre
, internal_post
);
14901 gimplify_seq_add_seq (pre_p
, internal_pre
);
14904 /* The result of gimplifying *EXPR_P is going to be the last few
14905 statements in *PRE_P and *POST_P. Add location information
14906 to all the statements that were added by the gimplification
14908 if (!gimple_seq_empty_p (*pre_p
))
14909 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
14911 if (!gimple_seq_empty_p (*post_p
))
14912 annotate_all_with_location_after (*post_p
, post_last_gsi
,
14918 #ifdef ENABLE_GIMPLE_CHECKING
14921 enum tree_code code
= TREE_CODE (*expr_p
);
14922 /* These expressions should already be in gimple IR form. */
14923 gcc_assert (code
!= MODIFY_EXPR
14924 && code
!= ASM_EXPR
14925 && code
!= BIND_EXPR
14926 && code
!= CATCH_EXPR
14927 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
14928 && code
!= EH_FILTER_EXPR
14929 && code
!= GOTO_EXPR
14930 && code
!= LABEL_EXPR
14931 && code
!= LOOP_EXPR
14932 && code
!= SWITCH_EXPR
14933 && code
!= TRY_FINALLY_EXPR
14934 && code
!= EH_ELSE_EXPR
14935 && code
!= OACC_PARALLEL
14936 && code
!= OACC_KERNELS
14937 && code
!= OACC_SERIAL
14938 && code
!= OACC_DATA
14939 && code
!= OACC_HOST_DATA
14940 && code
!= OACC_DECLARE
14941 && code
!= OACC_UPDATE
14942 && code
!= OACC_ENTER_DATA
14943 && code
!= OACC_EXIT_DATA
14944 && code
!= OACC_CACHE
14945 && code
!= OMP_CRITICAL
14947 && code
!= OACC_LOOP
14948 && code
!= OMP_MASTER
14949 && code
!= OMP_TASKGROUP
14950 && code
!= OMP_ORDERED
14951 && code
!= OMP_PARALLEL
14952 && code
!= OMP_SCAN
14953 && code
!= OMP_SECTIONS
14954 && code
!= OMP_SECTION
14955 && code
!= OMP_SINGLE
);
14959 /* Otherwise we're gimplifying a subexpression, so the resulting
14960 value is interesting. If it's a valid operand that matches
14961 GIMPLE_TEST_F, we're done. Unless we are handling some
14962 post-effects internally; if that's the case, we need to copy into
14963 a temporary before adding the post-effects to POST_P. */
14964 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
14967 /* Otherwise, we need to create a new temporary for the gimplified
14970 /* We can't return an lvalue if we have an internal postqueue. The
14971 object the lvalue refers to would (probably) be modified by the
14972 postqueue; we need to copy the value out first, which means an
14974 if ((fallback
& fb_lvalue
)
14975 && gimple_seq_empty_p (internal_post
)
14976 && is_gimple_addressable (*expr_p
))
14978 /* An lvalue will do. Take the address of the expression, store it
14979 in a temporary, and replace the expression with an INDIRECT_REF of
14981 tree ref_alias_type
= reference_alias_ptr_type (*expr_p
);
14982 unsigned int ref_align
= get_object_alignment (*expr_p
);
14983 tree ref_type
= TREE_TYPE (*expr_p
);
14984 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
14985 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
14986 if (TYPE_ALIGN (ref_type
) != ref_align
)
14987 ref_type
= build_aligned_type (ref_type
, ref_align
);
14988 *expr_p
= build2 (MEM_REF
, ref_type
,
14989 tmp
, build_zero_cst (ref_alias_type
));
14991 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
14993 /* An rvalue will do. Assign the gimplified expression into a
14994 new temporary TMP and replace the original expression with
14995 TMP. First, make sure that the expression has a type so that
14996 it can be assigned into a temporary. */
14997 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
14998 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
15002 #ifdef ENABLE_GIMPLE_CHECKING
15003 if (!(fallback
& fb_mayfail
))
15005 fprintf (stderr
, "gimplification failed:\n");
15006 print_generic_expr (stderr
, *expr_p
);
15007 debug_tree (*expr_p
);
15008 internal_error ("gimplification failed");
15011 gcc_assert (fallback
& fb_mayfail
);
15013 /* If this is an asm statement, and the user asked for the
15014 impossible, don't die. Fail and let gimplify_asm_expr
15020 /* Make sure the temporary matches our predicate. */
15021 gcc_assert ((*gimple_test_f
) (*expr_p
));
15023 if (!gimple_seq_empty_p (internal_post
))
15025 annotate_all_with_location (internal_post
, input_location
);
15026 gimplify_seq_add_seq (pre_p
, internal_post
);
15030 input_location
= saved_location
;
15034 /* Like gimplify_expr but make sure the gimplified result is not itself
15035 a SSA name (but a decl if it were). Temporaries required by
15036 evaluating *EXPR_P may be still SSA names. */
15038 static enum gimplify_status
15039 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
15040 bool (*gimple_test_f
) (tree
), fallback_t fallback
,
15043 bool was_ssa_name_p
= TREE_CODE (*expr_p
) == SSA_NAME
;
15044 enum gimplify_status ret
= gimplify_expr (expr_p
, pre_p
, post_p
,
15045 gimple_test_f
, fallback
);
15047 && TREE_CODE (*expr_p
) == SSA_NAME
)
15049 tree name
= *expr_p
;
15050 if (was_ssa_name_p
)
15051 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, NULL
, false);
15054 /* Avoid the extra copy if possible. */
15055 *expr_p
= create_tmp_reg (TREE_TYPE (name
));
15056 if (!gimple_nop_p (SSA_NAME_DEF_STMT (name
)))
15057 gimple_set_lhs (SSA_NAME_DEF_STMT (name
), *expr_p
);
15058 release_ssa_name (name
);
15064 /* Look through TYPE for variable-sized objects and gimplify each such
15065 size that we find. Add to LIST_P any statements generated. */
15068 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
15072 if (type
== NULL
|| type
== error_mark_node
)
15075 /* We first do the main variant, then copy into any other variants. */
15076 type
= TYPE_MAIN_VARIANT (type
);
15078 /* Avoid infinite recursion. */
15079 if (TYPE_SIZES_GIMPLIFIED (type
))
15082 TYPE_SIZES_GIMPLIFIED (type
) = 1;
15084 switch (TREE_CODE (type
))
15087 case ENUMERAL_TYPE
:
15090 case FIXED_POINT_TYPE
:
15091 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
15092 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
15094 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
15096 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
15097 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
15102 /* These types may not have declarations, so handle them here. */
15103 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
15104 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
15105 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
15106 with assigned stack slots, for -O1+ -g they should be tracked
15108 if (!(TYPE_NAME (type
)
15109 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
15110 && DECL_IGNORED_P (TYPE_NAME (type
)))
15111 && TYPE_DOMAIN (type
)
15112 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
15114 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
15115 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
15116 DECL_IGNORED_P (t
) = 0;
15117 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
15118 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
15119 DECL_IGNORED_P (t
) = 0;
15125 case QUAL_UNION_TYPE
:
15126 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
15127 if (TREE_CODE (field
) == FIELD_DECL
)
15129 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
15130 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
15131 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
15132 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
15137 case REFERENCE_TYPE
:
15138 /* We used to recurse on the pointed-to type here, which turned out to
15139 be incorrect because its definition might refer to variables not
15140 yet initialized at this point if a forward declaration is involved.
15142 It was actually useful for anonymous pointed-to types to ensure
15143 that the sizes evaluation dominates every possible later use of the
15144 values. Restricting to such types here would be safe since there
15145 is no possible forward declaration around, but would introduce an
15146 undesirable middle-end semantic to anonymity. We then defer to
15147 front-ends the responsibility of ensuring that the sizes are
15148 evaluated both early and late enough, e.g. by attaching artificial
15149 type declarations to the tree. */
15156 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
15157 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
15159 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
15161 TYPE_SIZE (t
) = TYPE_SIZE (type
);
15162 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
15163 TYPE_SIZES_GIMPLIFIED (t
) = 1;
15167 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
15168 a size or position, has had all of its SAVE_EXPRs evaluated.
15169 We add any required statements to *STMT_P. */
15172 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
15174 tree expr
= *expr_p
;
15176 /* We don't do anything if the value isn't there, is constant, or contains
15177 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
15178 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
15179 will want to replace it with a new variable, but that will cause problems
15180 if this type is from outside the function. It's OK to have that here. */
15181 if (expr
== NULL_TREE
15182 || is_gimple_constant (expr
)
15183 || TREE_CODE (expr
) == VAR_DECL
15184 || CONTAINS_PLACEHOLDER_P (expr
))
15187 *expr_p
= unshare_expr (expr
);
15189 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
15190 if the def vanishes. */
15191 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
, false);
15193 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
15194 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
15195 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
15196 if (is_gimple_constant (*expr_p
))
15197 *expr_p
= get_initialized_tmp_var (*expr_p
, stmt_p
, NULL
, false);
15200 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
15201 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
15202 is true, also gimplify the parameters. */
15205 gimplify_body (tree fndecl
, bool do_parms
)
15207 location_t saved_location
= input_location
;
15208 gimple_seq parm_stmts
, parm_cleanup
= NULL
, seq
;
15209 gimple
*outer_stmt
;
15212 timevar_push (TV_TREE_GIMPLIFY
);
15214 init_tree_ssa (cfun
);
15216 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
15218 default_rtl_profile ();
15220 gcc_assert (gimplify_ctxp
== NULL
);
15221 push_gimplify_context (true);
15223 if (flag_openacc
|| flag_openmp
)
15225 gcc_assert (gimplify_omp_ctxp
== NULL
);
15226 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl
)))
15227 gimplify_omp_ctxp
= new_omp_context (ORT_IMPLICIT_TARGET
);
15230 /* Unshare most shared trees in the body and in that of any nested functions.
15231 It would seem we don't have to do this for nested functions because
15232 they are supposed to be output and then the outer function gimplified
15233 first, but the g++ front end doesn't always do it that way. */
15234 unshare_body (fndecl
);
15235 unvisit_body (fndecl
);
15237 /* Make sure input_location isn't set to something weird. */
15238 input_location
= DECL_SOURCE_LOCATION (fndecl
);
15240 /* Resolve callee-copies. This has to be done before processing
15241 the body so that DECL_VALUE_EXPR gets processed correctly. */
15242 parm_stmts
= do_parms
? gimplify_parameters (&parm_cleanup
) : NULL
;
15244 /* Gimplify the function's body. */
15246 gimplify_stmt (&DECL_SAVED_TREE (fndecl
), &seq
);
15247 outer_stmt
= gimple_seq_first_nondebug_stmt (seq
);
15250 outer_stmt
= gimple_build_nop ();
15251 gimplify_seq_add_stmt (&seq
, outer_stmt
);
15254 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
15255 not the case, wrap everything in a GIMPLE_BIND to make it so. */
15256 if (gimple_code (outer_stmt
) == GIMPLE_BIND
15257 && (gimple_seq_first_nondebug_stmt (seq
)
15258 == gimple_seq_last_nondebug_stmt (seq
)))
15260 outer_bind
= as_a
<gbind
*> (outer_stmt
);
15261 if (gimple_seq_first_stmt (seq
) != outer_stmt
15262 || gimple_seq_last_stmt (seq
) != outer_stmt
)
15264 /* If there are debug stmts before or after outer_stmt, move them
15265 inside of outer_bind body. */
15266 gimple_stmt_iterator gsi
= gsi_for_stmt (outer_stmt
, &seq
);
15267 gimple_seq second_seq
= NULL
;
15268 if (gimple_seq_first_stmt (seq
) != outer_stmt
15269 && gimple_seq_last_stmt (seq
) != outer_stmt
)
15271 second_seq
= gsi_split_seq_after (gsi
);
15272 gsi_remove (&gsi
, false);
15274 else if (gimple_seq_first_stmt (seq
) != outer_stmt
)
15275 gsi_remove (&gsi
, false);
15278 gsi_remove (&gsi
, false);
15282 gimple_seq_add_seq_without_update (&seq
,
15283 gimple_bind_body (outer_bind
));
15284 gimple_seq_add_seq_without_update (&seq
, second_seq
);
15285 gimple_bind_set_body (outer_bind
, seq
);
15289 outer_bind
= gimple_build_bind (NULL_TREE
, seq
, NULL
);
15291 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
15293 /* If we had callee-copies statements, insert them at the beginning
15294 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
15295 if (!gimple_seq_empty_p (parm_stmts
))
15299 gimplify_seq_add_seq (&parm_stmts
, gimple_bind_body (outer_bind
));
15302 gtry
*g
= gimple_build_try (parm_stmts
, parm_cleanup
,
15303 GIMPLE_TRY_FINALLY
);
15305 gimple_seq_add_stmt (&parm_stmts
, g
);
15307 gimple_bind_set_body (outer_bind
, parm_stmts
);
15309 for (parm
= DECL_ARGUMENTS (current_function_decl
);
15310 parm
; parm
= DECL_CHAIN (parm
))
15311 if (DECL_HAS_VALUE_EXPR_P (parm
))
15313 DECL_HAS_VALUE_EXPR_P (parm
) = 0;
15314 DECL_IGNORED_P (parm
) = 0;
15318 if ((flag_openacc
|| flag_openmp
|| flag_openmp_simd
)
15319 && gimplify_omp_ctxp
)
15321 delete_omp_context (gimplify_omp_ctxp
);
15322 gimplify_omp_ctxp
= NULL
;
15325 pop_gimplify_context (outer_bind
);
15326 gcc_assert (gimplify_ctxp
== NULL
);
15328 if (flag_checking
&& !seen_error ())
15329 verify_gimple_in_seq (gimple_bind_body (outer_bind
));
15331 timevar_pop (TV_TREE_GIMPLIFY
);
15332 input_location
= saved_location
;
15337 typedef char *char_p
; /* For DEF_VEC_P. */
15339 /* Return whether we should exclude FNDECL from instrumentation. */
15342 flag_instrument_functions_exclude_p (tree fndecl
)
15346 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_functions
;
15347 if (v
&& v
->length () > 0)
15353 name
= lang_hooks
.decl_printable_name (fndecl
, 1);
15354 FOR_EACH_VEC_ELT (*v
, i
, s
)
15355 if (strstr (name
, s
) != NULL
)
15359 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_files
;
15360 if (v
&& v
->length () > 0)
15366 name
= DECL_SOURCE_FILE (fndecl
);
15367 FOR_EACH_VEC_ELT (*v
, i
, s
)
15368 if (strstr (name
, s
) != NULL
)
15375 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
15376 node for the function we want to gimplify.
15378 Return the sequence of GIMPLE statements corresponding to the body
15382 gimplify_function_tree (tree fndecl
)
15387 gcc_assert (!gimple_body (fndecl
));
15389 if (DECL_STRUCT_FUNCTION (fndecl
))
15390 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
15392 push_struct_function (fndecl
);
15394 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
15396 cfun
->curr_properties
|= PROP_gimple_lva
;
15398 if (asan_sanitize_use_after_scope ())
15399 asan_poisoned_variables
= new hash_set
<tree
> ();
15400 bind
= gimplify_body (fndecl
, true);
15401 if (asan_poisoned_variables
)
15403 delete asan_poisoned_variables
;
15404 asan_poisoned_variables
= NULL
;
15407 /* The tree body of the function is no longer needed, replace it
15408 with the new GIMPLE body. */
15410 gimple_seq_add_stmt (&seq
, bind
);
15411 gimple_set_body (fndecl
, seq
);
15413 /* If we're instrumenting function entry/exit, then prepend the call to
15414 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
15415 catch the exit hook. */
15416 /* ??? Add some way to ignore exceptions for this TFE. */
15417 if (flag_instrument_function_entry_exit
15418 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
15419 /* Do not instrument extern inline functions. */
15420 && !(DECL_DECLARED_INLINE_P (fndecl
)
15421 && DECL_EXTERNAL (fndecl
)
15422 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
15423 && !flag_instrument_functions_exclude_p (fndecl
))
15428 gimple_seq cleanup
= NULL
, body
= NULL
;
15429 tree tmp_var
, this_fn_addr
;
15432 /* The instrumentation hooks aren't going to call the instrumented
15433 function and the address they receive is expected to be matchable
15434 against symbol addresses. Make sure we don't create a trampoline,
15435 in case the current function is nested. */
15436 this_fn_addr
= build_fold_addr_expr (current_function_decl
);
15437 TREE_NO_TRAMPOLINE (this_fn_addr
) = 1;
15439 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
15440 call
= gimple_build_call (x
, 1, integer_zero_node
);
15441 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
15442 gimple_call_set_lhs (call
, tmp_var
);
15443 gimplify_seq_add_stmt (&cleanup
, call
);
15444 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT
);
15445 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
15446 gimplify_seq_add_stmt (&cleanup
, call
);
15447 tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
15449 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
15450 call
= gimple_build_call (x
, 1, integer_zero_node
);
15451 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
15452 gimple_call_set_lhs (call
, tmp_var
);
15453 gimplify_seq_add_stmt (&body
, call
);
15454 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER
);
15455 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
15456 gimplify_seq_add_stmt (&body
, call
);
15457 gimplify_seq_add_stmt (&body
, tf
);
15458 new_bind
= gimple_build_bind (NULL
, body
, NULL
);
15460 /* Replace the current function body with the body
15461 wrapped in the try/finally TF. */
15463 gimple_seq_add_stmt (&seq
, new_bind
);
15464 gimple_set_body (fndecl
, seq
);
15468 if (sanitize_flags_p (SANITIZE_THREAD
)
15469 && param_tsan_instrument_func_entry_exit
)
15471 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
15472 gimple
*tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
15473 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, NULL
);
15474 /* Replace the current function body with the body
15475 wrapped in the try/finally TF. */
15477 gimple_seq_add_stmt (&seq
, new_bind
);
15478 gimple_set_body (fndecl
, seq
);
15481 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
15482 cfun
->curr_properties
|= PROP_gimple_any
;
15486 dump_function (TDI_gimple
, fndecl
);
15489 /* Return a dummy expression of type TYPE in order to keep going after an
15493 dummy_object (tree type
)
15495 tree t
= build_int_cst (build_pointer_type (type
), 0);
15496 return build2 (MEM_REF
, type
, t
, t
);
15499 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
15500 builtin function, but a very special sort of operator. */
15502 enum gimplify_status
15503 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
,
15504 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
15506 tree promoted_type
, have_va_type
;
15507 tree valist
= TREE_OPERAND (*expr_p
, 0);
15508 tree type
= TREE_TYPE (*expr_p
);
15509 tree t
, tag
, aptag
;
15510 location_t loc
= EXPR_LOCATION (*expr_p
);
15512 /* Verify that valist is of the proper type. */
15513 have_va_type
= TREE_TYPE (valist
);
15514 if (have_va_type
== error_mark_node
)
15516 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
15517 if (have_va_type
== NULL_TREE
15518 && POINTER_TYPE_P (TREE_TYPE (valist
)))
15519 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
15521 = targetm
.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist
)));
15522 gcc_assert (have_va_type
!= NULL_TREE
);
15524 /* Generate a diagnostic for requesting data of a type that cannot
15525 be passed through `...' due to type promotion at the call site. */
15526 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
15529 static bool gave_help
;
15531 /* Use the expansion point to handle cases such as passing bool (defined
15532 in a system header) through `...'. */
15534 = expansion_point_location_if_in_system_header (loc
);
15536 /* Unfortunately, this is merely undefined, rather than a constraint
15537 violation, so we cannot make this an error. If this call is never
15538 executed, the program is still strictly conforming. */
15539 auto_diagnostic_group d
;
15540 warned
= warning_at (xloc
, 0,
15541 "%qT is promoted to %qT when passed through %<...%>",
15542 type
, promoted_type
);
15543 if (!gave_help
&& warned
)
15546 inform (xloc
, "(so you should pass %qT not %qT to %<va_arg%>)",
15547 promoted_type
, type
);
15550 /* We can, however, treat "undefined" any way we please.
15551 Call abort to encourage the user to fix the program. */
15553 inform (xloc
, "if this code is reached, the program will abort");
15554 /* Before the abort, allow the evaluation of the va_list
15555 expression to exit or longjmp. */
15556 gimplify_and_add (valist
, pre_p
);
15557 t
= build_call_expr_loc (loc
,
15558 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
15559 gimplify_and_add (t
, pre_p
);
15561 /* This is dead code, but go ahead and finish so that the
15562 mode of the result comes out right. */
15563 *expr_p
= dummy_object (type
);
15564 return GS_ALL_DONE
;
15567 tag
= build_int_cst (build_pointer_type (type
), 0);
15568 aptag
= build_int_cst (TREE_TYPE (valist
), 0);
15570 *expr_p
= build_call_expr_internal_loc (loc
, IFN_VA_ARG
, type
, 3,
15571 valist
, tag
, aptag
);
15573 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
15574 needs to be expanded. */
15575 cfun
->curr_properties
&= ~PROP_gimple_lva
;
15580 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
15582 DST/SRC are the destination and source respectively. You can pass
15583 ungimplified trees in DST or SRC, in which case they will be
15584 converted to a gimple operand if necessary.
15586 This function returns the newly created GIMPLE_ASSIGN tuple. */
15589 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
15591 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
15592 gimplify_and_add (t
, seq_p
);
15594 return gimple_seq_last_stmt (*seq_p
);
15598 gimplify_hasher::hash (const elt_t
*p
)
15601 return iterative_hash_expr (t
, 0);
15605 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
15609 enum tree_code code
= TREE_CODE (t1
);
15611 if (TREE_CODE (t2
) != code
15612 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
15615 if (!operand_equal_p (t1
, t2
, 0))
15618 /* Only allow them to compare equal if they also hash equal; otherwise
15619 results are nondeterminate, and we fail bootstrap comparison. */
15620 gcc_checking_assert (hash (p1
) == hash (p2
));