1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec
<tree
> task_reductions
;
122 /* A hash map from the reduction clauses to the registered array
124 hash_map
<tree
, unsigned> *task_reduction_map
;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
130 /* Nesting depth of this context. Used to beautify error messages re
131 invalid gotos. The outermost ctx is depth 1, with depth 0 being
132 reserved for the main body of the function. */
135 /* True if this parallel directive is nested within another. */
138 /* True if this construct can be cancelled. */
141 /* True if lower_omp_1 should look up lastprivate conditional in parent
143 bool combined_into_simd_safelen0
;
146 static splay_tree all_contexts
;
147 static int taskreg_nesting_level
;
148 static int target_nesting_level
;
149 static bitmap task_shared_vars
;
150 static vec
<omp_context
*> taskreg_contexts
;
152 static void scan_omp (gimple_seq
*, omp_context
*);
153 static tree
scan_omp_1_op (tree
*, int *, void *);
/* Convenience case labels for gimple walkers: statements whose
   sub-statements should be walked rather than handled directly.  */
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
165 /* Return true if CTX corresponds to an oacc parallel region. */
168 is_oacc_parallel (omp_context
*ctx
)
170 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
171 return ((outer_type
== GIMPLE_OMP_TARGET
)
172 && (gimple_omp_target_kind (ctx
->stmt
)
173 == GF_OMP_TARGET_KIND_OACC_PARALLEL
));
176 /* Return true if CTX corresponds to an oacc kernels region. */
179 is_oacc_kernels (omp_context
*ctx
)
181 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
182 return ((outer_type
== GIMPLE_OMP_TARGET
)
183 && (gimple_omp_target_kind (ctx
->stmt
)
184 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
187 /* If DECL is the artificial dummy VAR_DECL created for non-static
188 data member privatization, return the underlying "this" parameter,
189 otherwise return NULL. */
192 omp_member_access_dummy_var (tree decl
)
195 || !DECL_ARTIFICIAL (decl
)
196 || !DECL_IGNORED_P (decl
)
197 || !DECL_HAS_VALUE_EXPR_P (decl
)
198 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
201 tree v
= DECL_VALUE_EXPR (decl
);
202 if (TREE_CODE (v
) != COMPONENT_REF
)
206 switch (TREE_CODE (v
))
212 case POINTER_PLUS_EXPR
:
213 v
= TREE_OPERAND (v
, 0);
216 if (DECL_CONTEXT (v
) == current_function_decl
217 && DECL_ARTIFICIAL (v
)
218 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
226 /* Helper for unshare_and_remap, called through walk_tree. */
229 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
231 tree
*pair
= (tree
*) data
;
234 *tp
= unshare_expr (pair
[1]);
237 else if (IS_TYPE_OR_DECL_P (*tp
))
242 /* Return unshare_expr (X) with all occurrences of FROM
246 unshare_and_remap (tree x
, tree from
, tree to
)
248 tree pair
[2] = { from
, to
};
249 x
= unshare_expr (x
);
250 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
254 /* Convenience function for calling scan_omp_1_op on tree operands. */
257 scan_omp_op (tree
*tp
, omp_context
*ctx
)
259 struct walk_stmt_info wi
;
261 memset (&wi
, 0, sizeof (wi
));
263 wi
.want_locations
= true;
265 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
268 static void lower_omp (gimple_seq
*, omp_context
*);
269 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
270 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
272 /* Return true if CTX is for an omp parallel. */
275 is_parallel_ctx (omp_context
*ctx
)
277 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
281 /* Return true if CTX is for an omp task. */
284 is_task_ctx (omp_context
*ctx
)
286 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
290 /* Return true if CTX is for an omp taskloop. */
293 is_taskloop_ctx (omp_context
*ctx
)
295 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
296 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
300 /* Return true if CTX is for a host omp teams. */
303 is_host_teams_ctx (omp_context
*ctx
)
305 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
306 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
309 /* Return true if CTX is for an omp parallel or omp task or host omp teams
310 (the last one is strictly not a task region in OpenMP speak, but we
311 need to treat it similarly). */
314 is_taskreg_ctx (omp_context
*ctx
)
316 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
319 /* Return true if EXPR is variable sized. */
322 is_variable_sized (const_tree expr
)
324 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
327 /* Lookup variables. The "maybe" form
328 allows for the variable form to not have been entered, otherwise we
329 assert that the variable must have been entered. */
332 lookup_decl (tree var
, omp_context
*ctx
)
334 tree
*n
= ctx
->cb
.decl_map
->get (var
);
339 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
341 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
342 return n
? *n
: NULL_TREE
;
346 lookup_field (tree var
, omp_context
*ctx
)
349 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
350 return (tree
) n
->value
;
354 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
357 n
= splay_tree_lookup (ctx
->sfield_map
358 ? ctx
->sfield_map
: ctx
->field_map
, key
);
359 return (tree
) n
->value
;
363 lookup_sfield (tree var
, omp_context
*ctx
)
365 return lookup_sfield ((splay_tree_key
) var
, ctx
);
369 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
372 n
= splay_tree_lookup (ctx
->field_map
, key
);
373 return n
? (tree
) n
->value
: NULL_TREE
;
377 maybe_lookup_field (tree var
, omp_context
*ctx
)
379 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
382 /* Return true if DECL should be copied by pointer. SHARED_CTX is
383 the parallel context if DECL is to be shared. */
386 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
388 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
389 || TYPE_ATOMIC (TREE_TYPE (decl
)))
392 /* We can only use copy-in/copy-out semantics for shared variables
393 when we know the value is not accessible from an outer scope. */
396 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
398 /* ??? Trivially accessible from anywhere. But why would we even
399 be passing an address in this case? Should we simply assert
400 this to be false, or should we have a cleanup pass that removes
401 these from the list of mappings? */
402 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
405 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
406 without analyzing the expression whether or not its location
407 is accessible to anyone else. In the case of nested parallel
408 regions it certainly may be. */
409 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
412 /* Do not use copy-in/copy-out for variables that have their
414 if (TREE_ADDRESSABLE (decl
))
417 /* lower_send_shared_vars only uses copy-in, but not copy-out
419 if (TREE_READONLY (decl
)
420 || ((TREE_CODE (decl
) == RESULT_DECL
421 || TREE_CODE (decl
) == PARM_DECL
)
422 && DECL_BY_REFERENCE (decl
)))
425 /* Disallow copy-in/out in nested parallel if
426 decl is shared in outer parallel, otherwise
427 each thread could store the shared variable
428 in its own copy-in location, making the
429 variable no longer really shared. */
430 if (shared_ctx
->is_nested
)
434 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
435 if (is_taskreg_ctx (up
) && maybe_lookup_decl (decl
, up
))
442 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
443 c
; c
= OMP_CLAUSE_CHAIN (c
))
444 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
445 && OMP_CLAUSE_DECL (c
) == decl
)
449 goto maybe_mark_addressable_and_ret
;
453 /* For tasks avoid using copy-in/out. As tasks can be
454 deferred or executed in different thread, when GOMP_task
455 returns, the task hasn't necessarily terminated. */
456 if (is_task_ctx (shared_ctx
))
459 maybe_mark_addressable_and_ret
:
460 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
461 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
463 /* Taking address of OUTER in lower_send_shared_vars
464 might need regimplification of everything that uses the
466 if (!task_shared_vars
)
467 task_shared_vars
= BITMAP_ALLOC (NULL
);
468 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
469 TREE_ADDRESSABLE (outer
) = 1;
478 /* Construct a new automatic decl similar to VAR. */
481 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
483 tree copy
= copy_var_decl (var
, name
, type
);
485 DECL_CONTEXT (copy
) = current_function_decl
;
486 DECL_CHAIN (copy
) = ctx
->block_vars
;
487 /* If VAR is listed in task_shared_vars, it means it wasn't
488 originally addressable and is just because task needs to take
489 it's address. But we don't need to take address of privatizations
491 if (TREE_ADDRESSABLE (var
)
493 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
494 TREE_ADDRESSABLE (copy
) = 0;
495 ctx
->block_vars
= copy
;
501 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
503 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
506 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
509 omp_build_component_ref (tree obj
, tree field
)
511 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
512 if (TREE_THIS_VOLATILE (field
))
513 TREE_THIS_VOLATILE (ret
) |= 1;
514 if (TREE_READONLY (field
))
515 TREE_READONLY (ret
) |= 1;
519 /* Build tree nodes to access the field for VAR on the receiver side. */
522 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
524 tree x
, field
= lookup_field (var
, ctx
);
526 /* If the receiver record type was remapped in the child function,
527 remap the field into the new record type. */
528 x
= maybe_lookup_field (field
, ctx
);
532 x
= build_simple_mem_ref (ctx
->receiver_decl
);
533 TREE_THIS_NOTRAP (x
) = 1;
534 x
= omp_build_component_ref (x
, field
);
537 x
= build_simple_mem_ref (x
);
538 TREE_THIS_NOTRAP (x
) = 1;
544 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
545 of a parallel, this is a component reference; for workshare constructs
546 this is some variable. */
549 build_outer_var_ref (tree var
, omp_context
*ctx
,
550 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
553 omp_context
*outer
= ctx
->outer
;
554 while (outer
&& gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
555 outer
= outer
->outer
;
557 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
559 else if (is_variable_sized (var
))
561 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
562 x
= build_outer_var_ref (x
, ctx
, code
);
563 x
= build_simple_mem_ref (x
);
565 else if (is_taskreg_ctx (ctx
))
567 bool by_ref
= use_pointer_for_field (var
, NULL
);
568 x
= build_receiver_ref (var
, by_ref
, ctx
);
570 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
571 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
572 || (code
== OMP_CLAUSE_PRIVATE
573 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
574 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
575 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
577 /* #pragma omp simd isn't a worksharing construct, and can reference
578 even private vars in its linear etc. clauses.
579 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
580 to private vars in all worksharing constructs. */
582 if (outer
&& is_taskreg_ctx (outer
))
583 x
= lookup_decl (var
, outer
);
585 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
589 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
593 = splay_tree_lookup (outer
->field_map
,
594 (splay_tree_key
) &DECL_UID (var
));
597 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
600 x
= lookup_decl (var
, outer
);
604 tree field
= (tree
) n
->value
;
605 /* If the receiver record type was remapped in the child function,
606 remap the field into the new record type. */
607 x
= maybe_lookup_field (field
, outer
);
611 x
= build_simple_mem_ref (outer
->receiver_decl
);
612 x
= omp_build_component_ref (x
, field
);
613 if (use_pointer_for_field (var
, outer
))
614 x
= build_simple_mem_ref (x
);
619 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_GRID_BODY
)
621 outer
= outer
->outer
;
623 && gimple_code (outer
->stmt
) != GIMPLE_OMP_GRID_BODY
);
625 x
= lookup_decl (var
, outer
);
627 else if (omp_is_reference (var
))
628 /* This can happen with orphaned constructs. If var is reference, it is
629 possible it is shared and as such valid. */
631 else if (omp_member_access_dummy_var (var
))
638 tree t
= omp_member_access_dummy_var (var
);
641 x
= DECL_VALUE_EXPR (var
);
642 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
644 x
= unshare_and_remap (x
, t
, o
);
646 x
= unshare_expr (x
);
650 if (omp_is_reference (var
))
651 x
= build_simple_mem_ref (x
);
656 /* Build tree nodes to access the field for VAR on the sender side. */
659 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
661 tree field
= lookup_sfield (key
, ctx
);
662 return omp_build_component_ref (ctx
->sender_decl
, field
);
666 build_sender_ref (tree var
, omp_context
*ctx
)
668 return build_sender_ref ((splay_tree_key
) var
, ctx
);
671 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
672 BASE_POINTERS_RESTRICT, declare the field with restrict. */
675 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
677 tree field
, type
, sfield
= NULL_TREE
;
678 splay_tree_key key
= (splay_tree_key
) var
;
682 key
= (splay_tree_key
) &DECL_UID (var
);
683 gcc_checking_assert (key
!= (splay_tree_key
) var
);
685 gcc_assert ((mask
& 1) == 0
686 || !splay_tree_lookup (ctx
->field_map
, key
));
687 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
688 || !splay_tree_lookup (ctx
->sfield_map
, key
));
689 gcc_assert ((mask
& 3) == 3
690 || !is_gimple_omp_oacc (ctx
->stmt
));
692 type
= TREE_TYPE (var
);
693 /* Prevent redeclaring the var in the split-off function with a restrict
694 pointer type. Note that we only clear type itself, restrict qualifiers in
695 the pointed-to type will be ignored by points-to analysis. */
696 if (POINTER_TYPE_P (type
)
697 && TYPE_RESTRICT (type
))
698 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
702 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
703 type
= build_pointer_type (build_pointer_type (type
));
706 type
= build_pointer_type (type
);
707 else if ((mask
& 3) == 1 && omp_is_reference (var
))
708 type
= TREE_TYPE (type
);
710 field
= build_decl (DECL_SOURCE_LOCATION (var
),
711 FIELD_DECL
, DECL_NAME (var
), type
);
713 /* Remember what variable this field was created for. This does have a
714 side effect of making dwarf2out ignore this member, so for helpful
715 debugging we clear it later in delete_omp_context. */
716 DECL_ABSTRACT_ORIGIN (field
) = var
;
717 if (type
== TREE_TYPE (var
))
719 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
720 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
721 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
724 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
728 insert_field_into_struct (ctx
->record_type
, field
);
729 if (ctx
->srecord_type
)
731 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
732 FIELD_DECL
, DECL_NAME (var
), type
);
733 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
734 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
735 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
736 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
737 insert_field_into_struct (ctx
->srecord_type
, sfield
);
742 if (ctx
->srecord_type
== NULL_TREE
)
746 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
747 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
748 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
750 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
751 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
752 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
753 insert_field_into_struct (ctx
->srecord_type
, sfield
);
754 splay_tree_insert (ctx
->sfield_map
,
755 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
756 (splay_tree_value
) sfield
);
760 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
761 : ctx
->srecord_type
, field
);
765 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
766 if ((mask
& 2) && ctx
->sfield_map
)
767 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
771 install_var_local (tree var
, omp_context
*ctx
)
773 tree new_var
= omp_copy_decl_1 (var
, ctx
);
774 insert_decl_map (&ctx
->cb
, var
, new_var
);
778 /* Adjust the replacement for DECL in CTX for the new context. This means
779 copying the DECL_VALUE_EXPR, and fixing up the type. */
782 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
786 new_decl
= lookup_decl (decl
, ctx
);
788 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
790 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
791 && DECL_HAS_VALUE_EXPR_P (decl
))
793 tree ve
= DECL_VALUE_EXPR (decl
);
794 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
795 SET_DECL_VALUE_EXPR (new_decl
, ve
);
796 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
799 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
801 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
802 if (size
== error_mark_node
)
803 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
804 DECL_SIZE (new_decl
) = size
;
806 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
807 if (size
== error_mark_node
)
808 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
809 DECL_SIZE_UNIT (new_decl
) = size
;
813 /* The callback for remap_decl. Search all containing contexts for a
814 mapping of the variable; this avoids having to duplicate the splay
815 tree ahead of time. We know a mapping doesn't already exist in the
816 given context. Create new mappings to implement default semantics. */
819 omp_copy_decl (tree var
, copy_body_data
*cb
)
821 omp_context
*ctx
= (omp_context
*) cb
;
824 if (TREE_CODE (var
) == LABEL_DECL
)
826 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
828 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
829 DECL_CONTEXT (new_var
) = current_function_decl
;
830 insert_decl_map (&ctx
->cb
, var
, new_var
);
834 while (!is_taskreg_ctx (ctx
))
839 new_var
= maybe_lookup_decl (var
, ctx
);
844 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
847 return error_mark_node
;
850 /* Create a new context, with OUTER_CTX being the surrounding context. */
853 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
855 omp_context
*ctx
= XCNEW (omp_context
);
857 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
858 (splay_tree_value
) ctx
);
863 ctx
->outer
= outer_ctx
;
864 ctx
->cb
= outer_ctx
->cb
;
865 ctx
->cb
.block
= NULL
;
866 ctx
->depth
= outer_ctx
->depth
+ 1;
870 ctx
->cb
.src_fn
= current_function_decl
;
871 ctx
->cb
.dst_fn
= current_function_decl
;
872 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
873 gcc_checking_assert (ctx
->cb
.src_node
);
874 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
875 ctx
->cb
.src_cfun
= cfun
;
876 ctx
->cb
.copy_decl
= omp_copy_decl
;
877 ctx
->cb
.eh_lp_nr
= 0;
878 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
879 ctx
->cb
.adjust_array_error_bounds
= true;
880 ctx
->cb
.dont_remap_vla_if_no_change
= true;
884 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
889 static gimple_seq
maybe_catch_exception (gimple_seq
);
891 /* Finalize task copyfn. */
894 finalize_task_copyfn (gomp_task
*task_stmt
)
896 struct function
*child_cfun
;
898 gimple_seq seq
= NULL
, new_seq
;
901 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
902 if (child_fn
== NULL_TREE
)
905 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
906 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
908 push_cfun (child_cfun
);
909 bind
= gimplify_body (child_fn
, false);
910 gimple_seq_add_stmt (&seq
, bind
);
911 new_seq
= maybe_catch_exception (seq
);
914 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
916 gimple_seq_add_stmt (&seq
, bind
);
918 gimple_set_body (child_fn
, seq
);
921 /* Inform the callgraph about the new function. */
922 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
923 node
->parallelized_function
= 1;
924 cgraph_node::add_new_function (child_fn
, false);
927 /* Destroy a omp_context data structures. Called through the splay tree
928 value delete callback. */
931 delete_omp_context (splay_tree_value value
)
933 omp_context
*ctx
= (omp_context
*) value
;
935 delete ctx
->cb
.decl_map
;
938 splay_tree_delete (ctx
->field_map
);
940 splay_tree_delete (ctx
->sfield_map
);
942 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
943 it produces corrupt debug information. */
944 if (ctx
->record_type
)
947 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
948 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
950 if (ctx
->srecord_type
)
953 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
954 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
957 if (is_task_ctx (ctx
))
958 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
960 if (ctx
->task_reduction_map
)
962 ctx
->task_reductions
.release ();
963 delete ctx
->task_reduction_map
;
966 delete ctx
->lastprivate_conditional_map
;
971 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
975 fixup_child_record_type (omp_context
*ctx
)
977 tree f
, type
= ctx
->record_type
;
979 if (!ctx
->receiver_decl
)
981 /* ??? It isn't sufficient to just call remap_type here, because
982 variably_modified_type_p doesn't work the way we expect for
983 record types. Testing each field for whether it needs remapping
984 and creating a new record by hand works, however. */
985 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
986 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
990 tree name
, new_fields
= NULL
;
992 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
993 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
994 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
995 TYPE_DECL
, name
, type
);
996 TYPE_NAME (type
) = name
;
998 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1000 tree new_f
= copy_node (f
);
1001 DECL_CONTEXT (new_f
) = type
;
1002 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1003 DECL_CHAIN (new_f
) = new_fields
;
1004 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1005 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1007 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1011 /* Arrange to be able to look up the receiver field
1012 given the sender field. */
1013 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1014 (splay_tree_value
) new_f
);
1016 TYPE_FIELDS (type
) = nreverse (new_fields
);
1020 /* In a target region we never modify any of the pointers in *.omp_data_i,
1021 so attempt to help the optimizers. */
1022 if (is_gimple_omp_offloaded (ctx
->stmt
))
1023 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1025 TREE_TYPE (ctx
->receiver_decl
)
1026 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1029 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1030 specified by CLAUSES. */
1033 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1036 bool scan_array_reductions
= false;
1038 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1042 switch (OMP_CLAUSE_CODE (c
))
1044 case OMP_CLAUSE_PRIVATE
:
1045 decl
= OMP_CLAUSE_DECL (c
);
1046 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1048 else if (!is_variable_sized (decl
))
1049 install_var_local (decl
, ctx
);
1052 case OMP_CLAUSE_SHARED
:
1053 decl
= OMP_CLAUSE_DECL (c
);
1054 /* Ignore shared directives in teams construct inside of
1055 target construct. */
1056 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1057 && !is_host_teams_ctx (ctx
))
1059 /* Global variables don't need to be copied,
1060 the receiver side will use them directly. */
1061 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1062 if (is_global_var (odecl
))
1064 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1067 gcc_assert (is_taskreg_ctx (ctx
));
1068 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1069 || !is_variable_sized (decl
));
1070 /* Global variables don't need to be copied,
1071 the receiver side will use them directly. */
1072 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1074 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1076 use_pointer_for_field (decl
, ctx
);
1079 by_ref
= use_pointer_for_field (decl
, NULL
);
1080 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1081 || TREE_ADDRESSABLE (decl
)
1083 || omp_is_reference (decl
))
1085 by_ref
= use_pointer_for_field (decl
, ctx
);
1086 install_var_field (decl
, by_ref
, 3, ctx
);
1087 install_var_local (decl
, ctx
);
1090 /* We don't need to copy const scalar vars back. */
1091 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1094 case OMP_CLAUSE_REDUCTION
:
1095 case OMP_CLAUSE_IN_REDUCTION
:
1096 decl
= OMP_CLAUSE_DECL (c
);
1097 if (TREE_CODE (decl
) == MEM_REF
)
1099 tree t
= TREE_OPERAND (decl
, 0);
1100 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1101 t
= TREE_OPERAND (t
, 0);
1102 if (TREE_CODE (t
) == INDIRECT_REF
1103 || TREE_CODE (t
) == ADDR_EXPR
)
1104 t
= TREE_OPERAND (t
, 0);
1105 install_var_local (t
, ctx
);
1106 if (is_taskreg_ctx (ctx
)
1107 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1108 || (is_task_ctx (ctx
)
1109 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1110 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1111 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1112 == POINTER_TYPE
)))))
1113 && !is_variable_sized (t
)
1114 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1115 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1116 && !is_task_ctx (ctx
))))
1118 by_ref
= use_pointer_for_field (t
, NULL
);
1119 if (is_task_ctx (ctx
)
1120 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1121 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1123 install_var_field (t
, false, 1, ctx
);
1124 install_var_field (t
, by_ref
, 2, ctx
);
1127 install_var_field (t
, by_ref
, 3, ctx
);
1131 if (is_task_ctx (ctx
)
1132 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1133 && OMP_CLAUSE_REDUCTION_TASK (c
)
1134 && is_parallel_ctx (ctx
)))
1136 /* Global variables don't need to be copied,
1137 the receiver side will use them directly. */
1138 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1140 by_ref
= use_pointer_for_field (decl
, ctx
);
1141 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1142 install_var_field (decl
, by_ref
, 3, ctx
);
1144 install_var_local (decl
, ctx
);
1147 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1148 && OMP_CLAUSE_REDUCTION_TASK (c
))
1150 install_var_local (decl
, ctx
);
1155 case OMP_CLAUSE_LASTPRIVATE
:
1156 /* Let the corresponding firstprivate clause create
1158 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1162 case OMP_CLAUSE_FIRSTPRIVATE
:
1163 case OMP_CLAUSE_LINEAR
:
1164 decl
= OMP_CLAUSE_DECL (c
);
1166 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1167 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1168 && is_gimple_omp_offloaded (ctx
->stmt
))
1170 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1171 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1172 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1173 install_var_field (decl
, true, 3, ctx
);
1175 install_var_field (decl
, false, 3, ctx
);
1177 if (is_variable_sized (decl
))
1179 if (is_task_ctx (ctx
))
1180 install_var_field (decl
, false, 1, ctx
);
1183 else if (is_taskreg_ctx (ctx
))
1186 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1187 by_ref
= use_pointer_for_field (decl
, NULL
);
1189 if (is_task_ctx (ctx
)
1190 && (global
|| by_ref
|| omp_is_reference (decl
)))
1192 install_var_field (decl
, false, 1, ctx
);
1194 install_var_field (decl
, by_ref
, 2, ctx
);
1197 install_var_field (decl
, by_ref
, 3, ctx
);
1199 install_var_local (decl
, ctx
);
1202 case OMP_CLAUSE_USE_DEVICE_PTR
:
1203 decl
= OMP_CLAUSE_DECL (c
);
1204 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1205 install_var_field (decl
, true, 3, ctx
);
1207 install_var_field (decl
, false, 3, ctx
);
1208 if (DECL_SIZE (decl
)
1209 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1211 tree decl2
= DECL_VALUE_EXPR (decl
);
1212 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1213 decl2
= TREE_OPERAND (decl2
, 0);
1214 gcc_assert (DECL_P (decl2
));
1215 install_var_local (decl2
, ctx
);
1217 install_var_local (decl
, ctx
);
1220 case OMP_CLAUSE_IS_DEVICE_PTR
:
1221 decl
= OMP_CLAUSE_DECL (c
);
1224 case OMP_CLAUSE__LOOPTEMP_
:
1225 case OMP_CLAUSE__REDUCTEMP_
:
1226 gcc_assert (is_taskreg_ctx (ctx
));
1227 decl
= OMP_CLAUSE_DECL (c
);
1228 install_var_field (decl
, false, 3, ctx
);
1229 install_var_local (decl
, ctx
);
1232 case OMP_CLAUSE_COPYPRIVATE
:
1233 case OMP_CLAUSE_COPYIN
:
1234 decl
= OMP_CLAUSE_DECL (c
);
1235 by_ref
= use_pointer_for_field (decl
, NULL
);
1236 install_var_field (decl
, by_ref
, 3, ctx
);
1239 case OMP_CLAUSE_FINAL
:
1241 case OMP_CLAUSE_NUM_THREADS
:
1242 case OMP_CLAUSE_NUM_TEAMS
:
1243 case OMP_CLAUSE_THREAD_LIMIT
:
1244 case OMP_CLAUSE_DEVICE
:
1245 case OMP_CLAUSE_SCHEDULE
:
1246 case OMP_CLAUSE_DIST_SCHEDULE
:
1247 case OMP_CLAUSE_DEPEND
:
1248 case OMP_CLAUSE_PRIORITY
:
1249 case OMP_CLAUSE_GRAINSIZE
:
1250 case OMP_CLAUSE_NUM_TASKS
:
1251 case OMP_CLAUSE_NUM_GANGS
:
1252 case OMP_CLAUSE_NUM_WORKERS
:
1253 case OMP_CLAUSE_VECTOR_LENGTH
:
1255 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1259 case OMP_CLAUSE_FROM
:
1260 case OMP_CLAUSE_MAP
:
1262 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1263 decl
= OMP_CLAUSE_DECL (c
);
1264 /* Global variables with "omp declare target" attribute
1265 don't need to be copied, the receiver side will use them
1266 directly. However, global variables with "omp declare target link"
1267 attribute need to be copied. Or when ALWAYS modifier is used. */
1268 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1270 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1271 && (OMP_CLAUSE_MAP_KIND (c
)
1272 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1273 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1274 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1275 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1276 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1277 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1278 && varpool_node::get_create (decl
)->offloadable
1279 && !lookup_attribute ("omp declare target link",
1280 DECL_ATTRIBUTES (decl
)))
1282 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1283 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1285 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1286 not offloaded; there is nothing to map for those. */
1287 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1288 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1289 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1292 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1293 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1294 || (OMP_CLAUSE_MAP_KIND (c
)
1295 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1297 if (TREE_CODE (decl
) == COMPONENT_REF
1298 || (TREE_CODE (decl
) == INDIRECT_REF
1299 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1300 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1301 == REFERENCE_TYPE
)))
1303 if (DECL_SIZE (decl
)
1304 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1306 tree decl2
= DECL_VALUE_EXPR (decl
);
1307 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1308 decl2
= TREE_OPERAND (decl2
, 0);
1309 gcc_assert (DECL_P (decl2
));
1310 install_var_local (decl2
, ctx
);
1312 install_var_local (decl
, ctx
);
1317 if (DECL_SIZE (decl
)
1318 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1320 tree decl2
= DECL_VALUE_EXPR (decl
);
1321 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1322 decl2
= TREE_OPERAND (decl2
, 0);
1323 gcc_assert (DECL_P (decl2
));
1324 install_var_field (decl2
, true, 3, ctx
);
1325 install_var_local (decl2
, ctx
);
1326 install_var_local (decl
, ctx
);
1330 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1331 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1332 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1333 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1334 install_var_field (decl
, true, 7, ctx
);
1336 install_var_field (decl
, true, 3, ctx
);
1337 if (is_gimple_omp_offloaded (ctx
->stmt
)
1338 && !OMP_CLAUSE_MAP_IN_REDUCTION (c
))
1339 install_var_local (decl
, ctx
);
1344 tree base
= get_base_address (decl
);
1345 tree nc
= OMP_CLAUSE_CHAIN (c
);
1348 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1349 && OMP_CLAUSE_DECL (nc
) == base
1350 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1351 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1353 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1354 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1360 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1361 decl
= OMP_CLAUSE_DECL (c
);
1363 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1364 (splay_tree_key
) decl
));
1366 = build_decl (OMP_CLAUSE_LOCATION (c
),
1367 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1368 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1369 insert_field_into_struct (ctx
->record_type
, field
);
1370 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1371 (splay_tree_value
) field
);
1376 case OMP_CLAUSE__GRIDDIM_
:
1379 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c
), ctx
->outer
);
1380 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c
), ctx
->outer
);
1384 case OMP_CLAUSE_NOWAIT
:
1385 case OMP_CLAUSE_ORDERED
:
1386 case OMP_CLAUSE_COLLAPSE
:
1387 case OMP_CLAUSE_UNTIED
:
1388 case OMP_CLAUSE_MERGEABLE
:
1389 case OMP_CLAUSE_PROC_BIND
:
1390 case OMP_CLAUSE_SAFELEN
:
1391 case OMP_CLAUSE_SIMDLEN
:
1392 case OMP_CLAUSE_THREADS
:
1393 case OMP_CLAUSE_SIMD
:
1394 case OMP_CLAUSE_NOGROUP
:
1395 case OMP_CLAUSE_DEFAULTMAP
:
1396 case OMP_CLAUSE_ASYNC
:
1397 case OMP_CLAUSE_WAIT
:
1398 case OMP_CLAUSE_GANG
:
1399 case OMP_CLAUSE_WORKER
:
1400 case OMP_CLAUSE_VECTOR
:
1401 case OMP_CLAUSE_INDEPENDENT
:
1402 case OMP_CLAUSE_AUTO
:
1403 case OMP_CLAUSE_SEQ
:
1404 case OMP_CLAUSE_TILE
:
1405 case OMP_CLAUSE__SIMT_
:
1406 case OMP_CLAUSE_DEFAULT
:
1407 case OMP_CLAUSE_NONTEMPORAL
:
1408 case OMP_CLAUSE_IF_PRESENT
:
1409 case OMP_CLAUSE_FINALIZE
:
1410 case OMP_CLAUSE_TASK_REDUCTION
:
1413 case OMP_CLAUSE_ALIGNED
:
1414 decl
= OMP_CLAUSE_DECL (c
);
1415 if (is_global_var (decl
)
1416 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1417 install_var_local (decl
, ctx
);
1420 case OMP_CLAUSE__CONDTEMP_
:
1421 decl
= OMP_CLAUSE_DECL (c
);
1422 if (is_parallel_ctx (ctx
))
1424 install_var_field (decl
, false, 3, ctx
);
1425 install_var_local (decl
, ctx
);
1427 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1428 && (gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
1429 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1430 install_var_local (decl
, ctx
);
1433 case OMP_CLAUSE__CACHE_
:
1439 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1441 switch (OMP_CLAUSE_CODE (c
))
1443 case OMP_CLAUSE_LASTPRIVATE
:
1444 /* Let the corresponding firstprivate clause create
1446 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1447 scan_array_reductions
= true;
1448 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1452 case OMP_CLAUSE_FIRSTPRIVATE
:
1453 case OMP_CLAUSE_PRIVATE
:
1454 case OMP_CLAUSE_LINEAR
:
1455 case OMP_CLAUSE_IS_DEVICE_PTR
:
1456 decl
= OMP_CLAUSE_DECL (c
);
1457 if (is_variable_sized (decl
))
1459 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1460 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1461 && is_gimple_omp_offloaded (ctx
->stmt
))
1463 tree decl2
= DECL_VALUE_EXPR (decl
);
1464 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1465 decl2
= TREE_OPERAND (decl2
, 0);
1466 gcc_assert (DECL_P (decl2
));
1467 install_var_local (decl2
, ctx
);
1468 fixup_remapped_decl (decl2
, ctx
, false);
1470 install_var_local (decl
, ctx
);
1472 fixup_remapped_decl (decl
, ctx
,
1473 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1474 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1475 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1476 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1477 scan_array_reductions
= true;
1480 case OMP_CLAUSE_REDUCTION
:
1481 case OMP_CLAUSE_IN_REDUCTION
:
1482 decl
= OMP_CLAUSE_DECL (c
);
1483 if (TREE_CODE (decl
) != MEM_REF
)
1485 if (is_variable_sized (decl
))
1486 install_var_local (decl
, ctx
);
1487 fixup_remapped_decl (decl
, ctx
, false);
1489 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1490 scan_array_reductions
= true;
1493 case OMP_CLAUSE_TASK_REDUCTION
:
1494 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1495 scan_array_reductions
= true;
1498 case OMP_CLAUSE_SHARED
:
1499 /* Ignore shared directives in teams construct inside of
1500 target construct. */
1501 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1502 && !is_host_teams_ctx (ctx
))
1504 decl
= OMP_CLAUSE_DECL (c
);
1505 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1507 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1509 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1512 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1513 install_var_field (decl
, by_ref
, 11, ctx
);
1516 fixup_remapped_decl (decl
, ctx
, false);
1519 case OMP_CLAUSE_MAP
:
1520 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1522 decl
= OMP_CLAUSE_DECL (c
);
1524 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1525 && (OMP_CLAUSE_MAP_KIND (c
)
1526 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1527 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1528 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1529 && varpool_node::get_create (decl
)->offloadable
)
1533 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1534 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1535 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1536 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1538 tree new_decl
= lookup_decl (decl
, ctx
);
1539 TREE_TYPE (new_decl
)
1540 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1542 else if (DECL_SIZE (decl
)
1543 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1545 tree decl2
= DECL_VALUE_EXPR (decl
);
1546 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1547 decl2
= TREE_OPERAND (decl2
, 0);
1548 gcc_assert (DECL_P (decl2
));
1549 fixup_remapped_decl (decl2
, ctx
, false);
1550 fixup_remapped_decl (decl
, ctx
, true);
1553 fixup_remapped_decl (decl
, ctx
, false);
1557 case OMP_CLAUSE_COPYPRIVATE
:
1558 case OMP_CLAUSE_COPYIN
:
1559 case OMP_CLAUSE_DEFAULT
:
1561 case OMP_CLAUSE_NUM_THREADS
:
1562 case OMP_CLAUSE_NUM_TEAMS
:
1563 case OMP_CLAUSE_THREAD_LIMIT
:
1564 case OMP_CLAUSE_DEVICE
:
1565 case OMP_CLAUSE_SCHEDULE
:
1566 case OMP_CLAUSE_DIST_SCHEDULE
:
1567 case OMP_CLAUSE_NOWAIT
:
1568 case OMP_CLAUSE_ORDERED
:
1569 case OMP_CLAUSE_COLLAPSE
:
1570 case OMP_CLAUSE_UNTIED
:
1571 case OMP_CLAUSE_FINAL
:
1572 case OMP_CLAUSE_MERGEABLE
:
1573 case OMP_CLAUSE_PROC_BIND
:
1574 case OMP_CLAUSE_SAFELEN
:
1575 case OMP_CLAUSE_SIMDLEN
:
1576 case OMP_CLAUSE_ALIGNED
:
1577 case OMP_CLAUSE_DEPEND
:
1578 case OMP_CLAUSE__LOOPTEMP_
:
1579 case OMP_CLAUSE__REDUCTEMP_
:
1581 case OMP_CLAUSE_FROM
:
1582 case OMP_CLAUSE_PRIORITY
:
1583 case OMP_CLAUSE_GRAINSIZE
:
1584 case OMP_CLAUSE_NUM_TASKS
:
1585 case OMP_CLAUSE_THREADS
:
1586 case OMP_CLAUSE_SIMD
:
1587 case OMP_CLAUSE_NOGROUP
:
1588 case OMP_CLAUSE_DEFAULTMAP
:
1589 case OMP_CLAUSE_USE_DEVICE_PTR
:
1590 case OMP_CLAUSE_NONTEMPORAL
:
1591 case OMP_CLAUSE_ASYNC
:
1592 case OMP_CLAUSE_WAIT
:
1593 case OMP_CLAUSE_NUM_GANGS
:
1594 case OMP_CLAUSE_NUM_WORKERS
:
1595 case OMP_CLAUSE_VECTOR_LENGTH
:
1596 case OMP_CLAUSE_GANG
:
1597 case OMP_CLAUSE_WORKER
:
1598 case OMP_CLAUSE_VECTOR
:
1599 case OMP_CLAUSE_INDEPENDENT
:
1600 case OMP_CLAUSE_AUTO
:
1601 case OMP_CLAUSE_SEQ
:
1602 case OMP_CLAUSE_TILE
:
1603 case OMP_CLAUSE__GRIDDIM_
:
1604 case OMP_CLAUSE__SIMT_
:
1605 case OMP_CLAUSE_IF_PRESENT
:
1606 case OMP_CLAUSE_FINALIZE
:
1607 case OMP_CLAUSE__CONDTEMP_
:
1610 case OMP_CLAUSE__CACHE_
:
1616 gcc_checking_assert (!scan_array_reductions
1617 || !is_gimple_omp_oacc (ctx
->stmt
));
1618 if (scan_array_reductions
)
1620 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1621 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1622 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1623 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1624 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1626 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), ctx
);
1627 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
1629 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1630 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1631 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1632 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1633 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1634 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1638 /* Create a new name for omp child function. Returns an identifier. */
1641 create_omp_child_function_name (bool task_copy
)
1643 return clone_function_name_numbered (current_function_decl
,
1644 task_copy
? "_omp_cpyfn" : "_omp_fn");
1647 /* Return true if CTX may belong to offloaded code: either if current function
1648 is offloaded, or any enclosing context corresponds to a target region. */
1651 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1653 if (cgraph_node::get (current_function_decl
)->offloadable
)
1655 for (; ctx
; ctx
= ctx
->outer
)
1656 if (is_gimple_omp_offloaded (ctx
->stmt
))
1661 /* Build a decl for the omp child function. It'll not contain a body
1662 yet, just the bare decl. */
1665 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1667 tree decl
, type
, name
, t
;
1669 name
= create_omp_child_function_name (task_copy
);
1671 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1672 ptr_type_node
, NULL_TREE
);
1674 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1676 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1678 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1681 ctx
->cb
.dst_fn
= decl
;
1683 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1685 TREE_STATIC (decl
) = 1;
1686 TREE_USED (decl
) = 1;
1687 DECL_ARTIFICIAL (decl
) = 1;
1688 DECL_IGNORED_P (decl
) = 0;
1689 TREE_PUBLIC (decl
) = 0;
1690 DECL_UNINLINABLE (decl
) = 1;
1691 DECL_EXTERNAL (decl
) = 0;
1692 DECL_CONTEXT (decl
) = NULL_TREE
;
1693 DECL_INITIAL (decl
) = make_node (BLOCK
);
1694 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1695 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1696 /* Remove omp declare simd attribute from the new attributes. */
1697 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1699 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1702 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1703 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1704 *p
= TREE_CHAIN (*p
);
1707 tree chain
= TREE_CHAIN (*p
);
1708 *p
= copy_node (*p
);
1709 p
= &TREE_CHAIN (*p
);
1713 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1714 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1715 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
1716 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
1717 DECL_FUNCTION_VERSIONED (decl
)
1718 = DECL_FUNCTION_VERSIONED (current_function_decl
);
1720 if (omp_maybe_offloaded_ctx (ctx
))
1722 cgraph_node::get_create (decl
)->offloadable
= 1;
1723 if (ENABLE_OFFLOADING
)
1724 g
->have_offload
= true;
1727 if (cgraph_node::get_create (decl
)->offloadable
1728 && !lookup_attribute ("omp declare target",
1729 DECL_ATTRIBUTES (current_function_decl
)))
1731 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1732 ? "omp target entrypoint"
1733 : "omp declare target");
1734 DECL_ATTRIBUTES (decl
)
1735 = tree_cons (get_identifier (target_attr
),
1736 NULL_TREE
, DECL_ATTRIBUTES (decl
));
1739 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1740 RESULT_DECL
, NULL_TREE
, void_type_node
);
1741 DECL_ARTIFICIAL (t
) = 1;
1742 DECL_IGNORED_P (t
) = 1;
1743 DECL_CONTEXT (t
) = decl
;
1744 DECL_RESULT (decl
) = t
;
1746 tree data_name
= get_identifier (".omp_data_i");
1747 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
1749 DECL_ARTIFICIAL (t
) = 1;
1750 DECL_NAMELESS (t
) = 1;
1751 DECL_ARG_TYPE (t
) = ptr_type_node
;
1752 DECL_CONTEXT (t
) = current_function_decl
;
1754 TREE_READONLY (t
) = 1;
1755 DECL_ARGUMENTS (decl
) = t
;
1757 ctx
->receiver_decl
= t
;
1760 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1761 PARM_DECL
, get_identifier (".omp_data_o"),
1763 DECL_ARTIFICIAL (t
) = 1;
1764 DECL_NAMELESS (t
) = 1;
1765 DECL_ARG_TYPE (t
) = ptr_type_node
;
1766 DECL_CONTEXT (t
) = current_function_decl
;
1768 TREE_ADDRESSABLE (t
) = 1;
1769 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1770 DECL_ARGUMENTS (decl
) = t
;
1773 /* Allocate memory for the function structure. The call to
1774 allocate_struct_function clobbers CFUN, so we need to restore
1776 push_struct_function (decl
);
1777 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
1778 init_tree_ssa (cfun
);
1782 /* Callback for walk_gimple_seq. Check if combined parallel
1783 contains gimple_omp_for_combined_into_p OMP_FOR. */
1786 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
1787 bool *handled_ops_p
,
1788 struct walk_stmt_info
*wi
)
1790 gimple
*stmt
= gsi_stmt (*gsi_p
);
1792 *handled_ops_p
= true;
1793 switch (gimple_code (stmt
))
1797 case GIMPLE_OMP_FOR
:
1798 if (gimple_omp_for_combined_into_p (stmt
)
1799 && gimple_omp_for_kind (stmt
)
1800 == *(const enum gf_mask
*) (wi
->info
))
1803 return integer_zero_node
;
1812 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1815 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
1816 omp_context
*outer_ctx
)
1818 struct walk_stmt_info wi
;
1820 memset (&wi
, 0, sizeof (wi
));
1822 wi
.info
= (void *) &msk
;
1823 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
1824 if (wi
.info
!= (void *) &msk
)
1826 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
1827 struct omp_for_data fd
;
1828 omp_extract_for_data (for_stmt
, &fd
, NULL
);
1829 /* We need two temporaries with fd.loop.v type (istart/iend)
1830 and then (fd.collapse - 1) temporaries with the same
1831 type for count2 ... countN-1 vars if not constant. */
1832 size_t count
= 2, i
;
1833 tree type
= fd
.iter_type
;
1835 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
1837 count
+= fd
.collapse
- 1;
1838 /* If there are lastprivate clauses on the inner
1839 GIMPLE_OMP_FOR, add one more temporaries for the total number
1840 of iterations (product of count1 ... countN-1). */
1841 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
1842 OMP_CLAUSE_LASTPRIVATE
))
1844 else if (msk
== GF_OMP_FOR_KIND_FOR
1845 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1846 OMP_CLAUSE_LASTPRIVATE
))
1849 for (i
= 0; i
< count
; i
++)
1851 tree temp
= create_tmp_var (type
);
1852 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
1853 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1854 OMP_CLAUSE_DECL (c
) = temp
;
1855 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1856 gimple_omp_taskreg_set_clauses (stmt
, c
);
1859 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
1860 && omp_find_clause (gimple_omp_task_clauses (stmt
),
1861 OMP_CLAUSE_REDUCTION
))
1863 tree type
= build_pointer_type (pointer_sized_int_node
);
1864 tree temp
= create_tmp_var (type
);
1865 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1866 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1867 OMP_CLAUSE_DECL (c
) = temp
;
1868 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
1869 gimple_omp_task_set_clauses (stmt
, c
);
1873 /* Scan an OpenMP parallel directive. */
1876 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1880 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
1882 /* Ignore parallel directives with empty bodies, unless there
1883 are copyin clauses. */
1885 && empty_body_p (gimple_omp_body (stmt
))
1886 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1887 OMP_CLAUSE_COPYIN
) == NULL
)
1889 gsi_replace (gsi
, gimple_build_nop (), false);
1893 if (gimple_omp_parallel_combined_p (stmt
))
1894 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
1895 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1896 OMP_CLAUSE_REDUCTION
);
1897 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
1898 if (OMP_CLAUSE_REDUCTION_TASK (c
))
1900 tree type
= build_pointer_type (pointer_sized_int_node
);
1901 tree temp
= create_tmp_var (type
);
1902 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1904 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1905 OMP_CLAUSE_DECL (c
) = temp
;
1906 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
1907 gimple_omp_parallel_set_clauses (stmt
, c
);
1910 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
1913 ctx
= new_omp_context (stmt
, outer_ctx
);
1914 taskreg_contexts
.safe_push (ctx
);
1915 if (taskreg_nesting_level
> 1)
1916 ctx
->is_nested
= true;
1917 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1918 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1919 name
= create_tmp_var_name (".omp_data_s");
1920 name
= build_decl (gimple_location (stmt
),
1921 TYPE_DECL
, name
, ctx
->record_type
);
1922 DECL_ARTIFICIAL (name
) = 1;
1923 DECL_NAMELESS (name
) = 1;
1924 TYPE_NAME (ctx
->record_type
) = name
;
1925 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1926 if (!gimple_omp_parallel_grid_phony (stmt
))
1928 create_omp_child_function (ctx
, false);
1929 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1932 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
1933 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
1935 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
1936 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
1939 /* Scan an OpenMP task directive. */
1942 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1946 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
1948 /* Ignore task directives with empty bodies, unless they have depend
1951 && gimple_omp_body (stmt
)
1952 && empty_body_p (gimple_omp_body (stmt
))
1953 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
1955 gsi_replace (gsi
, gimple_build_nop (), false);
1959 if (gimple_omp_task_taskloop_p (stmt
))
1960 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
1962 ctx
= new_omp_context (stmt
, outer_ctx
);
1964 if (gimple_omp_task_taskwait_p (stmt
))
1966 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
1970 taskreg_contexts
.safe_push (ctx
);
1971 if (taskreg_nesting_level
> 1)
1972 ctx
->is_nested
= true;
1973 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1974 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1975 name
= create_tmp_var_name (".omp_data_s");
1976 name
= build_decl (gimple_location (stmt
),
1977 TYPE_DECL
, name
, ctx
->record_type
);
1978 DECL_ARTIFICIAL (name
) = 1;
1979 DECL_NAMELESS (name
) = 1;
1980 TYPE_NAME (ctx
->record_type
) = name
;
1981 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1982 create_omp_child_function (ctx
, false);
1983 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1985 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
1987 if (ctx
->srecord_type
)
1989 name
= create_tmp_var_name (".omp_data_a");
1990 name
= build_decl (gimple_location (stmt
),
1991 TYPE_DECL
, name
, ctx
->srecord_type
);
1992 DECL_ARTIFICIAL (name
) = 1;
1993 DECL_NAMELESS (name
) = 1;
1994 TYPE_NAME (ctx
->srecord_type
) = name
;
1995 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
1996 create_omp_child_function (ctx
, true);
1999 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2001 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2003 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2004 t
= build_int_cst (long_integer_type_node
, 0);
2005 gimple_omp_task_set_arg_size (stmt
, t
);
2006 t
= build_int_cst (long_integer_type_node
, 1);
2007 gimple_omp_task_set_arg_align (stmt
, t
);
2011 /* Helper function for finish_taskreg_scan, called through walk_tree.
2012 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2013 tree, replace it in the expression. */
2016 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2020 omp_context
*ctx
= (omp_context
*) data
;
2021 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
2024 if (DECL_HAS_VALUE_EXPR_P (t
))
2025 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2030 else if (IS_TYPE_OR_DECL_P (*tp
))
2035 /* If any decls have been made addressable during scan_omp,
2036 adjust their fields if needed, and layout record types
2037 of parallel/task constructs. */
2040 finish_taskreg_scan (omp_context
*ctx
)
2042 if (ctx
->record_type
== NULL_TREE
)
2045 /* If any task_shared_vars were needed, verify all
2046 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2047 statements if use_pointer_for_field hasn't changed
2048 because of that. If it did, update field types now. */
2049 if (task_shared_vars
)
2053 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2054 c
; c
= OMP_CLAUSE_CHAIN (c
))
2055 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2056 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2058 tree decl
= OMP_CLAUSE_DECL (c
);
2060 /* Global variables don't need to be copied,
2061 the receiver side will use them directly. */
2062 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2064 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
2065 || !use_pointer_for_field (decl
, ctx
))
2067 tree field
= lookup_field (decl
, ctx
);
2068 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2069 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2071 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2072 TREE_THIS_VOLATILE (field
) = 0;
2073 DECL_USER_ALIGN (field
) = 0;
2074 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2075 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2076 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
2077 if (ctx
->srecord_type
)
2079 tree sfield
= lookup_sfield (decl
, ctx
);
2080 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2081 TREE_THIS_VOLATILE (sfield
) = 0;
2082 DECL_USER_ALIGN (sfield
) = 0;
2083 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2084 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2085 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
2090 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2092 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2093 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2096 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2097 expects to find it at the start of data. */
2098 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2099 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2103 *p
= DECL_CHAIN (*p
);
2107 p
= &DECL_CHAIN (*p
);
2108 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2109 TYPE_FIELDS (ctx
->record_type
) = f
;
2111 layout_type (ctx
->record_type
);
2112 fixup_child_record_type (ctx
);
2114 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2116 layout_type (ctx
->record_type
);
2117 fixup_child_record_type (ctx
);
2121 location_t loc
= gimple_location (ctx
->stmt
);
2122 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2123 /* Move VLA fields to the end. */
2124 p
= &TYPE_FIELDS (ctx
->record_type
);
2126 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2127 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2130 *p
= TREE_CHAIN (*p
);
2131 TREE_CHAIN (*q
) = NULL_TREE
;
2132 q
= &TREE_CHAIN (*q
);
2135 p
= &DECL_CHAIN (*p
);
2137 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2139 /* Move fields corresponding to first and second _looptemp_
2140 clause first. There are filled by GOMP_taskloop
2141 and thus need to be in specific positions. */
2142 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2143 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2144 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2145 OMP_CLAUSE__LOOPTEMP_
);
2146 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2147 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2148 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2149 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
2150 p
= &TYPE_FIELDS (ctx
->record_type
);
2152 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2153 *p
= DECL_CHAIN (*p
);
2155 p
= &DECL_CHAIN (*p
);
2156 DECL_CHAIN (f1
) = f2
;
2159 DECL_CHAIN (f2
) = f3
;
2160 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2163 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2164 TYPE_FIELDS (ctx
->record_type
) = f1
;
2165 if (ctx
->srecord_type
)
2167 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2168 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2170 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2171 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2173 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2174 *p
= DECL_CHAIN (*p
);
2176 p
= &DECL_CHAIN (*p
);
2177 DECL_CHAIN (f1
) = f2
;
2178 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2181 DECL_CHAIN (f2
) = f3
;
2182 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2185 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2186 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2189 layout_type (ctx
->record_type
);
2190 fixup_child_record_type (ctx
);
2191 if (ctx
->srecord_type
)
2192 layout_type (ctx
->srecord_type
);
2193 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2194 TYPE_SIZE_UNIT (ctx
->record_type
));
2195 if (TREE_CODE (t
) != INTEGER_CST
)
2197 t
= unshare_expr (t
);
2198 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2200 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2201 t
= build_int_cst (long_integer_type_node
,
2202 TYPE_ALIGN_UNIT (ctx
->record_type
));
2203 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2207 /* Find the enclosing offload context. */
2209 static omp_context
*
2210 enclosing_target_ctx (omp_context
*ctx
)
2212 for (; ctx
; ctx
= ctx
->outer
)
2213 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2219 /* Return true if ctx is part of an oacc kernels region. */
2222 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2224 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2226 gimple
*stmt
= ctx
->stmt
;
2227 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2228 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2235 /* Check the parallelism clauses inside a kernels regions.
2236 Until kernels handling moves to use the same loop indirection
2237 scheme as parallel, we need to do this checking early. */
2240 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2242 bool checking
= true;
2243 unsigned outer_mask
= 0;
2244 unsigned this_mask
= 0;
2245 bool has_seq
= false, has_auto
= false;
2248 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2252 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2254 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2257 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2259 switch (OMP_CLAUSE_CODE (c
))
2261 case OMP_CLAUSE_GANG
:
2262 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2264 case OMP_CLAUSE_WORKER
:
2265 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2267 case OMP_CLAUSE_VECTOR
:
2268 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2270 case OMP_CLAUSE_SEQ
:
2273 case OMP_CLAUSE_AUTO
:
2283 if (has_seq
&& (this_mask
|| has_auto
))
2284 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2285 " OpenACC loop specifiers");
2286 else if (has_auto
&& this_mask
)
2287 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2288 " OpenACC loop specifiers");
2290 if (this_mask
& outer_mask
)
2291 error_at (gimple_location (stmt
), "inner loop uses same"
2292 " OpenACC parallelism as containing loop");
2295 return outer_mask
| this_mask
;
2298 /* Scan a GIMPLE_OMP_FOR. */
2300 static omp_context
*
2301 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2305 tree clauses
= gimple_omp_for_clauses (stmt
);
2307 ctx
= new_omp_context (stmt
, outer_ctx
);
2309 if (is_gimple_omp_oacc (stmt
))
2311 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2313 if (!tgt
|| is_oacc_parallel (tgt
))
2314 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2316 char const *check
= NULL
;
2318 switch (OMP_CLAUSE_CODE (c
))
2320 case OMP_CLAUSE_GANG
:
2324 case OMP_CLAUSE_WORKER
:
2328 case OMP_CLAUSE_VECTOR
:
2336 if (check
&& OMP_CLAUSE_OPERAND (c
, 0))
2337 error_at (gimple_location (stmt
),
2338 "argument not permitted on %qs clause in"
2339 " OpenACC %<parallel%>", check
);
2342 if (tgt
&& is_oacc_kernels (tgt
))
2344 /* Strip out reductions, as they are not handled yet. */
2345 tree
*prev_ptr
= &clauses
;
2347 while (tree probe
= *prev_ptr
)
2349 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2351 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2352 *prev_ptr
= *next_ptr
;
2354 prev_ptr
= next_ptr
;
2357 gimple_omp_for_set_clauses (stmt
, clauses
);
2358 check_oacc_kernel_gwv (stmt
, ctx
);
2362 scan_sharing_clauses (clauses
, ctx
);
2364 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2365 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2367 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2368 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2369 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2370 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2372 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2376 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2379 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2380 omp_context
*outer_ctx
)
2382 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2383 gsi_replace (gsi
, bind
, false);
2384 gimple_seq seq
= NULL
;
2385 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2386 tree cond
= create_tmp_var_raw (integer_type_node
);
2387 DECL_CONTEXT (cond
) = current_function_decl
;
2388 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2389 gimple_bind_set_vars (bind
, cond
);
2390 gimple_call_set_lhs (g
, cond
);
2391 gimple_seq_add_stmt (&seq
, g
);
2392 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2393 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2394 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2395 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2396 gimple_seq_add_stmt (&seq
, g
);
2397 g
= gimple_build_label (lab1
);
2398 gimple_seq_add_stmt (&seq
, g
);
2399 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2400 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2401 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2402 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2403 gimple_omp_for_set_clauses (new_stmt
, clause
);
2404 gimple_seq_add_stmt (&seq
, new_stmt
);
2405 g
= gimple_build_goto (lab3
);
2406 gimple_seq_add_stmt (&seq
, g
);
2407 g
= gimple_build_label (lab2
);
2408 gimple_seq_add_stmt (&seq
, g
);
2409 gimple_seq_add_stmt (&seq
, stmt
);
2410 g
= gimple_build_label (lab3
);
2411 gimple_seq_add_stmt (&seq
, g
);
2412 gimple_bind_set_body (bind
, seq
);
2414 scan_omp_for (new_stmt
, outer_ctx
);
2415 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2418 /* Scan an OpenMP sections directive. */
2421 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2425 ctx
= new_omp_context (stmt
, outer_ctx
);
2426 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2427 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2430 /* Scan an OpenMP single directive. */
2433 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2438 ctx
= new_omp_context (stmt
, outer_ctx
);
2439 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2440 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2441 name
= create_tmp_var_name (".omp_copy_s");
2442 name
= build_decl (gimple_location (stmt
),
2443 TYPE_DECL
, name
, ctx
->record_type
);
2444 TYPE_NAME (ctx
->record_type
) = name
;
2446 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2447 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2449 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2450 ctx
->record_type
= NULL
;
2452 layout_type (ctx
->record_type
);
2455 /* Scan a GIMPLE_OMP_TARGET. */
2458 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
2462 bool offloaded
= is_gimple_omp_offloaded (stmt
);
2463 tree clauses
= gimple_omp_target_clauses (stmt
);
2465 ctx
= new_omp_context (stmt
, outer_ctx
);
2466 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2467 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2468 name
= create_tmp_var_name (".omp_data_t");
2469 name
= build_decl (gimple_location (stmt
),
2470 TYPE_DECL
, name
, ctx
->record_type
);
2471 DECL_ARTIFICIAL (name
) = 1;
2472 DECL_NAMELESS (name
) = 1;
2473 TYPE_NAME (ctx
->record_type
) = name
;
2474 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2478 create_omp_child_function (ctx
, false);
2479 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2482 scan_sharing_clauses (clauses
, ctx
);
2483 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2485 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2486 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2489 TYPE_FIELDS (ctx
->record_type
)
2490 = nreverse (TYPE_FIELDS (ctx
->record_type
));
2493 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
2494 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
2496 field
= DECL_CHAIN (field
))
2497 gcc_assert (DECL_ALIGN (field
) == align
);
2499 layout_type (ctx
->record_type
);
2501 fixup_child_record_type (ctx
);
2505 /* Scan an OpenMP teams directive. */
2508 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
2510 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
2512 if (!gimple_omp_teams_host (stmt
))
2514 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2515 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2518 taskreg_contexts
.safe_push (ctx
);
2519 gcc_assert (taskreg_nesting_level
== 1);
2520 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2521 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2522 tree name
= create_tmp_var_name (".omp_data_s");
2523 name
= build_decl (gimple_location (stmt
),
2524 TYPE_DECL
, name
, ctx
->record_type
);
2525 DECL_ARTIFICIAL (name
) = 1;
2526 DECL_NAMELESS (name
) = 1;
2527 TYPE_NAME (ctx
->record_type
) = name
;
2528 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2529 create_omp_child_function (ctx
, false);
2530 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2532 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2533 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2535 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2536 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2539 /* Check nesting restrictions. */
2541 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
2545 if (ctx
&& gimple_code (ctx
->stmt
) == GIMPLE_OMP_GRID_BODY
)
2546 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2547 the original copy of its contents. */
2550 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2551 inside an OpenACC CTX. */
2552 if (!(is_gimple_omp (stmt
)
2553 && is_gimple_omp_oacc (stmt
))
2554 /* Except for atomic codes that we share with OpenMP. */
2555 && !(gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2556 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2558 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2560 error_at (gimple_location (stmt
),
2561 "non-OpenACC construct inside of OpenACC routine");
2565 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
2566 if (is_gimple_omp (octx
->stmt
)
2567 && is_gimple_omp_oacc (octx
->stmt
))
2569 error_at (gimple_location (stmt
),
2570 "non-OpenACC construct inside of OpenACC region");
2577 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
2578 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
2581 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
2583 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2584 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2586 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
2587 && (ctx
->outer
== NULL
2588 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
2589 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
2590 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
2591 != GF_OMP_FOR_KIND_FOR
)
2592 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
2594 error_at (gimple_location (stmt
),
2595 "%<ordered simd threads%> must be closely "
2596 "nested inside of %<for simd%> region");
2602 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2603 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
)
2605 error_at (gimple_location (stmt
),
2606 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2607 " or %<#pragma omp atomic%> may not be nested inside"
2608 " %<simd%> region");
2611 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2613 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
2614 || ((gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
)
2615 && (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
)))
2616 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
2618 error_at (gimple_location (stmt
),
2619 "only %<distribute%> or %<parallel%> regions are "
2620 "allowed to be strictly nested inside %<teams%> "
2626 switch (gimple_code (stmt
))
2628 case GIMPLE_OMP_FOR
:
2629 if (gimple_omp_for_kind (stmt
) & GF_OMP_FOR_SIMD
)
2631 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
2633 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
2635 error_at (gimple_location (stmt
),
2636 "%<distribute%> region must be strictly nested "
2637 "inside %<teams%> construct");
2642 /* We split taskloop into task and nested taskloop in it. */
2643 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
2645 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
2650 switch (gimple_code (ctx
->stmt
))
2652 case GIMPLE_OMP_FOR
:
2653 ok
= (gimple_omp_for_kind (ctx
->stmt
)
2654 == GF_OMP_FOR_KIND_OACC_LOOP
);
2657 case GIMPLE_OMP_TARGET
:
2658 switch (gimple_omp_target_kind (ctx
->stmt
))
2660 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
2661 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
2672 else if (oacc_get_fn_attrib (current_function_decl
))
2676 error_at (gimple_location (stmt
),
2677 "OpenACC loop directive must be associated with"
2678 " an OpenACC compute region");
2684 if (is_gimple_call (stmt
)
2685 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2686 == BUILT_IN_GOMP_CANCEL
2687 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2688 == BUILT_IN_GOMP_CANCELLATION_POINT
))
2690 const char *bad
= NULL
;
2691 const char *kind
= NULL
;
2692 const char *construct
2693 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2694 == BUILT_IN_GOMP_CANCEL
)
2695 ? "#pragma omp cancel"
2696 : "#pragma omp cancellation point";
2699 error_at (gimple_location (stmt
), "orphaned %qs construct",
2703 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
2704 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
2708 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
2709 bad
= "#pragma omp parallel";
2710 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2711 == BUILT_IN_GOMP_CANCEL
2712 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2713 ctx
->cancellable
= true;
2717 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2718 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
2719 bad
= "#pragma omp for";
2720 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2721 == BUILT_IN_GOMP_CANCEL
2722 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2724 ctx
->cancellable
= true;
2725 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2727 warning_at (gimple_location (stmt
), 0,
2728 "%<#pragma omp cancel for%> inside "
2729 "%<nowait%> for construct");
2730 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2731 OMP_CLAUSE_ORDERED
))
2732 warning_at (gimple_location (stmt
), 0,
2733 "%<#pragma omp cancel for%> inside "
2734 "%<ordered%> for construct");
2739 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
2740 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
2741 bad
= "#pragma omp sections";
2742 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2743 == BUILT_IN_GOMP_CANCEL
2744 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2746 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
2748 ctx
->cancellable
= true;
2749 if (omp_find_clause (gimple_omp_sections_clauses
2752 warning_at (gimple_location (stmt
), 0,
2753 "%<#pragma omp cancel sections%> inside "
2754 "%<nowait%> sections construct");
2758 gcc_assert (ctx
->outer
2759 && gimple_code (ctx
->outer
->stmt
)
2760 == GIMPLE_OMP_SECTIONS
);
2761 ctx
->outer
->cancellable
= true;
2762 if (omp_find_clause (gimple_omp_sections_clauses
2765 warning_at (gimple_location (stmt
), 0,
2766 "%<#pragma omp cancel sections%> inside "
2767 "%<nowait%> sections construct");
2773 if (!is_task_ctx (ctx
)
2774 && (!is_taskloop_ctx (ctx
)
2775 || ctx
->outer
== NULL
2776 || !is_task_ctx (ctx
->outer
)))
2777 bad
= "#pragma omp task";
2780 for (omp_context
*octx
= ctx
->outer
;
2781 octx
; octx
= octx
->outer
)
2783 switch (gimple_code (octx
->stmt
))
2785 case GIMPLE_OMP_TASKGROUP
:
2787 case GIMPLE_OMP_TARGET
:
2788 if (gimple_omp_target_kind (octx
->stmt
)
2789 != GF_OMP_TARGET_KIND_REGION
)
2792 case GIMPLE_OMP_PARALLEL
:
2793 case GIMPLE_OMP_TEAMS
:
2794 error_at (gimple_location (stmt
),
2795 "%<%s taskgroup%> construct not closely "
2796 "nested inside of %<taskgroup%> region",
2799 case GIMPLE_OMP_TASK
:
2800 if (gimple_omp_task_taskloop_p (octx
->stmt
)
2802 && is_taskloop_ctx (octx
->outer
))
2805 = gimple_omp_for_clauses (octx
->outer
->stmt
);
2806 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
2815 ctx
->cancellable
= true;
2820 error_at (gimple_location (stmt
), "invalid arguments");
2825 error_at (gimple_location (stmt
),
2826 "%<%s %s%> construct not closely nested inside of %qs",
2827 construct
, kind
, bad
);
2832 case GIMPLE_OMP_SECTIONS
:
2833 case GIMPLE_OMP_SINGLE
:
2834 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2835 switch (gimple_code (ctx
->stmt
))
2837 case GIMPLE_OMP_FOR
:
2838 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
2839 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
2842 case GIMPLE_OMP_SECTIONS
:
2843 case GIMPLE_OMP_SINGLE
:
2844 case GIMPLE_OMP_ORDERED
:
2845 case GIMPLE_OMP_MASTER
:
2846 case GIMPLE_OMP_TASK
:
2847 case GIMPLE_OMP_CRITICAL
:
2848 if (is_gimple_call (stmt
))
2850 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2851 != BUILT_IN_GOMP_BARRIER
)
2853 error_at (gimple_location (stmt
),
2854 "barrier region may not be closely nested inside "
2855 "of work-sharing, %<critical%>, %<ordered%>, "
2856 "%<master%>, explicit %<task%> or %<taskloop%> "
2860 error_at (gimple_location (stmt
),
2861 "work-sharing region may not be closely nested inside "
2862 "of work-sharing, %<critical%>, %<ordered%>, "
2863 "%<master%>, explicit %<task%> or %<taskloop%> region");
2865 case GIMPLE_OMP_PARALLEL
:
2866 case GIMPLE_OMP_TEAMS
:
2868 case GIMPLE_OMP_TARGET
:
2869 if (gimple_omp_target_kind (ctx
->stmt
)
2870 == GF_OMP_TARGET_KIND_REGION
)
2877 case GIMPLE_OMP_MASTER
:
2878 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2879 switch (gimple_code (ctx
->stmt
))
2881 case GIMPLE_OMP_FOR
:
2882 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
2883 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
2886 case GIMPLE_OMP_SECTIONS
:
2887 case GIMPLE_OMP_SINGLE
:
2888 case GIMPLE_OMP_TASK
:
2889 error_at (gimple_location (stmt
),
2890 "%<master%> region may not be closely nested inside "
2891 "of work-sharing, explicit %<task%> or %<taskloop%> "
2894 case GIMPLE_OMP_PARALLEL
:
2895 case GIMPLE_OMP_TEAMS
:
2897 case GIMPLE_OMP_TARGET
:
2898 if (gimple_omp_target_kind (ctx
->stmt
)
2899 == GF_OMP_TARGET_KIND_REGION
)
2906 case GIMPLE_OMP_TASK
:
2907 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2908 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
2909 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
2910 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
2912 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
2913 error_at (OMP_CLAUSE_LOCATION (c
),
2914 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2915 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
2919 case GIMPLE_OMP_ORDERED
:
2920 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2921 c
; c
= OMP_CLAUSE_CHAIN (c
))
2923 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
2925 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
2926 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
2929 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
2930 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
2931 || kind
== OMP_CLAUSE_DEPEND_SINK
)
2934 /* Look for containing ordered(N) loop. */
2936 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2938 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2939 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
2941 error_at (OMP_CLAUSE_LOCATION (c
),
2942 "%<ordered%> construct with %<depend%> clause "
2943 "must be closely nested inside an %<ordered%> "
2947 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
2949 error_at (OMP_CLAUSE_LOCATION (c
),
2950 "%<ordered%> construct with %<depend%> clause "
2951 "must be closely nested inside a loop with "
2952 "%<ordered%> clause with a parameter");
2958 error_at (OMP_CLAUSE_LOCATION (c
),
2959 "invalid depend kind in omp %<ordered%> %<depend%>");
2963 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2964 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2966 /* ordered simd must be closely nested inside of simd region,
2967 and simd region must not encounter constructs other than
2968 ordered simd, therefore ordered simd may be either orphaned,
2969 or ctx->stmt must be simd. The latter case is handled already
2973 error_at (gimple_location (stmt
),
2974 "%<ordered%> %<simd%> must be closely nested inside "
2979 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2980 switch (gimple_code (ctx
->stmt
))
2982 case GIMPLE_OMP_CRITICAL
:
2983 case GIMPLE_OMP_TASK
:
2984 case GIMPLE_OMP_ORDERED
:
2985 ordered_in_taskloop
:
2986 error_at (gimple_location (stmt
),
2987 "%<ordered%> region may not be closely nested inside "
2988 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2989 "%<taskloop%> region");
2991 case GIMPLE_OMP_FOR
:
2992 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
2993 goto ordered_in_taskloop
;
2995 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2996 OMP_CLAUSE_ORDERED
);
2999 error_at (gimple_location (stmt
),
3000 "%<ordered%> region must be closely nested inside "
3001 "a loop region with an %<ordered%> clause");
3004 if (OMP_CLAUSE_ORDERED_EXPR (o
) != NULL_TREE
3005 && omp_find_clause (c
, OMP_CLAUSE_DEPEND
) == NULL_TREE
)
3007 error_at (gimple_location (stmt
),
3008 "%<ordered%> region without %<depend%> clause may "
3009 "not be closely nested inside a loop region with "
3010 "an %<ordered%> clause with a parameter");
3014 case GIMPLE_OMP_TARGET
:
3015 if (gimple_omp_target_kind (ctx
->stmt
)
3016 != GF_OMP_TARGET_KIND_REGION
)
3019 case GIMPLE_OMP_PARALLEL
:
3020 case GIMPLE_OMP_TEAMS
:
3021 error_at (gimple_location (stmt
),
3022 "%<ordered%> region must be closely nested inside "
3023 "a loop region with an %<ordered%> clause");
3029 case GIMPLE_OMP_CRITICAL
:
3032 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3033 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3034 if (gomp_critical
*other_crit
3035 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3036 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3038 error_at (gimple_location (stmt
),
3039 "%<critical%> region may not be nested inside "
3040 "a %<critical%> region with the same name");
3045 case GIMPLE_OMP_TEAMS
:
3048 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3049 || (gimple_omp_target_kind (ctx
->stmt
)
3050 != GF_OMP_TARGET_KIND_REGION
))
3052 /* Teams construct can appear either strictly nested inside of
3053 target construct with no intervening stmts, or can be encountered
3054 only by initial task (so must not appear inside any OpenMP
3056 error_at (gimple_location (stmt
),
3057 "%<teams%> construct must be closely nested inside of "
3058 "%<target%> construct or not nested in any OpenMP "
3063 case GIMPLE_OMP_TARGET
:
3064 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3065 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3066 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3067 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3069 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3070 error_at (OMP_CLAUSE_LOCATION (c
),
3071 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3072 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3075 if (is_gimple_omp_offloaded (stmt
)
3076 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3078 error_at (gimple_location (stmt
),
3079 "OpenACC region inside of OpenACC routine, nested "
3080 "parallelism not supported yet");
3083 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3085 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3087 if (is_gimple_omp (stmt
)
3088 && is_gimple_omp_oacc (stmt
)
3089 && is_gimple_omp (ctx
->stmt
))
3091 error_at (gimple_location (stmt
),
3092 "OpenACC construct inside of non-OpenACC region");
3098 const char *stmt_name
, *ctx_stmt_name
;
3099 switch (gimple_omp_target_kind (stmt
))
3101 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3102 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3103 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3104 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3105 stmt_name
= "target enter data"; break;
3106 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3107 stmt_name
= "target exit data"; break;
3108 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3109 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3110 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3111 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3112 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
3113 stmt_name
= "enter/exit data"; break;
3114 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3116 default: gcc_unreachable ();
3118 switch (gimple_omp_target_kind (ctx
->stmt
))
3120 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3121 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3122 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3123 ctx_stmt_name
= "parallel"; break;
3124 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3125 ctx_stmt_name
= "kernels"; break;
3126 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3127 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3128 ctx_stmt_name
= "host_data"; break;
3129 default: gcc_unreachable ();
3132 /* OpenACC/OpenMP mismatch? */
3133 if (is_gimple_omp_oacc (stmt
)
3134 != is_gimple_omp_oacc (ctx
->stmt
))
3136 error_at (gimple_location (stmt
),
3137 "%s %qs construct inside of %s %qs region",
3138 (is_gimple_omp_oacc (stmt
)
3139 ? "OpenACC" : "OpenMP"), stmt_name
,
3140 (is_gimple_omp_oacc (ctx
->stmt
)
3141 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3144 if (is_gimple_omp_offloaded (ctx
->stmt
))
3146 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3147 if (is_gimple_omp_oacc (ctx
->stmt
))
3149 error_at (gimple_location (stmt
),
3150 "%qs construct inside of %qs region",
3151 stmt_name
, ctx_stmt_name
);
3156 warning_at (gimple_location (stmt
), 0,
3157 "%qs construct inside of %qs region",
3158 stmt_name
, ctx_stmt_name
);
3170 /* Helper function scan_omp.
3172 Callback for walk_tree or operators in walk_gimple_stmt used to
3173 scan for OMP directives in TP. */
3176 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3178 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3179 omp_context
*ctx
= (omp_context
*) wi
->info
;
3182 switch (TREE_CODE (t
))
3190 tree repl
= remap_decl (t
, &ctx
->cb
);
3191 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3197 if (ctx
&& TYPE_P (t
))
3198 *tp
= remap_type (t
, &ctx
->cb
);
3199 else if (!DECL_P (t
))
3204 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3205 if (tem
!= TREE_TYPE (t
))
3207 if (TREE_CODE (t
) == INTEGER_CST
)
3208 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3210 TREE_TYPE (t
) = tem
;
3220 /* Return true if FNDECL is a setjmp or a longjmp. */
3223 setjmp_or_longjmp_p (const_tree fndecl
)
3225 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3226 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
3229 tree declname
= DECL_NAME (fndecl
);
3232 const char *name
= IDENTIFIER_POINTER (declname
);
3233 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
3237 /* Helper function for scan_omp.
3239 Callback for walk_gimple_stmt used to scan for OMP directives in
3240 the current statement in GSI. */
3243 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3244 struct walk_stmt_info
*wi
)
3246 gimple
*stmt
= gsi_stmt (*gsi
);
3247 omp_context
*ctx
= (omp_context
*) wi
->info
;
3249 if (gimple_has_location (stmt
))
3250 input_location
= gimple_location (stmt
);
3252 /* Check the nesting restrictions. */
3253 bool remove
= false;
3254 if (is_gimple_omp (stmt
))
3255 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3256 else if (is_gimple_call (stmt
))
3258 tree fndecl
= gimple_call_fndecl (stmt
);
3261 if (setjmp_or_longjmp_p (fndecl
)
3263 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3264 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
3267 error_at (gimple_location (stmt
),
3268 "setjmp/longjmp inside simd construct");
3270 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3271 switch (DECL_FUNCTION_CODE (fndecl
))
3273 case BUILT_IN_GOMP_BARRIER
:
3274 case BUILT_IN_GOMP_CANCEL
:
3275 case BUILT_IN_GOMP_CANCELLATION_POINT
:
3276 case BUILT_IN_GOMP_TASKYIELD
:
3277 case BUILT_IN_GOMP_TASKWAIT
:
3278 case BUILT_IN_GOMP_TASKGROUP_START
:
3279 case BUILT_IN_GOMP_TASKGROUP_END
:
3280 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3289 stmt
= gimple_build_nop ();
3290 gsi_replace (gsi
, stmt
, false);
3293 *handled_ops_p
= true;
3295 switch (gimple_code (stmt
))
3297 case GIMPLE_OMP_PARALLEL
:
3298 taskreg_nesting_level
++;
3299 scan_omp_parallel (gsi
, ctx
);
3300 taskreg_nesting_level
--;
3303 case GIMPLE_OMP_TASK
:
3304 taskreg_nesting_level
++;
3305 scan_omp_task (gsi
, ctx
);
3306 taskreg_nesting_level
--;
3309 case GIMPLE_OMP_FOR
:
3310 if (((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3311 & GF_OMP_FOR_KIND_MASK
) == GF_OMP_FOR_KIND_SIMD
)
3312 && omp_maybe_offloaded_ctx (ctx
)
3313 && omp_max_simt_vf ())
3314 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3316 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
3319 case GIMPLE_OMP_SECTIONS
:
3320 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
3323 case GIMPLE_OMP_SINGLE
:
3324 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
3327 case GIMPLE_OMP_SECTION
:
3328 case GIMPLE_OMP_MASTER
:
3329 case GIMPLE_OMP_ORDERED
:
3330 case GIMPLE_OMP_CRITICAL
:
3331 case GIMPLE_OMP_GRID_BODY
:
3332 ctx
= new_omp_context (stmt
, ctx
);
3333 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3336 case GIMPLE_OMP_TASKGROUP
:
3337 ctx
= new_omp_context (stmt
, ctx
);
3338 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
3339 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3342 case GIMPLE_OMP_TARGET
:
3343 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3346 case GIMPLE_OMP_TEAMS
:
3347 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
3349 taskreg_nesting_level
++;
3350 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3351 taskreg_nesting_level
--;
3354 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3361 *handled_ops_p
= false;
3363 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
3365 var
= DECL_CHAIN (var
))
3366 insert_decl_map (&ctx
->cb
, var
, var
);
3370 *handled_ops_p
= false;
3378 /* Scan all the statements starting at the current statement. CTX
3379 contains context information about the OMP directives and
3380 clauses found during the scan. */
3383 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
3385 location_t saved_location
;
3386 struct walk_stmt_info wi
;
3388 memset (&wi
, 0, sizeof (wi
));
3390 wi
.want_locations
= true;
3392 saved_location
= input_location
;
3393 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
3394 input_location
= saved_location
;
3397 /* Re-gimplification and code generation routines. */
3399 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3400 of BIND if in a method. */
3403 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
3405 if (DECL_ARGUMENTS (current_function_decl
)
3406 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
3407 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
3410 tree vars
= gimple_bind_vars (bind
);
3411 for (tree
*pvar
= &vars
; *pvar
; )
3412 if (omp_member_access_dummy_var (*pvar
))
3413 *pvar
= DECL_CHAIN (*pvar
);
3415 pvar
= &DECL_CHAIN (*pvar
);
3416 gimple_bind_set_vars (bind
, vars
);
3420 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3421 block and its subblocks. */
3424 remove_member_access_dummy_vars (tree block
)
3426 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
3427 if (omp_member_access_dummy_var (*pvar
))
3428 *pvar
= DECL_CHAIN (*pvar
);
3430 pvar
= &DECL_CHAIN (*pvar
);
3432 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
3433 remove_member_access_dummy_vars (block
);
3436 /* If a context was created for STMT when it was scanned, return it. */
3438 static omp_context
*
3439 maybe_lookup_ctx (gimple
*stmt
)
3442 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
3443 return n
? (omp_context
*) n
->value
: NULL
;
3447 /* Find the mapping for DECL in CTX or the immediately enclosing
3448 context that has a mapping for DECL.
3450 If CTX is a nested parallel directive, we may have to use the decl
3451 mappings created in CTX's parent context. Suppose that we have the
3452 following parallel nesting (variable UIDs showed for clarity):
3455 #omp parallel shared(iD.1562) -> outer parallel
3456 iD.1562 = iD.1562 + 1;
3458 #omp parallel shared (iD.1562) -> inner parallel
3459 iD.1562 = iD.1562 - 1;
3461 Each parallel structure will create a distinct .omp_data_s structure
3462 for copying iD.1562 in/out of the directive:
3464 outer parallel .omp_data_s.1.i -> iD.1562
3465 inner parallel .omp_data_s.2.i -> iD.1562
3467 A shared variable mapping will produce a copy-out operation before
3468 the parallel directive and a copy-in operation after it. So, in
3469 this case we would have:
3472 .omp_data_o.1.i = iD.1562;
3473 #omp parallel shared(iD.1562) -> outer parallel
3474 .omp_data_i.1 = &.omp_data_o.1
3475 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3477 .omp_data_o.2.i = iD.1562; -> **
3478 #omp parallel shared(iD.1562) -> inner parallel
3479 .omp_data_i.2 = &.omp_data_o.2
3480 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3483 ** This is a problem. The symbol iD.1562 cannot be referenced
3484 inside the body of the outer parallel region. But since we are
3485 emitting this copy operation while expanding the inner parallel
3486 directive, we need to access the CTX structure of the outer
3487 parallel directive to get the correct mapping:
3489 .omp_data_o.2.i = .omp_data_i.1->i
3491 Since there may be other workshare or parallel directives enclosing
3492 the parallel directive, it may be necessary to walk up the context
3493 parent chain. This is not a problem in general because nested
3494 parallelism happens only rarely. */
3497 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3502 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3503 t
= maybe_lookup_decl (decl
, up
);
3505 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
3507 return t
? t
: decl
;
3511 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3512 in outer contexts. */
3515 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3520 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3521 t
= maybe_lookup_decl (decl
, up
);
3523 return t
? t
: decl
;
3527 /* Construct the initialization value for reduction operation OP. */
3530 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
3539 case TRUTH_ORIF_EXPR
:
3540 case TRUTH_XOR_EXPR
:
3542 return build_zero_cst (type
);
3545 case TRUTH_AND_EXPR
:
3546 case TRUTH_ANDIF_EXPR
:
3548 return fold_convert_loc (loc
, type
, integer_one_node
);
3551 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
3554 if (SCALAR_FLOAT_TYPE_P (type
))
3556 REAL_VALUE_TYPE max
, min
;
3557 if (HONOR_INFINITIES (type
))
3560 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
3563 real_maxval (&min
, 1, TYPE_MODE (type
));
3564 return build_real (type
, min
);
3566 else if (POINTER_TYPE_P (type
))
3569 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3570 return wide_int_to_tree (type
, min
);
3574 gcc_assert (INTEGRAL_TYPE_P (type
));
3575 return TYPE_MIN_VALUE (type
);
3579 if (SCALAR_FLOAT_TYPE_P (type
))
3581 REAL_VALUE_TYPE max
;
3582 if (HONOR_INFINITIES (type
))
3585 real_maxval (&max
, 0, TYPE_MODE (type
));
3586 return build_real (type
, max
);
3588 else if (POINTER_TYPE_P (type
))
3591 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3592 return wide_int_to_tree (type
, max
);
3596 gcc_assert (INTEGRAL_TYPE_P (type
));
3597 return TYPE_MAX_VALUE (type
);
3605 /* Construct the initialization value for reduction CLAUSE. */
3608 omp_reduction_init (tree clause
, tree type
)
3610 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
3611 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
3614 /* Return alignment to be assumed for var in CLAUSE, which should be
3615 OMP_CLAUSE_ALIGNED. */
3618 omp_clause_aligned_alignment (tree clause
)
3620 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
3621 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
3623 /* Otherwise return implementation defined alignment. */
3624 unsigned int al
= 1;
3625 opt_scalar_mode mode_iter
;
3626 auto_vector_sizes sizes
;
3627 targetm
.vectorize
.autovectorize_vector_sizes (&sizes
, true);
3629 for (unsigned int i
= 0; i
< sizes
.length (); ++i
)
3630 vs
= ordered_max (vs
, sizes
[i
]);
3631 static enum mode_class classes
[]
3632 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
3633 for (int i
= 0; i
< 4; i
+= 2)
3634 /* The for loop above dictates that we only walk through scalar classes. */
3635 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
3637 scalar_mode mode
= mode_iter
.require ();
3638 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
3639 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
3641 while (maybe_ne (vs
, 0U)
3642 && known_lt (GET_MODE_SIZE (vmode
), vs
)
3643 && GET_MODE_2XWIDER_MODE (vmode
).exists ())
3644 vmode
= GET_MODE_2XWIDER_MODE (vmode
).require ();
3646 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
3647 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
3649 poly_uint64 nelts
= exact_div (GET_MODE_SIZE (vmode
),
3650 GET_MODE_SIZE (mode
));
3651 type
= build_vector_type (type
, nelts
);
3652 if (TYPE_MODE (type
) != vmode
)
3654 if (TYPE_ALIGN_UNIT (type
) > al
)
3655 al
= TYPE_ALIGN_UNIT (type
);
3657 return build_int_cst (integer_type_node
, al
);
3661 /* This structure is part of the interface between lower_rec_simd_input_clauses
3662 and lower_rec_input_clauses. */
3664 struct omplow_simd_context
{
3665 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3668 vec
<tree
, va_heap
> simt_eargs
;
3669 gimple_seq simt_dlist
;
3670 poly_uint64_pod max_vf
;
3674 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3678 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
3679 omplow_simd_context
*sctx
, tree
&ivar
, tree
&lvar
)
3681 if (known_eq (sctx
->max_vf
, 0U))
3683 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
3684 if (maybe_gt (sctx
->max_vf
, 1U))
3686 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3687 OMP_CLAUSE_SAFELEN
);
3690 poly_uint64 safe_len
;
3691 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
3692 || maybe_lt (safe_len
, 1U))
3695 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
3698 if (maybe_gt (sctx
->max_vf
, 1U))
3700 sctx
->idx
= create_tmp_var (unsigned_type_node
);
3701 sctx
->lane
= create_tmp_var (unsigned_type_node
);
3704 if (known_eq (sctx
->max_vf
, 1U))
3709 if (is_gimple_reg (new_var
))
3711 ivar
= lvar
= new_var
;
3714 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
3715 ivar
= lvar
= create_tmp_var (type
);
3716 TREE_ADDRESSABLE (ivar
) = 1;
3717 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
3718 NULL
, DECL_ATTRIBUTES (ivar
));
3719 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
3720 tree clobber
= build_constructor (type
, NULL
);
3721 TREE_THIS_VOLATILE (clobber
) = 1;
3722 gimple
*g
= gimple_build_assign (ivar
, clobber
);
3723 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
3727 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
3728 tree avar
= create_tmp_var_raw (atype
);
3729 if (TREE_ADDRESSABLE (new_var
))
3730 TREE_ADDRESSABLE (avar
) = 1;
3731 DECL_ATTRIBUTES (avar
)
3732 = tree_cons (get_identifier ("omp simd array"), NULL
,
3733 DECL_ATTRIBUTES (avar
));
3734 gimple_add_tmp_var (avar
);
3735 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->idx
,
3736 NULL_TREE
, NULL_TREE
);
3737 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
3738 NULL_TREE
, NULL_TREE
);
3739 TREE_THIS_NOTRAP (ivar
) = 1;
3740 TREE_THIS_NOTRAP (lvar
) = 1;
3742 if (DECL_P (new_var
))
3744 SET_DECL_VALUE_EXPR (new_var
, lvar
);
3745 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3750 /* Helper function of lower_rec_input_clauses. For a reference
3751 in simd reduction, add an underlying variable it will reference. */
3754 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
3756 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
3757 if (TREE_CONSTANT (z
))
3759 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
3760 get_name (new_vard
));
3761 gimple_add_tmp_var (z
);
3762 TREE_ADDRESSABLE (z
) = 1;
3763 z
= build_fold_addr_expr_loc (loc
, z
);
3764 gimplify_assign (new_vard
, z
, ilist
);
3768 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
3769 code to emit (type) (tskred_temp[idx]). */
3772 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
3775 unsigned HOST_WIDE_INT sz
3776 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
3777 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
3778 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
3780 tree v
= create_tmp_var (pointer_sized_int_node
);
3781 gimple
*g
= gimple_build_assign (v
, r
);
3782 gimple_seq_add_stmt (ilist
, g
);
3783 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
3785 v
= create_tmp_var (type
);
3786 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
3787 gimple_seq_add_stmt (ilist
, g
);
3792 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3793 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3794 private variables. Initialization statements go in ILIST, while calls
3795 to destructors go in DLIST. */
3798 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
3799 omp_context
*ctx
, struct omp_for_data
*fd
)
3801 tree c
, dtor
, copyin_seq
, x
, ptr
;
3802 bool copyin_by_ref
= false;
3803 bool lastprivate_firstprivate
= false;
3804 bool reduction_omp_orig_ref
= false;
3806 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3807 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
);
3808 omplow_simd_context sctx
= omplow_simd_context ();
3809 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
3810 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
3811 gimple_seq llist
[3] = { };
3812 tree nonconst_simd_if
= NULL_TREE
;
3815 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
3817 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3818 with data sharing clauses referencing variable sized vars. That
3819 is unnecessarily hard to support and very unlikely to result in
3820 vectorized code anyway. */
3822 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3823 switch (OMP_CLAUSE_CODE (c
))
3825 case OMP_CLAUSE_LINEAR
:
3826 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
3829 case OMP_CLAUSE_PRIVATE
:
3830 case OMP_CLAUSE_FIRSTPRIVATE
:
3831 case OMP_CLAUSE_LASTPRIVATE
:
3832 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
3834 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
3836 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
3837 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
3841 case OMP_CLAUSE_REDUCTION
:
3842 case OMP_CLAUSE_IN_REDUCTION
:
3843 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
3844 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
3846 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
3848 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
3849 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
3854 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
3856 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
3857 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
3859 case OMP_CLAUSE_SIMDLEN
:
3860 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
3863 case OMP_CLAUSE__CONDTEMP_
:
3864 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
3872 /* Add a placeholder for simduid. */
3873 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
3874 sctx
.simt_eargs
.safe_push (NULL_TREE
);
3876 unsigned task_reduction_cnt
= 0;
3877 unsigned task_reduction_cntorig
= 0;
3878 unsigned task_reduction_cnt_full
= 0;
3879 unsigned task_reduction_cntorig_full
= 0;
3880 unsigned task_reduction_other_cnt
= 0;
3881 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
3882 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
3883 /* Do all the fixed sized types in the first pass, and the variable sized
3884 types in the second pass. This makes sure that the scalar arguments to
3885 the variable sized types are processed before we use them in the
3886 variable sized operations. For task reductions we use 4 passes, in the
3887 first two we ignore them, in the third one gather arguments for
3888 GOMP_task_reduction_remap call and in the last pass actually handle
3889 the task reductions. */
3890 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
3893 if (pass
== 2 && task_reduction_cnt
)
3896 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
3897 + task_reduction_cntorig
);
3898 tskred_avar
= create_tmp_var_raw (tskred_atype
);
3899 gimple_add_tmp_var (tskred_avar
);
3900 TREE_ADDRESSABLE (tskred_avar
) = 1;
3901 task_reduction_cnt_full
= task_reduction_cnt
;
3902 task_reduction_cntorig_full
= task_reduction_cntorig
;
3904 else if (pass
== 3 && task_reduction_cnt
)
3906 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
3908 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
3909 size_int (task_reduction_cntorig
),
3910 build_fold_addr_expr (tskred_avar
));
3911 gimple_seq_add_stmt (ilist
, g
);
3913 if (pass
== 3 && task_reduction_other_cnt
)
3915 /* For reduction clauses, build
3916 tskred_base = (void *) tskred_temp[2]
3917 + omp_get_thread_num () * tskred_temp[1]
3918 or if tskred_temp[1] is known to be constant, that constant
3919 directly. This is the start of the private reduction copy block
3920 for the current thread. */
3921 tree v
= create_tmp_var (integer_type_node
);
3922 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
3923 gimple
*g
= gimple_build_call (x
, 0);
3924 gimple_call_set_lhs (g
, v
);
3925 gimple_seq_add_stmt (ilist
, g
);
3926 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
3927 tskred_temp
= OMP_CLAUSE_DECL (c
);
3928 if (is_taskreg_ctx (ctx
))
3929 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
3930 tree v2
= create_tmp_var (sizetype
);
3931 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
3932 gimple_seq_add_stmt (ilist
, g
);
3933 if (ctx
->task_reductions
[0])
3934 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
3936 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
3937 tree v3
= create_tmp_var (sizetype
);
3938 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
3939 gimple_seq_add_stmt (ilist
, g
);
3940 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
3941 tskred_base
= create_tmp_var (ptr_type_node
);
3942 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
3943 gimple_seq_add_stmt (ilist
, g
);
3945 task_reduction_cnt
= 0;
3946 task_reduction_cntorig
= 0;
3947 task_reduction_other_cnt
= 0;
3948 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3950 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
3953 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
3954 bool task_reduction_p
= false;
3955 bool task_reduction_needs_orig_p
= false;
3956 tree cond
= NULL_TREE
;
3960 case OMP_CLAUSE_PRIVATE
:
3961 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
3964 case OMP_CLAUSE_SHARED
:
3965 /* Ignore shared directives in teams construct inside
3966 of target construct. */
3967 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
3968 && !is_host_teams_ctx (ctx
))
3970 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
3972 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
3973 || is_global_var (OMP_CLAUSE_DECL (c
)));
3976 case OMP_CLAUSE_FIRSTPRIVATE
:
3977 case OMP_CLAUSE_COPYIN
:
3979 case OMP_CLAUSE_LINEAR
:
3980 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
3981 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
3982 lastprivate_firstprivate
= true;
3984 case OMP_CLAUSE_REDUCTION
:
3985 case OMP_CLAUSE_IN_REDUCTION
:
3986 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
3988 task_reduction_p
= true;
3989 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
3991 task_reduction_other_cnt
++;
3996 task_reduction_cnt
++;
3997 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
3999 var
= OMP_CLAUSE_DECL (c
);
4000 /* If var is a global variable that isn't privatized
4001 in outer contexts, we don't need to look up the
4002 original address, it is always the address of the
4003 global variable itself. */
4005 || omp_is_reference (var
)
4007 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4009 task_reduction_needs_orig_p
= true;
4010 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4011 task_reduction_cntorig
++;
4015 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4016 reduction_omp_orig_ref
= true;
4018 case OMP_CLAUSE__REDUCTEMP_
:
4019 if (!is_taskreg_ctx (ctx
))
4022 case OMP_CLAUSE__LOOPTEMP_
:
4023 /* Handle _looptemp_/_reductemp_ clauses only on
4028 case OMP_CLAUSE_LASTPRIVATE
:
4029 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4031 lastprivate_firstprivate
= true;
4032 if (pass
!= 0 || is_taskloop_ctx (ctx
))
4035 /* Even without corresponding firstprivate, if
4036 decl is Fortran allocatable, it needs outer var
4039 && lang_hooks
.decls
.omp_private_outer_ref
4040 (OMP_CLAUSE_DECL (c
)))
4041 lastprivate_firstprivate
= true;
4043 case OMP_CLAUSE_ALIGNED
:
4046 var
= OMP_CLAUSE_DECL (c
);
4047 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
4048 && !is_global_var (var
))
4050 new_var
= maybe_lookup_decl (var
, ctx
);
4051 if (new_var
== NULL_TREE
)
4052 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4053 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4054 tree alarg
= omp_clause_aligned_alignment (c
);
4055 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4056 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
4057 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4058 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4059 gimplify_and_add (x
, ilist
);
4061 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
4062 && is_global_var (var
))
4064 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
4065 new_var
= lookup_decl (var
, ctx
);
4066 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4067 t
= build_fold_addr_expr_loc (clause_loc
, t
);
4068 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4069 tree alarg
= omp_clause_aligned_alignment (c
);
4070 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4071 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
4072 t
= fold_convert_loc (clause_loc
, ptype
, t
);
4073 x
= create_tmp_var (ptype
);
4074 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
4075 gimplify_and_add (t
, ilist
);
4076 t
= build_simple_mem_ref_loc (clause_loc
, x
);
4077 SET_DECL_VALUE_EXPR (new_var
, t
);
4078 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4081 case OMP_CLAUSE__CONDTEMP_
:
4082 if (is_parallel_ctx (ctx
)
4083 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
4090 if (task_reduction_p
!= (pass
>= 2))
4093 new_var
= var
= OMP_CLAUSE_DECL (c
);
4094 if ((c_kind
== OMP_CLAUSE_REDUCTION
4095 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4096 && TREE_CODE (var
) == MEM_REF
)
4098 var
= TREE_OPERAND (var
, 0);
4099 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
4100 var
= TREE_OPERAND (var
, 0);
4101 if (TREE_CODE (var
) == INDIRECT_REF
4102 || TREE_CODE (var
) == ADDR_EXPR
)
4103 var
= TREE_OPERAND (var
, 0);
4104 if (is_variable_sized (var
))
4106 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
4107 var
= DECL_VALUE_EXPR (var
);
4108 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
4109 var
= TREE_OPERAND (var
, 0);
4110 gcc_assert (DECL_P (var
));
4114 if (c_kind
!= OMP_CLAUSE_COPYIN
)
4115 new_var
= lookup_decl (var
, ctx
);
4117 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
4122 /* C/C++ array section reductions. */
4123 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4124 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4125 && var
!= OMP_CLAUSE_DECL (c
))
4130 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
4131 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
4133 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
4135 tree b
= TREE_OPERAND (orig_var
, 1);
4136 b
= maybe_lookup_decl (b
, ctx
);
4139 b
= TREE_OPERAND (orig_var
, 1);
4140 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
4142 if (integer_zerop (bias
))
4146 bias
= fold_convert_loc (clause_loc
,
4147 TREE_TYPE (b
), bias
);
4148 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4149 TREE_TYPE (b
), b
, bias
);
4151 orig_var
= TREE_OPERAND (orig_var
, 0);
4155 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4156 if (is_global_var (out
)
4157 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
4158 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
4159 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
4164 bool by_ref
= use_pointer_for_field (var
, NULL
);
4165 x
= build_receiver_ref (var
, by_ref
, ctx
);
4166 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
4167 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
4169 x
= build_fold_addr_expr (x
);
4171 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
4172 x
= build_simple_mem_ref (x
);
4173 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
4175 if (var
== TREE_OPERAND (orig_var
, 0))
4176 x
= build_fold_addr_expr (x
);
4178 bias
= fold_convert (sizetype
, bias
);
4179 x
= fold_convert (ptr_type_node
, x
);
4180 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
4181 TREE_TYPE (x
), x
, bias
);
4182 unsigned cnt
= task_reduction_cnt
- 1;
4183 if (!task_reduction_needs_orig_p
)
4184 cnt
+= (task_reduction_cntorig_full
4185 - task_reduction_cntorig
);
4187 cnt
= task_reduction_cntorig
- 1;
4188 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4189 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4190 gimplify_assign (r
, x
, ilist
);
4194 if (TREE_CODE (orig_var
) == INDIRECT_REF
4195 || TREE_CODE (orig_var
) == ADDR_EXPR
)
4196 orig_var
= TREE_OPERAND (orig_var
, 0);
4197 tree d
= OMP_CLAUSE_DECL (c
);
4198 tree type
= TREE_TYPE (d
);
4199 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
4200 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
4201 const char *name
= get_name (orig_var
);
4204 tree xv
= create_tmp_var (ptr_type_node
);
4205 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4207 unsigned cnt
= task_reduction_cnt
- 1;
4208 if (!task_reduction_needs_orig_p
)
4209 cnt
+= (task_reduction_cntorig_full
4210 - task_reduction_cntorig
);
4212 cnt
= task_reduction_cntorig
- 1;
4213 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4214 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4216 gimple
*g
= gimple_build_assign (xv
, x
);
4217 gimple_seq_add_stmt (ilist
, g
);
4221 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4223 if (ctx
->task_reductions
[1 + idx
])
4224 off
= fold_convert (sizetype
,
4225 ctx
->task_reductions
[1 + idx
]);
4227 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4229 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
4231 gimple_seq_add_stmt (ilist
, g
);
4233 x
= fold_convert (build_pointer_type (boolean_type_node
),
4235 if (TREE_CONSTANT (v
))
4236 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
4237 TYPE_SIZE_UNIT (type
));
4240 tree t
= maybe_lookup_decl (v
, ctx
);
4244 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4245 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
4247 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4249 build_int_cst (TREE_TYPE (v
), 1));
4250 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4252 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4253 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4255 cond
= create_tmp_var (TREE_TYPE (x
));
4256 gimplify_assign (cond
, x
, ilist
);
4259 else if (TREE_CONSTANT (v
))
4261 x
= create_tmp_var_raw (type
, name
);
4262 gimple_add_tmp_var (x
);
4263 TREE_ADDRESSABLE (x
) = 1;
4264 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4269 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4270 tree t
= maybe_lookup_decl (v
, ctx
);
4274 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4275 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
4276 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4278 build_int_cst (TREE_TYPE (v
), 1));
4279 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4281 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4282 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
4283 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
4286 tree ptype
= build_pointer_type (TREE_TYPE (type
));
4287 x
= fold_convert_loc (clause_loc
, ptype
, x
);
4288 tree y
= create_tmp_var (ptype
, name
);
4289 gimplify_assign (y
, x
, ilist
);
4293 if (!integer_zerop (bias
))
4295 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4297 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4299 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
4300 pointer_sized_int_node
, yb
, bias
);
4301 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
4302 yb
= create_tmp_var (ptype
, name
);
4303 gimplify_assign (yb
, x
, ilist
);
4307 d
= TREE_OPERAND (d
, 0);
4308 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
4309 d
= TREE_OPERAND (d
, 0);
4310 if (TREE_CODE (d
) == ADDR_EXPR
)
4312 if (orig_var
!= var
)
4314 gcc_assert (is_variable_sized (orig_var
));
4315 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
4317 gimplify_assign (new_var
, x
, ilist
);
4318 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
4319 tree t
= build_fold_indirect_ref (new_var
);
4320 DECL_IGNORED_P (new_var
) = 0;
4321 TREE_THIS_NOTRAP (t
) = 1;
4322 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
4323 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
4327 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
4328 build_int_cst (ptype
, 0));
4329 SET_DECL_VALUE_EXPR (new_var
, x
);
4330 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4335 gcc_assert (orig_var
== var
);
4336 if (TREE_CODE (d
) == INDIRECT_REF
)
4338 x
= create_tmp_var (ptype
, name
);
4339 TREE_ADDRESSABLE (x
) = 1;
4340 gimplify_assign (x
, yb
, ilist
);
4341 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4343 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4344 gimplify_assign (new_var
, x
, ilist
);
4346 /* GOMP_taskgroup_reduction_register memsets the whole
4347 array to zero. If the initializer is zero, we don't
4348 need to initialize it again, just mark it as ever
4349 used unconditionally, i.e. cond = true. */
4351 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
4352 && initializer_zerop (omp_reduction_init (c
,
4355 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
4357 gimple_seq_add_stmt (ilist
, g
);
4360 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4364 if (!is_parallel_ctx (ctx
))
4366 tree condv
= create_tmp_var (boolean_type_node
);
4367 g
= gimple_build_assign (condv
,
4368 build_simple_mem_ref (cond
));
4369 gimple_seq_add_stmt (ilist
, g
);
4370 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
4371 g
= gimple_build_cond (NE_EXPR
, condv
,
4372 boolean_false_node
, end
, lab1
);
4373 gimple_seq_add_stmt (ilist
, g
);
4374 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
4376 g
= gimple_build_assign (build_simple_mem_ref (cond
),
4378 gimple_seq_add_stmt (ilist
, g
);
4381 tree y1
= create_tmp_var (ptype
);
4382 gimplify_assign (y1
, y
, ilist
);
4383 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
4384 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
4385 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
4386 if (task_reduction_needs_orig_p
)
4388 y3
= create_tmp_var (ptype
);
4390 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4391 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4392 size_int (task_reduction_cnt_full
4393 + task_reduction_cntorig
- 1),
4394 NULL_TREE
, NULL_TREE
);
4397 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4398 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
4401 gimplify_assign (y3
, ref
, ilist
);
4403 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
4407 y2
= create_tmp_var (ptype
);
4408 gimplify_assign (y2
, y
, ilist
);
4410 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4412 tree ref
= build_outer_var_ref (var
, ctx
);
4413 /* For ref build_outer_var_ref already performs this. */
4414 if (TREE_CODE (d
) == INDIRECT_REF
)
4415 gcc_assert (omp_is_reference (var
));
4416 else if (TREE_CODE (d
) == ADDR_EXPR
)
4417 ref
= build_fold_addr_expr (ref
);
4418 else if (omp_is_reference (var
))
4419 ref
= build_fold_addr_expr (ref
);
4420 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
4421 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
4422 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4424 y3
= create_tmp_var (ptype
);
4425 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
4429 y4
= create_tmp_var (ptype
);
4430 gimplify_assign (y4
, ref
, dlist
);
4434 tree i
= create_tmp_var (TREE_TYPE (v
));
4435 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
4436 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4437 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
4440 i2
= create_tmp_var (TREE_TYPE (v
));
4441 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
4442 body2
= create_artificial_label (UNKNOWN_LOCATION
);
4443 end2
= create_artificial_label (UNKNOWN_LOCATION
);
4444 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
4446 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4448 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
4449 tree decl_placeholder
4450 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
4451 SET_DECL_VALUE_EXPR (decl_placeholder
,
4452 build_simple_mem_ref (y1
));
4453 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
4454 SET_DECL_VALUE_EXPR (placeholder
,
4455 y3
? build_simple_mem_ref (y3
)
4457 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
4458 x
= lang_hooks
.decls
.omp_clause_default_ctor
4459 (c
, build_simple_mem_ref (y1
),
4460 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
4462 gimplify_and_add (x
, ilist
);
4463 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4465 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4466 lower_omp (&tseq
, ctx
);
4467 gimple_seq_add_seq (ilist
, tseq
);
4469 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4472 SET_DECL_VALUE_EXPR (decl_placeholder
,
4473 build_simple_mem_ref (y2
));
4474 SET_DECL_VALUE_EXPR (placeholder
,
4475 build_simple_mem_ref (y4
));
4476 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4477 lower_omp (&tseq
, ctx
);
4478 gimple_seq_add_seq (dlist
, tseq
);
4479 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4481 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4482 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
4485 x
= lang_hooks
.decls
.omp_clause_dtor
4486 (c
, build_simple_mem_ref (y2
));
4489 gimple_seq tseq
= NULL
;
4491 gimplify_stmt (&dtor
, &tseq
);
4492 gimple_seq_add_seq (dlist
, tseq
);
4498 x
= omp_reduction_init (c
, TREE_TYPE (type
));
4499 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
4501 /* reduction(-:var) sums up the partial results, so it
4502 acts identically to reduction(+:var). */
4503 if (code
== MINUS_EXPR
)
4506 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
4509 x
= build2 (code
, TREE_TYPE (type
),
4510 build_simple_mem_ref (y4
),
4511 build_simple_mem_ref (y2
));
4512 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
4516 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
4517 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4518 gimple_seq_add_stmt (ilist
, g
);
4521 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
4522 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4523 gimple_seq_add_stmt (ilist
, g
);
4525 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
4526 build_int_cst (TREE_TYPE (i
), 1));
4527 gimple_seq_add_stmt (ilist
, g
);
4528 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
4529 gimple_seq_add_stmt (ilist
, g
);
4530 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
4533 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
4534 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4535 gimple_seq_add_stmt (dlist
, g
);
4538 g
= gimple_build_assign
4539 (y4
, POINTER_PLUS_EXPR
, y4
,
4540 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4541 gimple_seq_add_stmt (dlist
, g
);
4543 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
4544 build_int_cst (TREE_TYPE (i2
), 1));
4545 gimple_seq_add_stmt (dlist
, g
);
4546 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
4547 gimple_seq_add_stmt (dlist
, g
);
4548 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
4554 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4558 bool by_ref
= use_pointer_for_field (var
, ctx
);
4559 x
= build_receiver_ref (var
, by_ref
, ctx
);
4561 if (!omp_is_reference (var
))
4562 x
= build_fold_addr_expr (x
);
4563 x
= fold_convert (ptr_type_node
, x
);
4564 unsigned cnt
= task_reduction_cnt
- 1;
4565 if (!task_reduction_needs_orig_p
)
4566 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
4568 cnt
= task_reduction_cntorig
- 1;
4569 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4570 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4571 gimplify_assign (r
, x
, ilist
);
4576 tree type
= TREE_TYPE (new_var
);
4577 if (!omp_is_reference (var
))
4578 type
= build_pointer_type (type
);
4579 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4581 unsigned cnt
= task_reduction_cnt
- 1;
4582 if (!task_reduction_needs_orig_p
)
4583 cnt
+= (task_reduction_cntorig_full
4584 - task_reduction_cntorig
);
4586 cnt
= task_reduction_cntorig
- 1;
4587 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4588 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4592 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4594 if (ctx
->task_reductions
[1 + idx
])
4595 off
= fold_convert (sizetype
,
4596 ctx
->task_reductions
[1 + idx
]);
4598 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4600 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
4603 x
= fold_convert (type
, x
);
4605 if (omp_is_reference (var
))
4607 gimplify_assign (new_var
, x
, ilist
);
4609 new_var
= build_simple_mem_ref (new_var
);
4613 t
= create_tmp_var (type
);
4614 gimplify_assign (t
, x
, ilist
);
4615 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
4616 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4618 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
4619 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
4620 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4621 cond
= create_tmp_var (TREE_TYPE (t
));
4622 gimplify_assign (cond
, t
, ilist
);
4624 else if (is_variable_sized (var
))
4626 /* For variable sized types, we need to allocate the
4627 actual storage here. Call alloca and store the
4628 result in the pointer decl that we created elsewhere. */
4632 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
4637 ptr
= DECL_VALUE_EXPR (new_var
);
4638 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
4639 ptr
= TREE_OPERAND (ptr
, 0);
4640 gcc_assert (DECL_P (ptr
));
4641 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
4643 /* void *tmp = __builtin_alloca */
4644 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4645 stmt
= gimple_build_call (atmp
, 2, x
,
4646 size_int (DECL_ALIGN (var
)));
4647 tmp
= create_tmp_var_raw (ptr_type_node
);
4648 gimple_add_tmp_var (tmp
);
4649 gimple_call_set_lhs (stmt
, tmp
);
4651 gimple_seq_add_stmt (ilist
, stmt
);
4653 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
4654 gimplify_assign (ptr
, x
, ilist
);
4657 else if (omp_is_reference (var
)
4658 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
4659 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
4661 /* For references that are being privatized for Fortran,
4662 allocate new backing storage for the new pointer
4663 variable. This allows us to avoid changing all the
4664 code that expects a pointer to something that expects
4665 a direct variable. */
4669 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
4670 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
4672 x
= build_receiver_ref (var
, false, ctx
);
4673 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4675 else if (TREE_CONSTANT (x
))
4677 /* For reduction in SIMD loop, defer adding the
4678 initialization of the reference, because if we decide
4679 to use SIMD array for it, the initilization could cause
4680 expansion ICE. Ditto for other privatization clauses. */
4685 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
4687 gimple_add_tmp_var (x
);
4688 TREE_ADDRESSABLE (x
) = 1;
4689 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4695 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4696 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
4697 tree al
= size_int (TYPE_ALIGN (rtype
));
4698 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
4703 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4704 gimplify_assign (new_var
, x
, ilist
);
4707 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
4709 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4710 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4711 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4719 switch (OMP_CLAUSE_CODE (c
))
4721 case OMP_CLAUSE_SHARED
:
4722 /* Ignore shared directives in teams construct inside
4723 target construct. */
4724 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4725 && !is_host_teams_ctx (ctx
))
4727 /* Shared global vars are just accessed directly. */
4728 if (is_global_var (new_var
))
4730 /* For taskloop firstprivate/lastprivate, represented
4731 as firstprivate and shared clause on the task, new_var
4732 is the firstprivate var. */
4733 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
4735 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4736 needs to be delayed until after fixup_child_record_type so
4737 that we get the correct type during the dereference. */
4738 by_ref
= use_pointer_for_field (var
, ctx
);
4739 x
= build_receiver_ref (var
, by_ref
, ctx
);
4740 SET_DECL_VALUE_EXPR (new_var
, x
);
4741 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4743 /* ??? If VAR is not passed by reference, and the variable
4744 hasn't been initialized yet, then we'll get a warning for
4745 the store into the omp_data_s structure. Ideally, we'd be
4746 able to notice this and not store anything at all, but
4747 we're generating code too early. Suppress the warning. */
4749 TREE_NO_WARNING (var
) = 1;
4752 case OMP_CLAUSE__CONDTEMP_
:
4753 if (is_parallel_ctx (ctx
))
4755 x
= build_receiver_ref (var
, false, ctx
);
4756 SET_DECL_VALUE_EXPR (new_var
, x
);
4757 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4759 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
4761 x
= build_zero_cst (TREE_TYPE (var
));
4766 case OMP_CLAUSE_LASTPRIVATE
:
4767 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4771 case OMP_CLAUSE_PRIVATE
:
4772 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
4773 x
= build_outer_var_ref (var
, ctx
);
4774 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
4776 if (is_task_ctx (ctx
))
4777 x
= build_receiver_ref (var
, false, ctx
);
4779 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
4785 nx
= lang_hooks
.decls
.omp_clause_default_ctor
4786 (c
, unshare_expr (new_var
), x
);
4789 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
4790 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
4791 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4792 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
4793 || omp_is_reference (var
))
4794 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4797 if (omp_is_reference (var
))
4799 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
4800 tree new_vard
= TREE_OPERAND (new_var
, 0);
4801 gcc_assert (DECL_P (new_vard
));
4802 SET_DECL_VALUE_EXPR (new_vard
,
4803 build_fold_addr_expr (lvar
));
4804 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
4808 x
= lang_hooks
.decls
.omp_clause_default_ctor
4809 (c
, unshare_expr (ivar
), x
);
4810 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
4812 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
4813 unshare_expr (ivar
), x
);
4817 gimplify_and_add (x
, &llist
[0]);
4818 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4819 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
4824 gcc_assert (TREE_CODE (v
) == MEM_REF
);
4825 v
= TREE_OPERAND (v
, 0);
4826 gcc_assert (DECL_P (v
));
4828 v
= *ctx
->lastprivate_conditional_map
->get (v
);
4829 tree t
= create_tmp_var (TREE_TYPE (v
));
4830 tree z
= build_zero_cst (TREE_TYPE (v
));
4832 = build_outer_var_ref (var
, ctx
,
4833 OMP_CLAUSE_LASTPRIVATE
);
4834 gimple_seq_add_stmt (dlist
,
4835 gimple_build_assign (t
, z
));
4836 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
4837 tree civar
= DECL_VALUE_EXPR (v
);
4838 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
4839 civar
= unshare_expr (civar
);
4840 TREE_OPERAND (civar
, 1) = sctx
.idx
;
4841 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
4842 unshare_expr (civar
));
4843 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
4844 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
4845 orig_v
, unshare_expr (ivar
)));
4846 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
4848 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
4850 gimple_seq tseq
= NULL
;
4851 gimplify_and_add (x
, &tseq
);
4853 lower_omp (&tseq
, ctx
->outer
);
4854 gimple_seq_add_seq (&llist
[1], tseq
);
4858 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4861 gimple_seq tseq
= NULL
;
4864 gimplify_stmt (&dtor
, &tseq
);
4865 gimple_seq_add_seq (&llist
[1], tseq
);
4870 if (omp_is_reference (var
))
4872 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
4873 tree new_vard
= TREE_OPERAND (new_var
, 0);
4874 gcc_assert (DECL_P (new_vard
));
4875 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
4876 x
= TYPE_SIZE_UNIT (type
);
4877 if (TREE_CONSTANT (x
))
4879 x
= create_tmp_var_raw (type
, get_name (var
));
4880 gimple_add_tmp_var (x
);
4881 TREE_ADDRESSABLE (x
) = 1;
4882 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4883 x
= fold_convert_loc (clause_loc
,
4884 TREE_TYPE (new_vard
), x
);
4885 gimplify_assign (new_vard
, x
, ilist
);
4890 gimplify_and_add (nx
, ilist
);
4894 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
4897 gimple_seq tseq
= NULL
;
4900 gimplify_stmt (&dtor
, &tseq
);
4901 gimple_seq_add_seq (dlist
, tseq
);
4905 case OMP_CLAUSE_LINEAR
:
4906 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
4907 goto do_firstprivate
;
4908 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4911 x
= build_outer_var_ref (var
, ctx
);
4914 case OMP_CLAUSE_FIRSTPRIVATE
:
4915 if (is_task_ctx (ctx
))
4917 if ((omp_is_reference (var
)
4918 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
4919 || is_variable_sized (var
))
4921 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
4923 || use_pointer_for_field (var
, NULL
))
4925 x
= build_receiver_ref (var
, false, ctx
);
4926 SET_DECL_VALUE_EXPR (new_var
, x
);
4927 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4931 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
4932 && omp_is_reference (var
))
4934 x
= build_outer_var_ref (var
, ctx
);
4935 gcc_assert (TREE_CODE (x
) == MEM_REF
4936 && integer_zerop (TREE_OPERAND (x
, 1)));
4937 x
= TREE_OPERAND (x
, 0);
4938 x
= lang_hooks
.decls
.omp_clause_copy_ctor
4939 (c
, unshare_expr (new_var
), x
);
4940 gimplify_and_add (x
, ilist
);
4944 x
= build_outer_var_ref (var
, ctx
);
4947 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
4948 && gimple_omp_for_combined_into_p (ctx
->stmt
))
4950 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
4951 tree stept
= TREE_TYPE (t
);
4952 tree ct
= omp_find_clause (clauses
,
4953 OMP_CLAUSE__LOOPTEMP_
);
4955 tree l
= OMP_CLAUSE_DECL (ct
);
4956 tree n1
= fd
->loop
.n1
;
4957 tree step
= fd
->loop
.step
;
4958 tree itype
= TREE_TYPE (l
);
4959 if (POINTER_TYPE_P (itype
))
4960 itype
= signed_type_for (itype
);
4961 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
4962 if (TYPE_UNSIGNED (itype
)
4963 && fd
->loop
.cond_code
== GT_EXPR
)
4964 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
4965 fold_build1 (NEGATE_EXPR
, itype
, l
),
4966 fold_build1 (NEGATE_EXPR
,
4969 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
4970 t
= fold_build2 (MULT_EXPR
, stept
,
4971 fold_convert (stept
, l
), t
);
4973 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4975 if (omp_is_reference (var
))
4977 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
4978 tree new_vard
= TREE_OPERAND (new_var
, 0);
4979 gcc_assert (DECL_P (new_vard
));
4980 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
4981 nx
= TYPE_SIZE_UNIT (type
);
4982 if (TREE_CONSTANT (nx
))
4984 nx
= create_tmp_var_raw (type
,
4986 gimple_add_tmp_var (nx
);
4987 TREE_ADDRESSABLE (nx
) = 1;
4988 nx
= build_fold_addr_expr_loc (clause_loc
,
4990 nx
= fold_convert_loc (clause_loc
,
4991 TREE_TYPE (new_vard
),
4993 gimplify_assign (new_vard
, nx
, ilist
);
4997 x
= lang_hooks
.decls
.omp_clause_linear_ctor
4999 gimplify_and_add (x
, ilist
);
5003 if (POINTER_TYPE_P (TREE_TYPE (x
)))
5004 x
= fold_build2 (POINTER_PLUS_EXPR
,
5005 TREE_TYPE (x
), x
, t
);
5007 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5010 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
5011 || TREE_ADDRESSABLE (new_var
)
5012 || omp_is_reference (var
))
5013 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5016 if (omp_is_reference (var
))
5018 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5019 tree new_vard
= TREE_OPERAND (new_var
, 0);
5020 gcc_assert (DECL_P (new_vard
));
5021 SET_DECL_VALUE_EXPR (new_vard
,
5022 build_fold_addr_expr (lvar
));
5023 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5025 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
5027 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
5028 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
5029 gimplify_and_add (x
, ilist
);
5030 gimple_stmt_iterator gsi
5031 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
5033 = gimple_build_assign (unshare_expr (lvar
), iv
);
5034 gsi_insert_before_without_update (&gsi
, g
,
5036 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5037 enum tree_code code
= PLUS_EXPR
;
5038 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
5039 code
= POINTER_PLUS_EXPR
;
5040 g
= gimple_build_assign (iv
, code
, iv
, t
);
5041 gsi_insert_before_without_update (&gsi
, g
,
5045 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5046 (c
, unshare_expr (ivar
), x
);
5047 gimplify_and_add (x
, &llist
[0]);
5048 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5051 gimple_seq tseq
= NULL
;
5054 gimplify_stmt (&dtor
, &tseq
);
5055 gimple_seq_add_seq (&llist
[1], tseq
);
5059 if (omp_is_reference (var
))
5061 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5062 tree new_vard
= TREE_OPERAND (new_var
, 0);
5063 gcc_assert (DECL_P (new_vard
));
5064 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5065 nx
= TYPE_SIZE_UNIT (type
);
5066 if (TREE_CONSTANT (nx
))
5068 nx
= create_tmp_var_raw (type
, get_name (var
));
5069 gimple_add_tmp_var (nx
);
5070 TREE_ADDRESSABLE (nx
) = 1;
5071 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
5072 nx
= fold_convert_loc (clause_loc
,
5073 TREE_TYPE (new_vard
), nx
);
5074 gimplify_assign (new_vard
, nx
, ilist
);
5078 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5079 (c
, unshare_expr (new_var
), x
);
5080 gimplify_and_add (x
, ilist
);
5083 case OMP_CLAUSE__LOOPTEMP_
:
5084 case OMP_CLAUSE__REDUCTEMP_
:
5085 gcc_assert (is_taskreg_ctx (ctx
));
5086 x
= build_outer_var_ref (var
, ctx
);
5087 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5088 gimplify_and_add (x
, ilist
);
5091 case OMP_CLAUSE_COPYIN
:
5092 by_ref
= use_pointer_for_field (var
, NULL
);
5093 x
= build_receiver_ref (var
, by_ref
, ctx
);
5094 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
5095 append_to_statement_list (x
, ©in_seq
);
5096 copyin_by_ref
|= by_ref
;
5099 case OMP_CLAUSE_REDUCTION
:
5100 case OMP_CLAUSE_IN_REDUCTION
:
5101 /* OpenACC reductions are initialized using the
5102 GOACC_REDUCTION internal function. */
5103 if (is_gimple_omp_oacc (ctx
->stmt
))
5105 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5107 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5109 tree ptype
= TREE_TYPE (placeholder
);
5112 x
= error_mark_node
;
5113 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
5114 && !task_reduction_needs_orig_p
)
5116 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5118 tree pptype
= build_pointer_type (ptype
);
5119 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5120 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5121 size_int (task_reduction_cnt_full
5122 + task_reduction_cntorig
- 1),
5123 NULL_TREE
, NULL_TREE
);
5127 = *ctx
->task_reduction_map
->get (c
);
5128 x
= task_reduction_read (ilist
, tskred_temp
,
5129 pptype
, 7 + 3 * idx
);
5131 x
= fold_convert (pptype
, x
);
5132 x
= build_simple_mem_ref (x
);
5137 x
= build_outer_var_ref (var
, ctx
);
5139 if (omp_is_reference (var
)
5140 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
5141 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5143 SET_DECL_VALUE_EXPR (placeholder
, x
);
5144 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5145 tree new_vard
= new_var
;
5146 if (omp_is_reference (var
))
5148 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5149 new_vard
= TREE_OPERAND (new_var
, 0);
5150 gcc_assert (DECL_P (new_vard
));
5153 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5156 if (new_vard
== new_var
)
5158 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
5159 SET_DECL_VALUE_EXPR (new_var
, ivar
);
5163 SET_DECL_VALUE_EXPR (new_vard
,
5164 build_fold_addr_expr (ivar
));
5165 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5167 x
= lang_hooks
.decls
.omp_clause_default_ctor
5168 (c
, unshare_expr (ivar
),
5169 build_outer_var_ref (var
, ctx
));
5171 gimplify_and_add (x
, &llist
[0]);
5172 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5174 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5175 lower_omp (&tseq
, ctx
);
5176 gimple_seq_add_seq (&llist
[0], tseq
);
5178 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5179 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5180 lower_omp (&tseq
, ctx
);
5181 gimple_seq_add_seq (&llist
[1], tseq
);
5182 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5183 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5184 if (new_vard
== new_var
)
5185 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5187 SET_DECL_VALUE_EXPR (new_vard
,
5188 build_fold_addr_expr (lvar
));
5189 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5194 gimplify_stmt (&dtor
, &tseq
);
5195 gimple_seq_add_seq (&llist
[1], tseq
);
5199 /* If this is a reference to constant size reduction var
5200 with placeholder, we haven't emitted the initializer
5201 for it because it is undesirable if SIMD arrays are used.
5202 But if they aren't used, we need to emit the deferred
5203 initialization now. */
5204 else if (omp_is_reference (var
) && is_simd
)
5205 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5207 tree lab2
= NULL_TREE
;
5211 if (!is_parallel_ctx (ctx
))
5213 tree condv
= create_tmp_var (boolean_type_node
);
5214 tree m
= build_simple_mem_ref (cond
);
5215 g
= gimple_build_assign (condv
, m
);
5216 gimple_seq_add_stmt (ilist
, g
);
5218 = create_artificial_label (UNKNOWN_LOCATION
);
5219 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5220 g
= gimple_build_cond (NE_EXPR
, condv
,
5223 gimple_seq_add_stmt (ilist
, g
);
5224 gimple_seq_add_stmt (ilist
,
5225 gimple_build_label (lab1
));
5227 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5229 gimple_seq_add_stmt (ilist
, g
);
5231 x
= lang_hooks
.decls
.omp_clause_default_ctor
5232 (c
, unshare_expr (new_var
),
5234 : build_outer_var_ref (var
, ctx
));
5236 gimplify_and_add (x
, ilist
);
5237 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5239 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5240 lower_omp (&tseq
, ctx
);
5241 gimple_seq_add_seq (ilist
, tseq
);
5243 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5246 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5247 lower_omp (&tseq
, ctx
);
5248 gimple_seq_add_seq (dlist
, tseq
);
5249 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5251 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5255 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5262 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
5263 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
5264 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5269 tree lab2
= NULL_TREE
;
5270 /* GOMP_taskgroup_reduction_register memsets the whole
5271 array to zero. If the initializer is zero, we don't
5272 need to initialize it again, just mark it as ever
5273 used unconditionally, i.e. cond = true. */
5274 if (initializer_zerop (x
))
5276 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5278 gimple_seq_add_stmt (ilist
, g
);
5283 if (!cond) { cond = true; new_var = x; } */
5284 if (!is_parallel_ctx (ctx
))
5286 tree condv
= create_tmp_var (boolean_type_node
);
5287 tree m
= build_simple_mem_ref (cond
);
5288 g
= gimple_build_assign (condv
, m
);
5289 gimple_seq_add_stmt (ilist
, g
);
5291 = create_artificial_label (UNKNOWN_LOCATION
);
5292 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5293 g
= gimple_build_cond (NE_EXPR
, condv
,
5296 gimple_seq_add_stmt (ilist
, g
);
5297 gimple_seq_add_stmt (ilist
,
5298 gimple_build_label (lab1
));
5300 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5302 gimple_seq_add_stmt (ilist
, g
);
5303 gimplify_assign (new_var
, x
, ilist
);
5305 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5309 /* reduction(-:var) sums up the partial results, so it
5310 acts identically to reduction(+:var). */
5311 if (code
== MINUS_EXPR
)
5314 tree new_vard
= new_var
;
5315 if (is_simd
&& omp_is_reference (var
))
5317 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5318 new_vard
= TREE_OPERAND (new_var
, 0);
5319 gcc_assert (DECL_P (new_vard
));
5322 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5325 tree ref
= build_outer_var_ref (var
, ctx
);
5327 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
5332 simt_lane
= create_tmp_var (unsigned_type_node
);
5333 x
= build_call_expr_internal_loc
5334 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
5335 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
5336 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
5337 gimplify_assign (ivar
, x
, &llist
[2]);
5339 x
= build2 (code
, TREE_TYPE (ref
), ref
, ivar
);
5340 ref
= build_outer_var_ref (var
, ctx
);
5341 gimplify_assign (ref
, x
, &llist
[1]);
5343 if (new_vard
!= new_var
)
5345 SET_DECL_VALUE_EXPR (new_vard
,
5346 build_fold_addr_expr (lvar
));
5347 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5352 if (omp_is_reference (var
) && is_simd
)
5353 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5354 gimplify_assign (new_var
, x
, ilist
);
5357 tree ref
= build_outer_var_ref (var
, ctx
);
5359 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
5360 ref
= build_outer_var_ref (var
, ctx
);
5361 gimplify_assign (ref
, x
, dlist
);
5374 tree clobber
= build_constructor (TREE_TYPE (tskred_avar
), NULL
);
5375 TREE_THIS_VOLATILE (clobber
) = 1;
5376 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
5379 if (known_eq (sctx
.max_vf
, 1U))
5381 sctx
.is_simt
= false;
5382 if (ctx
->lastprivate_conditional_map
)
5384 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
5386 /* Signal to lower_omp_1 that it should use parent context. */
5387 ctx
->combined_into_simd_safelen0
= true;
5388 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5389 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5390 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5392 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
5394 = ctx
->lastprivate_conditional_map
->get (o
);
5395 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
->outer
);
5397 = ctx
->outer
->lastprivate_conditional_map
->get (po
);
5403 /* When not vectorized, treat lastprivate(conditional:) like
5404 normal lastprivate, as there will be just one simd lane
5405 writing the privatized variable. */
5406 delete ctx
->lastprivate_conditional_map
;
5407 ctx
->lastprivate_conditional_map
= NULL
;
5412 if (nonconst_simd_if
)
5414 if (sctx
.lane
== NULL_TREE
)
5416 sctx
.idx
= create_tmp_var (unsigned_type_node
);
5417 sctx
.lane
= create_tmp_var (unsigned_type_node
);
5419 /* FIXME: For now. */
5420 sctx
.is_simt
= false;
5423 if (sctx
.lane
|| sctx
.is_simt
)
5425 uid
= create_tmp_var (ptr_type_node
, "simduid");
5426 /* Don't want uninit warnings on simduid, it is always uninitialized,
5427 but we use it not for the value, but for the DECL_UID only. */
5428 TREE_NO_WARNING (uid
) = 1;
5429 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
5430 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
5431 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
5432 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
5434 /* Emit calls denoting privatized variables and initializing a pointer to
5435 structure that holds private variables as fields after ompdevlow pass. */
5438 sctx
.simt_eargs
[0] = uid
;
5440 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
5441 gimple_call_set_lhs (g
, uid
);
5442 gimple_seq_add_stmt (ilist
, g
);
5443 sctx
.simt_eargs
.release ();
5445 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
5446 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
5447 gimple_call_set_lhs (g
, simtrec
);
5448 gimple_seq_add_stmt (ilist
, g
);
5452 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
5453 1 + (nonconst_simd_if
!= NULL
),
5454 uid
, nonconst_simd_if
);
5455 gimple_call_set_lhs (g
, sctx
.lane
);
5456 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
5457 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
5458 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
5459 build_int_cst (unsigned_type_node
, 0));
5460 gimple_seq_add_stmt (ilist
, g
);
5461 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
5464 tree simt_vf
= create_tmp_var (unsigned_type_node
);
5465 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
5466 gimple_call_set_lhs (g
, simt_vf
);
5467 gimple_seq_add_stmt (dlist
, g
);
5469 tree t
= build_int_cst (unsigned_type_node
, 1);
5470 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
5471 gimple_seq_add_stmt (dlist
, g
);
5473 t
= build_int_cst (unsigned_type_node
, 0);
5474 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
5475 gimple_seq_add_stmt (dlist
, g
);
5477 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5478 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
5479 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5480 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
5481 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
5483 gimple_seq_add_seq (dlist
, llist
[2]);
5485 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
5486 gimple_seq_add_stmt (dlist
, g
);
5488 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
5489 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
5490 gimple_seq_add_stmt (dlist
, g
);
5492 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
5494 for (int i
= 0; i
< 2; i
++)
5497 tree vf
= create_tmp_var (unsigned_type_node
);
5498 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
5499 gimple_call_set_lhs (g
, vf
);
5500 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
5501 gimple_seq_add_stmt (seq
, g
);
5502 tree t
= build_int_cst (unsigned_type_node
, 0);
5503 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
5504 gimple_seq_add_stmt (seq
, g
);
5505 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5506 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
5507 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5508 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
5509 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
5510 gimple_seq_add_seq (seq
, llist
[i
]);
5511 t
= build_int_cst (unsigned_type_node
, 1);
5512 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
5513 gimple_seq_add_stmt (seq
, g
);
5514 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
5515 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
5516 gimple_seq_add_stmt (seq
, g
);
5517 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
5522 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
5524 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
5525 gimple_seq_add_stmt (dlist
, g
);
5528 /* The copyin sequence is not to be executed by the main thread, since
5529 that would result in self-copies. Perhaps not visible to scalars,
5530 but it certainly is to C++ operator=. */
5533 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
5535 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
5536 build_int_cst (TREE_TYPE (x
), 0));
5537 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
5538 gimplify_and_add (x
, ilist
);
5541 /* If any copyin variable is passed by reference, we must ensure the
5542 master thread doesn't modify it before it is copied over in all
5543 threads. Similarly for variables in both firstprivate and
5544 lastprivate clauses we need to ensure the lastprivate copying
5545 happens after firstprivate copying in all threads. And similarly
5546 for UDRs if initializer expression refers to omp_orig. */
5547 if (copyin_by_ref
|| lastprivate_firstprivate
|| reduction_omp_orig_ref
)
5549 /* Don't add any barrier for #pragma omp simd or
5550 #pragma omp distribute. */
5551 if (!is_task_ctx (ctx
)
5552 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
5553 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
5554 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
5557 /* If max_vf is non-zero, then we can use only a vectorization factor
5558 up to the max_vf we chose. So stick it into the safelen clause. */
5559 if (maybe_ne (sctx
.max_vf
, 0U))
5561 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
5562 OMP_CLAUSE_SAFELEN
);
5563 poly_uint64 safe_len
;
5565 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
5566 && maybe_gt (safe_len
, sctx
.max_vf
)))
5568 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
5569 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
5571 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
5572 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
5577 /* Create temporary variables for lastprivate(conditional:) implementation
5578 in context CTX with CLAUSES. */
5581 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
5583 tree iter_type
= NULL_TREE
;
5584 tree cond_ptr
= NULL_TREE
;
5585 tree iter_var
= NULL_TREE
;
5586 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
5587 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
);
5588 tree next
= *clauses
;
5589 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5590 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5591 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5595 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
5597 if (iter_type
== NULL_TREE
)
5599 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
5600 iter_var
= create_tmp_var_raw (iter_type
);
5601 DECL_CONTEXT (iter_var
) = current_function_decl
;
5602 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
5603 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
5604 ctx
->block_vars
= iter_var
;
5606 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
5607 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
5608 OMP_CLAUSE_DECL (c3
) = iter_var
;
5609 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
5611 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
5613 next
= OMP_CLAUSE_CHAIN (cc
);
5614 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
5615 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
5616 ctx
->lastprivate_conditional_map
->put (o
, v
);
5619 if (iter_type
== NULL
)
5621 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
5623 struct omp_for_data fd
;
5624 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
5626 iter_type
= unsigned_type_for (fd
.iter_type
);
5628 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
5629 iter_type
= unsigned_type_node
;
5630 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
5634 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
5635 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
5639 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
5640 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
5641 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
5642 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
5643 ctx
->block_vars
= cond_ptr
;
5644 c2
= build_omp_clause (UNKNOWN_LOCATION
,
5645 OMP_CLAUSE__CONDTEMP_
);
5646 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
5647 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
5650 iter_var
= create_tmp_var_raw (iter_type
);
5651 DECL_CONTEXT (iter_var
) = current_function_decl
;
5652 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
5653 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
5654 ctx
->block_vars
= iter_var
;
5656 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
5657 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
5658 OMP_CLAUSE_DECL (c3
) = iter_var
;
5659 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
5660 OMP_CLAUSE_CHAIN (c2
) = c3
;
5661 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
5663 tree v
= create_tmp_var_raw (iter_type
);
5664 DECL_CONTEXT (v
) = current_function_decl
;
5665 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
5666 DECL_CHAIN (v
) = ctx
->block_vars
;
5667 ctx
->block_vars
= v
;
5668 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
5669 ctx
->lastprivate_conditional_map
->put (o
, v
);
5674 /* Generate code to implement the LASTPRIVATE clauses. This is used for
5675 both parallel and workshare constructs. PREDICATE may be NULL if it's
5676 always true. BODY_P is the sequence to insert early initialization
5677 if needed, STMT_LIST is where the non-conditional lastprivate handling
5678 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
5682 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
5683 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
5686 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
5687 bool par_clauses
= false;
5688 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
5689 unsigned HOST_WIDE_INT conditional_off
= 0;
5691 /* Early exit if there are no lastprivate or linear clauses. */
5692 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
5693 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
5694 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
5695 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
5697 if (clauses
== NULL
)
5699 /* If this was a workshare clause, see if it had been combined
5700 with its parallel. In that case, look for the clauses on the
5701 parallel statement itself. */
5702 if (is_parallel_ctx (ctx
))
5706 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
5709 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
5710 OMP_CLAUSE_LASTPRIVATE
);
5711 if (clauses
== NULL
)
5716 bool maybe_simt
= false;
5717 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
5718 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
5720 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
5721 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
5723 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
5729 tree label_true
, arm1
, arm2
;
5730 enum tree_code pred_code
= TREE_CODE (predicate
);
5732 label
= create_artificial_label (UNKNOWN_LOCATION
);
5733 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
5734 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
5736 arm1
= TREE_OPERAND (predicate
, 0);
5737 arm2
= TREE_OPERAND (predicate
, 1);
5738 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
5739 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
5744 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
5745 arm2
= boolean_false_node
;
5746 pred_code
= NE_EXPR
;
5750 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
5751 c
= fold_convert (integer_type_node
, c
);
5752 simtcond
= create_tmp_var (integer_type_node
);
5753 gimplify_assign (simtcond
, c
, stmt_list
);
5754 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
5756 c
= create_tmp_var (integer_type_node
);
5757 gimple_call_set_lhs (g
, c
);
5758 gimple_seq_add_stmt (stmt_list
, g
);
5759 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
5763 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
5764 gimple_seq_add_stmt (stmt_list
, stmt
);
5765 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
5768 tree cond_ptr
= NULL_TREE
;
5769 for (c
= clauses
; c
;)
5772 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5773 gimple_seq
*this_stmt_list
= stmt_list
;
5774 tree lab2
= NULL_TREE
;
5776 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5777 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
5778 && ctx
->lastprivate_conditional_map
5779 && !ctx
->combined_into_simd_safelen0
)
5781 gcc_assert (body_p
);
5784 if (cond_ptr
== NULL_TREE
)
5786 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
5787 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
5789 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
5790 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
5791 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
5792 gimplify_assign (v
, build_zero_cst (type
), body_p
);
5793 this_stmt_list
= cstmt_list
;
5795 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
5797 mem
= build2 (MEM_REF
, type
, cond_ptr
,
5798 build_int_cst (TREE_TYPE (cond_ptr
),
5800 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
5803 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
5804 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
5805 tree mem2
= copy_node (mem
);
5806 gimple_seq seq
= NULL
;
5807 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
5808 gimple_seq_add_seq (this_stmt_list
, seq
);
5809 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
5810 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5811 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
5812 gimple_seq_add_stmt (this_stmt_list
, g
);
5813 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
5814 gimplify_assign (mem2
, v
, this_stmt_list
);
5817 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5818 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5819 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
5821 var
= OMP_CLAUSE_DECL (c
);
5822 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5823 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
5824 && is_taskloop_ctx (ctx
))
5826 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
5827 new_var
= lookup_decl (var
, ctx
->outer
);
5831 new_var
= lookup_decl (var
, ctx
);
5832 /* Avoid uninitialized warnings for lastprivate and
5833 for linear iterators. */
5835 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5836 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
5837 TREE_NO_WARNING (new_var
) = 1;
5840 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
5842 tree val
= DECL_VALUE_EXPR (new_var
);
5843 if (TREE_CODE (val
) == ARRAY_REF
5844 && VAR_P (TREE_OPERAND (val
, 0))
5845 && lookup_attribute ("omp simd array",
5846 DECL_ATTRIBUTES (TREE_OPERAND (val
,
5849 if (lastlane
== NULL
)
5851 lastlane
= create_tmp_var (unsigned_type_node
);
5853 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
5855 TREE_OPERAND (val
, 1));
5856 gimple_call_set_lhs (g
, lastlane
);
5857 gimple_seq_add_stmt (this_stmt_list
, g
);
5859 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
5860 TREE_OPERAND (val
, 0), lastlane
,
5861 NULL_TREE
, NULL_TREE
);
5864 else if (maybe_simt
)
5866 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
5867 ? DECL_VALUE_EXPR (new_var
)
5869 if (simtlast
== NULL
)
5871 simtlast
= create_tmp_var (unsigned_type_node
);
5872 gcall
*g
= gimple_build_call_internal
5873 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
5874 gimple_call_set_lhs (g
, simtlast
);
5875 gimple_seq_add_stmt (this_stmt_list
, g
);
5877 x
= build_call_expr_internal_loc
5878 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
5879 TREE_TYPE (val
), 2, val
, simtlast
);
5880 new_var
= unshare_expr (new_var
);
5881 gimplify_assign (new_var
, x
, this_stmt_list
);
5882 new_var
= unshare_expr (new_var
);
5885 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5886 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
5888 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
5889 gimple_seq_add_seq (this_stmt_list
,
5890 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
5891 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
5893 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5894 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
5896 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
5897 gimple_seq_add_seq (this_stmt_list
,
5898 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
5899 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
5903 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5904 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c
))
5906 gcc_checking_assert (is_taskloop_ctx (ctx
));
5907 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
5909 if (is_global_var (ovar
))
5913 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
5914 if (omp_is_reference (var
))
5915 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5916 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
5917 gimplify_and_add (x
, this_stmt_list
);
5920 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
5924 c
= OMP_CLAUSE_CHAIN (c
);
5925 if (c
== NULL
&& !par_clauses
)
5927 /* If this was a workshare clause, see if it had been combined
5928 with its parallel. In that case, continue looking for the
5929 clauses also on the parallel statement itself. */
5930 if (is_parallel_ctx (ctx
))
5934 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
5937 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
5938 OMP_CLAUSE_LASTPRIVATE
);
5944 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
5947 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
5948 (which might be a placeholder). INNER is true if this is an inner
5949 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
5950 join markers. Generate the before-loop forking sequence in
5951 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
5952 general form of these sequences is
5954 GOACC_REDUCTION_SETUP
5956 GOACC_REDUCTION_INIT
5958 GOACC_REDUCTION_FINI
5960 GOACC_REDUCTION_TEARDOWN. */
5963 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
5964 gcall
*fork
, gcall
*join
, gimple_seq
*fork_seq
,
5965 gimple_seq
*join_seq
, omp_context
*ctx
)
5967 gimple_seq before_fork
= NULL
;
5968 gimple_seq after_fork
= NULL
;
5969 gimple_seq before_join
= NULL
;
5970 gimple_seq after_join
= NULL
;
5971 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
5972 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
5973 unsigned offset
= 0;
5975 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5976 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
5978 tree orig
= OMP_CLAUSE_DECL (c
);
5979 tree var
= maybe_lookup_decl (orig
, ctx
);
5980 tree ref_to_res
= NULL_TREE
;
5981 tree incoming
, outgoing
, v1
, v2
, v3
;
5982 bool is_private
= false;
5984 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
5985 if (rcode
== MINUS_EXPR
)
5987 else if (rcode
== TRUTH_ANDIF_EXPR
)
5988 rcode
= BIT_AND_EXPR
;
5989 else if (rcode
== TRUTH_ORIF_EXPR
)
5990 rcode
= BIT_IOR_EXPR
;
5991 tree op
= build_int_cst (unsigned_type_node
, rcode
);
5996 incoming
= outgoing
= var
;
6000 /* See if an outer construct also reduces this variable. */
6001 omp_context
*outer
= ctx
;
6003 while (omp_context
*probe
= outer
->outer
)
6005 enum gimple_code type
= gimple_code (probe
->stmt
);
6010 case GIMPLE_OMP_FOR
:
6011 cls
= gimple_omp_for_clauses (probe
->stmt
);
6014 case GIMPLE_OMP_TARGET
:
6015 if (gimple_omp_target_kind (probe
->stmt
)
6016 != GF_OMP_TARGET_KIND_OACC_PARALLEL
)
6019 cls
= gimple_omp_target_clauses (probe
->stmt
);
6027 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
6028 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
6029 && orig
== OMP_CLAUSE_DECL (cls
))
6031 incoming
= outgoing
= lookup_decl (orig
, probe
);
6032 goto has_outer_reduction
;
6034 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
6035 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
6036 && orig
== OMP_CLAUSE_DECL (cls
))
6044 /* This is the outermost construct with this reduction,
6045 see if there's a mapping for it. */
6046 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
6047 && maybe_lookup_field (orig
, outer
) && !is_private
)
6049 ref_to_res
= build_receiver_ref (orig
, false, outer
);
6050 if (omp_is_reference (orig
))
6051 ref_to_res
= build_simple_mem_ref (ref_to_res
);
6053 tree type
= TREE_TYPE (var
);
6054 if (POINTER_TYPE_P (type
))
6055 type
= TREE_TYPE (type
);
6058 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
6062 /* Try to look at enclosing contexts for reduction var,
6063 use original if no mapping found. */
6065 omp_context
*c
= ctx
->outer
;
6068 t
= maybe_lookup_decl (orig
, c
);
6071 incoming
= outgoing
= (t
? t
: orig
);
6074 has_outer_reduction
:;
6078 ref_to_res
= integer_zero_node
;
6080 if (omp_is_reference (orig
))
6082 tree type
= TREE_TYPE (var
);
6083 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
6087 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
6088 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
6091 v1
= create_tmp_var (type
, id
);
6092 v2
= create_tmp_var (type
, id
);
6093 v3
= create_tmp_var (type
, id
);
6095 gimplify_assign (v1
, var
, fork_seq
);
6096 gimplify_assign (v2
, var
, fork_seq
);
6097 gimplify_assign (v3
, var
, fork_seq
);
6099 var
= build_simple_mem_ref (var
);
6100 v1
= build_simple_mem_ref (v1
);
6101 v2
= build_simple_mem_ref (v2
);
6102 v3
= build_simple_mem_ref (v3
);
6103 outgoing
= build_simple_mem_ref (outgoing
);
6105 if (!TREE_CONSTANT (incoming
))
6106 incoming
= build_simple_mem_ref (incoming
);
6111 /* Determine position in reduction buffer, which may be used
6112 by target. The parser has ensured that this is not a
6113 variable-sized type. */
6114 fixed_size_mode mode
6115 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
6116 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
6117 offset
= (offset
+ align
- 1) & ~(align
- 1);
6118 tree off
= build_int_cst (sizetype
, offset
);
6119 offset
+= GET_MODE_SIZE (mode
);
6123 init_code
= build_int_cst (integer_type_node
,
6124 IFN_GOACC_REDUCTION_INIT
);
6125 fini_code
= build_int_cst (integer_type_node
,
6126 IFN_GOACC_REDUCTION_FINI
);
6127 setup_code
= build_int_cst (integer_type_node
,
6128 IFN_GOACC_REDUCTION_SETUP
);
6129 teardown_code
= build_int_cst (integer_type_node
,
6130 IFN_GOACC_REDUCTION_TEARDOWN
);
6134 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6135 TREE_TYPE (var
), 6, setup_code
,
6136 unshare_expr (ref_to_res
),
6137 incoming
, level
, op
, off
);
6139 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6140 TREE_TYPE (var
), 6, init_code
,
6141 unshare_expr (ref_to_res
),
6142 v1
, level
, op
, off
);
6144 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6145 TREE_TYPE (var
), 6, fini_code
,
6146 unshare_expr (ref_to_res
),
6147 v2
, level
, op
, off
);
6149 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6150 TREE_TYPE (var
), 6, teardown_code
,
6151 ref_to_res
, v3
, level
, op
, off
);
6153 gimplify_assign (v1
, setup_call
, &before_fork
);
6154 gimplify_assign (v2
, init_call
, &after_fork
);
6155 gimplify_assign (v3
, fini_call
, &before_join
);
6156 gimplify_assign (outgoing
, teardown_call
, &after_join
);
6159 /* Now stitch things together. */
6160 gimple_seq_add_seq (fork_seq
, before_fork
);
6162 gimple_seq_add_stmt (fork_seq
, fork
);
6163 gimple_seq_add_seq (fork_seq
, after_fork
);
6165 gimple_seq_add_seq (join_seq
, before_join
);
6167 gimple_seq_add_stmt (join_seq
, join
);
6168 gimple_seq_add_seq (join_seq
, after_join
);
6171 /* Generate code to implement the REDUCTION clauses, append it
6172 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6173 that should be emitted also inside of the critical section,
6174 in that case clear *CLIST afterwards, otherwise leave it as is
6175 and let the caller emit it itself. */
6178 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
6179 gimple_seq
*clist
, omp_context
*ctx
)
6181 gimple_seq sub_seq
= NULL
;
6186 /* OpenACC loop reductions are handled elsewhere. */
6187 if (is_gimple_omp_oacc (ctx
->stmt
))
6190 /* SIMD reductions are handled in lower_rec_input_clauses. */
6191 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6192 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
6195 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6196 update in that case, otherwise use a lock. */
6197 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
6198 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6199 && !OMP_CLAUSE_REDUCTION_TASK (c
))
6201 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
6202 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6204 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6214 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6216 tree var
, ref
, new_var
, orig_var
;
6217 enum tree_code code
;
6218 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6220 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
6221 || OMP_CLAUSE_REDUCTION_TASK (c
))
6224 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
6225 orig_var
= var
= OMP_CLAUSE_DECL (c
);
6226 if (TREE_CODE (var
) == MEM_REF
)
6228 var
= TREE_OPERAND (var
, 0);
6229 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
6230 var
= TREE_OPERAND (var
, 0);
6231 if (TREE_CODE (var
) == ADDR_EXPR
)
6232 var
= TREE_OPERAND (var
, 0);
6235 /* If this is a pointer or referenced based array
6236 section, the var could be private in the outer
6237 context e.g. on orphaned loop construct. Pretend this
6238 is private variable's outer reference. */
6239 ccode
= OMP_CLAUSE_PRIVATE
;
6240 if (TREE_CODE (var
) == INDIRECT_REF
)
6241 var
= TREE_OPERAND (var
, 0);
6244 if (is_variable_sized (var
))
6246 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
6247 var
= DECL_VALUE_EXPR (var
);
6248 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
6249 var
= TREE_OPERAND (var
, 0);
6250 gcc_assert (DECL_P (var
));
6253 new_var
= lookup_decl (var
, ctx
);
6254 if (var
== OMP_CLAUSE_DECL (c
) && omp_is_reference (var
))
6255 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6256 ref
= build_outer_var_ref (var
, ctx
, ccode
);
6257 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6259 /* reduction(-:var) sums up the partial results, so it acts
6260 identically to reduction(+:var). */
6261 if (code
== MINUS_EXPR
)
6266 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
6268 addr
= save_expr (addr
);
6269 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
6270 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (ref
), ref
, new_var
);
6271 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
6272 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
6273 gimplify_and_add (x
, stmt_seqp
);
6276 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6278 tree d
= OMP_CLAUSE_DECL (c
);
6279 tree type
= TREE_TYPE (d
);
6280 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
6281 tree i
= create_tmp_var (TREE_TYPE (v
));
6282 tree ptype
= build_pointer_type (TREE_TYPE (type
));
6283 tree bias
= TREE_OPERAND (d
, 1);
6284 d
= TREE_OPERAND (d
, 0);
6285 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
6287 tree b
= TREE_OPERAND (d
, 1);
6288 b
= maybe_lookup_decl (b
, ctx
);
6291 b
= TREE_OPERAND (d
, 1);
6292 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
6294 if (integer_zerop (bias
))
6298 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
6299 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
6300 TREE_TYPE (b
), b
, bias
);
6302 d
= TREE_OPERAND (d
, 0);
6304 /* For ref build_outer_var_ref already performs this, so
6305 only new_var needs a dereference. */
6306 if (TREE_CODE (d
) == INDIRECT_REF
)
6308 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6309 gcc_assert (omp_is_reference (var
) && var
== orig_var
);
6311 else if (TREE_CODE (d
) == ADDR_EXPR
)
6313 if (orig_var
== var
)
6315 new_var
= build_fold_addr_expr (new_var
);
6316 ref
= build_fold_addr_expr (ref
);
6321 gcc_assert (orig_var
== var
);
6322 if (omp_is_reference (var
))
6323 ref
= build_fold_addr_expr (ref
);
6327 tree t
= maybe_lookup_decl (v
, ctx
);
6331 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
6332 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
6334 if (!integer_zerop (bias
))
6336 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
6337 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
6338 TREE_TYPE (new_var
), new_var
,
6339 unshare_expr (bias
));
6340 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
6341 TREE_TYPE (ref
), ref
, bias
);
6343 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
6344 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
6345 tree m
= create_tmp_var (ptype
);
6346 gimplify_assign (m
, new_var
, stmt_seqp
);
6348 m
= create_tmp_var (ptype
);
6349 gimplify_assign (m
, ref
, stmt_seqp
);
6351 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
6352 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6353 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6354 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
6355 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6356 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
6357 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6359 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6360 tree decl_placeholder
6361 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
6362 SET_DECL_VALUE_EXPR (placeholder
, out
);
6363 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6364 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
6365 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
6366 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
6367 gimple_seq_add_seq (&sub_seq
,
6368 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
6369 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6370 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
6371 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
6375 x
= build2 (code
, TREE_TYPE (out
), out
, priv
);
6376 out
= unshare_expr (out
);
6377 gimplify_assign (out
, x
, &sub_seq
);
6379 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
6380 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
6381 gimple_seq_add_stmt (&sub_seq
, g
);
6382 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
6383 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
6384 gimple_seq_add_stmt (&sub_seq
, g
);
6385 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
6386 build_int_cst (TREE_TYPE (i
), 1));
6387 gimple_seq_add_stmt (&sub_seq
, g
);
6388 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
6389 gimple_seq_add_stmt (&sub_seq
, g
);
6390 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
6392 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6394 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6396 if (omp_is_reference (var
)
6397 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
6399 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
6400 SET_DECL_VALUE_EXPR (placeholder
, ref
);
6401 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6402 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
6403 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
6404 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6405 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
6409 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
6410 ref
= build_outer_var_ref (var
, ctx
);
6411 gimplify_assign (ref
, x
, &sub_seq
);
6415 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
6417 gimple_seq_add_stmt (stmt_seqp
, stmt
);
6419 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
6423 gimple_seq_add_seq (stmt_seqp
, *clist
);
6427 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
6429 gimple_seq_add_stmt (stmt_seqp
, stmt
);
6433 /* Generate code to implement the COPYPRIVATE clauses. */
6436 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
6441 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6443 tree var
, new_var
, ref
, x
;
6445 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6447 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
6450 var
= OMP_CLAUSE_DECL (c
);
6451 by_ref
= use_pointer_for_field (var
, NULL
);
6453 ref
= build_sender_ref (var
, ctx
);
6454 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
6457 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
6458 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
6460 gimplify_assign (ref
, x
, slist
);
6462 ref
= build_receiver_ref (var
, false, ctx
);
6465 ref
= fold_convert_loc (clause_loc
,
6466 build_pointer_type (TREE_TYPE (new_var
)),
6468 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
6470 if (omp_is_reference (var
))
6472 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
6473 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
6474 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6476 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
6477 gimplify_and_add (x
, rlist
);
6482 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6483 and REDUCTION from the sender (aka parent) side. */
6486 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
6490 int ignored_looptemp
= 0;
6491 bool is_taskloop
= false;
6493 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
6494 by GOMP_taskloop. */
6495 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
6497 ignored_looptemp
= 2;
6501 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6503 tree val
, ref
, x
, var
;
6504 bool by_ref
, do_in
= false, do_out
= false;
6505 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6507 switch (OMP_CLAUSE_CODE (c
))
6509 case OMP_CLAUSE_PRIVATE
:
6510 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
6513 case OMP_CLAUSE_FIRSTPRIVATE
:
6514 case OMP_CLAUSE_COPYIN
:
6515 case OMP_CLAUSE_LASTPRIVATE
:
6516 case OMP_CLAUSE_IN_REDUCTION
:
6517 case OMP_CLAUSE__REDUCTEMP_
:
6519 case OMP_CLAUSE_REDUCTION
:
6520 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
6523 case OMP_CLAUSE_SHARED
:
6524 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
6527 case OMP_CLAUSE__LOOPTEMP_
:
6528 if (ignored_looptemp
)
6538 val
= OMP_CLAUSE_DECL (c
);
6539 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6540 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
6541 && TREE_CODE (val
) == MEM_REF
)
6543 val
= TREE_OPERAND (val
, 0);
6544 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
6545 val
= TREE_OPERAND (val
, 0);
6546 if (TREE_CODE (val
) == INDIRECT_REF
6547 || TREE_CODE (val
) == ADDR_EXPR
)
6548 val
= TREE_OPERAND (val
, 0);
6549 if (is_variable_sized (val
))
6553 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
6554 outer taskloop region. */
6555 omp_context
*ctx_for_o
= ctx
;
6557 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
6558 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
6559 ctx_for_o
= ctx
->outer
;
6561 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
6563 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
6564 && is_global_var (var
)
6565 && (val
== OMP_CLAUSE_DECL (c
)
6566 || !is_task_ctx (ctx
)
6567 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
6568 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
6569 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
6570 != POINTER_TYPE
)))))
6573 t
= omp_member_access_dummy_var (var
);
6576 var
= DECL_VALUE_EXPR (var
);
6577 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
6579 var
= unshare_and_remap (var
, t
, o
);
6581 var
= unshare_expr (var
);
6584 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
6586 /* Handle taskloop firstprivate/lastprivate, where the
6587 lastprivate on GIMPLE_OMP_TASK is represented as
6588 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
6589 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
6590 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
6591 if (use_pointer_for_field (val
, ctx
))
6592 var
= build_fold_addr_expr (var
);
6593 gimplify_assign (x
, var
, ilist
);
6594 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
6598 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
6599 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
6600 || val
== OMP_CLAUSE_DECL (c
))
6601 && is_variable_sized (val
))
6603 by_ref
= use_pointer_for_field (val
, NULL
);
6605 switch (OMP_CLAUSE_CODE (c
))
6607 case OMP_CLAUSE_FIRSTPRIVATE
:
6608 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
6610 && is_task_ctx (ctx
))
6611 TREE_NO_WARNING (var
) = 1;
6615 case OMP_CLAUSE_PRIVATE
:
6616 case OMP_CLAUSE_COPYIN
:
6617 case OMP_CLAUSE__LOOPTEMP_
:
6618 case OMP_CLAUSE__REDUCTEMP_
:
6622 case OMP_CLAUSE_LASTPRIVATE
:
6623 if (by_ref
|| omp_is_reference (val
))
6625 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
6632 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
6637 case OMP_CLAUSE_REDUCTION
:
6638 case OMP_CLAUSE_IN_REDUCTION
:
6640 if (val
== OMP_CLAUSE_DECL (c
))
6642 if (is_task_ctx (ctx
))
6643 by_ref
= use_pointer_for_field (val
, ctx
);
6645 do_out
= !(by_ref
|| omp_is_reference (val
));
6648 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
6657 ref
= build_sender_ref (val
, ctx
);
6658 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
6659 gimplify_assign (ref
, x
, ilist
);
6660 if (is_task_ctx (ctx
))
6661 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
6666 ref
= build_sender_ref (val
, ctx
);
6667 gimplify_assign (var
, ref
, olist
);
6672 /* Generate code to implement SHARED from the sender (aka parent)
6673 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
6674 list things that got automatically shared. */
6677 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
6679 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
6681 if (ctx
->record_type
== NULL
)
6684 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
6685 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
6687 ovar
= DECL_ABSTRACT_ORIGIN (f
);
6688 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
6691 nvar
= maybe_lookup_decl (ovar
, ctx
);
6692 if (!nvar
|| !DECL_HAS_VALUE_EXPR_P (nvar
))
6695 /* If CTX is a nested parallel directive. Find the immediately
6696 enclosing parallel or workshare construct that contains a
6697 mapping for OVAR. */
6698 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
6700 t
= omp_member_access_dummy_var (var
);
6703 var
= DECL_VALUE_EXPR (var
);
6704 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
6706 var
= unshare_and_remap (var
, t
, o
);
6708 var
= unshare_expr (var
);
6711 if (use_pointer_for_field (ovar
, ctx
))
6713 x
= build_sender_ref (ovar
, ctx
);
6714 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
6715 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
6717 gcc_assert (is_parallel_ctx (ctx
)
6718 && DECL_ARTIFICIAL (ovar
));
6719 /* _condtemp_ clause. */
6720 var
= build_constructor (TREE_TYPE (x
), NULL
);
6723 var
= build_fold_addr_expr (var
);
6724 gimplify_assign (x
, var
, ilist
);
6728 x
= build_sender_ref (ovar
, ctx
);
6729 gimplify_assign (x
, var
, ilist
);
6731 if (!TREE_READONLY (var
)
6732 /* We don't need to receive a new reference to a result
6733 or parm decl. In fact we may not store to it as we will
6734 invalidate any pending RSO and generate wrong gimple
6736 && !((TREE_CODE (var
) == RESULT_DECL
6737 || TREE_CODE (var
) == PARM_DECL
)
6738 && DECL_BY_REFERENCE (var
)))
6740 x
= build_sender_ref (ovar
, ctx
);
6741 gimplify_assign (var
, x
, olist
);
6747 /* Emit an OpenACC head marker call, encapulating the partitioning and
6748 other information that must be processed by the target compiler.
6749 Return the maximum number of dimensions the associated loop might
6750 be partitioned over. */
6753 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
6754 gimple_seq
*seq
, omp_context
*ctx
)
6756 unsigned levels
= 0;
6758 tree gang_static
= NULL_TREE
;
6759 auto_vec
<tree
, 5> args
;
6761 args
.quick_push (build_int_cst
6762 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
6763 args
.quick_push (ddvar
);
6764 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6766 switch (OMP_CLAUSE_CODE (c
))
6768 case OMP_CLAUSE_GANG
:
6769 tag
|= OLF_DIM_GANG
;
6770 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
6771 /* static:* is represented by -1, and we can ignore it, as
6772 scheduling is always static. */
6773 if (gang_static
&& integer_minus_onep (gang_static
))
6774 gang_static
= NULL_TREE
;
6778 case OMP_CLAUSE_WORKER
:
6779 tag
|= OLF_DIM_WORKER
;
6783 case OMP_CLAUSE_VECTOR
:
6784 tag
|= OLF_DIM_VECTOR
;
6788 case OMP_CLAUSE_SEQ
:
6792 case OMP_CLAUSE_AUTO
:
6796 case OMP_CLAUSE_INDEPENDENT
:
6797 tag
|= OLF_INDEPENDENT
;
6800 case OMP_CLAUSE_TILE
:
6811 if (DECL_P (gang_static
))
6812 gang_static
= build_outer_var_ref (gang_static
, ctx
);
6813 tag
|= OLF_GANG_STATIC
;
6816 /* In a parallel region, loops are implicitly INDEPENDENT. */
6817 omp_context
*tgt
= enclosing_target_ctx (ctx
);
6818 if (!tgt
|| is_oacc_parallel (tgt
))
6819 tag
|= OLF_INDEPENDENT
;
6822 /* Tiling could use all 3 levels. */
6826 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
6827 Ensure at least one level, or 2 for possible auto
6829 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
6830 << OLF_DIM_BASE
) | OLF_SEQ
));
6832 if (levels
< 1u + maybe_auto
)
6833 levels
= 1u + maybe_auto
;
6836 args
.quick_push (build_int_cst (integer_type_node
, levels
));
6837 args
.quick_push (build_int_cst (integer_type_node
, tag
));
6839 args
.quick_push (gang_static
);
6841 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
6842 gimple_set_location (call
, loc
);
6843 gimple_set_lhs (call
, ddvar
);
6844 gimple_seq_add_stmt (seq
, call
);
/* Emit an OpenACC loop head or tail marker to SEQ.  LEVEL is the
   partitioning level of the enclosed region.  */
6853 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
6854 tree tofollow
, gimple_seq
*seq
)
6856 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
6857 : IFN_UNIQUE_OACC_TAIL_MARK
);
6858 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
6859 int nargs
= 2 + (tofollow
!= NULL_TREE
);
6860 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
6861 marker
, ddvar
, tofollow
);
6862 gimple_set_location (call
, loc
);
6863 gimple_set_lhs (call
, ddvar
);
6864 gimple_seq_add_stmt (seq
, call
);
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  */
6872 lower_oacc_head_tail (location_t loc
, tree clauses
,
6873 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
6876 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
6877 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
6879 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
6880 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
6881 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
6884 for (unsigned done
= 1; count
; count
--, done
++)
6886 gimple_seq fork_seq
= NULL
;
6887 gimple_seq join_seq
= NULL
;
6889 tree place
= build_int_cst (integer_type_node
, -1);
6890 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
6891 fork_kind
, ddvar
, place
);
6892 gimple_set_location (fork
, loc
);
6893 gimple_set_lhs (fork
, ddvar
);
6895 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
6896 join_kind
, ddvar
, place
);
6897 gimple_set_location (join
, loc
);
6898 gimple_set_lhs (join
, ddvar
);
6900 /* Mark the beginning of this level sequence. */
6902 lower_oacc_loop_marker (loc
, ddvar
, true,
6903 build_int_cst (integer_type_node
, count
),
6905 lower_oacc_loop_marker (loc
, ddvar
, false,
6906 build_int_cst (integer_type_node
, done
),
6909 lower_oacc_reductions (loc
, clauses
, place
, inner
,
6910 fork
, join
, &fork_seq
, &join_seq
, ctx
);
6912 /* Append this level to head. */
6913 gimple_seq_add_seq (head
, fork_seq
);
6914 /* Prepend it to tail. */
6915 gimple_seq_add_seq (&join_seq
, *tail
);
6921 /* Mark the end of the sequence. */
6922 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
6923 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
6926 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
6927 catch handler and return it. This prevents programs from violating the
6928 structured block semantics with throws. */
6931 maybe_catch_exception (gimple_seq body
)
6936 if (!flag_exceptions
)
6939 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
6940 decl
= lang_hooks
.eh_protect_cleanup_actions ();
6942 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
6944 g
= gimple_build_eh_must_not_throw (decl
);
6945 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
6948 return gimple_seq_alloc_with_stmt (g
);
6952 /* Routines to lower OMP directives into OMP-GIMPLE. */
6954 /* If ctx is a worksharing context inside of a cancellable parallel
6955 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
6956 and conditional branch to parallel's cancel_label to handle
6957 cancellation in the implicit barrier. */
6960 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
6963 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
6964 if (gimple_omp_return_nowait_p (omp_return
))
6966 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
6967 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
6968 && outer
->cancellable
)
6970 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
6971 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
6972 tree lhs
= create_tmp_var (c_bool_type
);
6973 gimple_omp_return_set_lhs (omp_return
, lhs
);
6974 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
6975 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
6976 fold_convert (c_bool_type
,
6977 boolean_false_node
),
6978 outer
->cancel_label
, fallthru_label
);
6979 gimple_seq_add_stmt (body
, g
);
6980 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
6982 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
6986 /* Find the first task_reduction or reduction clause or return NULL
6987 if there are none. */
6990 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
6991 enum omp_clause_code ccode
)
6995 clauses
= omp_find_clause (clauses
, ccode
);
6996 if (clauses
== NULL_TREE
)
6998 if (ccode
!= OMP_CLAUSE_REDUCTION
6999 || code
== OMP_TASKLOOP
7000 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
7002 clauses
= OMP_CLAUSE_CHAIN (clauses
);
7006 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
7007 gimple_seq
*, gimple_seq
*);
7009 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7010 CTX is the enclosing OMP context for the current statement. */
7013 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7015 tree block
, control
;
7016 gimple_stmt_iterator tgsi
;
7017 gomp_sections
*stmt
;
7019 gbind
*new_stmt
, *bind
;
7020 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
7022 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
7024 push_gimplify_context ();
7030 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
7031 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
7032 tree rtmp
= NULL_TREE
;
7035 tree type
= build_pointer_type (pointer_sized_int_node
);
7036 tree temp
= create_tmp_var (type
);
7037 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
7038 OMP_CLAUSE_DECL (c
) = temp
;
7039 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
7040 gimple_omp_sections_set_clauses (stmt
, c
);
7041 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
7042 gimple_omp_sections_clauses (stmt
),
7043 &ilist
, &tred_dlist
);
7045 rtmp
= make_ssa_name (type
);
7046 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
7049 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
7050 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
7052 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
7053 &ilist
, &dlist
, ctx
, NULL
);
7055 control
= create_tmp_var (unsigned_type_node
, ".section");
7056 gimple_omp_sections_set_control (stmt
, control
);
7058 new_body
= gimple_omp_body (stmt
);
7059 gimple_omp_set_body (stmt
, NULL
);
7060 tgsi
= gsi_start (new_body
);
7061 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
7066 sec_start
= gsi_stmt (tgsi
);
7067 sctx
= maybe_lookup_ctx (sec_start
);
7070 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
7071 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
7072 GSI_CONTINUE_LINKING
);
7073 gimple_omp_set_body (sec_start
, NULL
);
7075 if (gsi_one_before_end_p (tgsi
))
7077 gimple_seq l
= NULL
;
7078 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
7079 &ilist
, &l
, &clist
, ctx
);
7080 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
7081 gimple_omp_section_set_last (sec_start
);
7084 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
7085 GSI_CONTINUE_LINKING
);
7088 block
= make_node (BLOCK
);
7089 bind
= gimple_build_bind (NULL
, new_body
, block
);
7092 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
7096 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
7097 gcall
*g
= gimple_build_call (fndecl
, 0);
7098 gimple_seq_add_stmt (&olist
, g
);
7099 gimple_seq_add_seq (&olist
, clist
);
7100 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
7101 g
= gimple_build_call (fndecl
, 0);
7102 gimple_seq_add_stmt (&olist
, g
);
7105 block
= make_node (BLOCK
);
7106 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
7107 gsi_replace (gsi_p
, new_stmt
, true);
7109 pop_gimplify_context (new_stmt
);
7110 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
7111 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
7112 if (BLOCK_VARS (block
))
7113 TREE_USED (block
) = 1;
7116 gimple_seq_add_seq (&new_body
, ilist
);
7117 gimple_seq_add_stmt (&new_body
, stmt
);
7118 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
7119 gimple_seq_add_stmt (&new_body
, bind
);
7121 t
= gimple_build_omp_continue (control
, control
);
7122 gimple_seq_add_stmt (&new_body
, t
);
7124 gimple_seq_add_seq (&new_body
, olist
);
7125 if (ctx
->cancellable
)
7126 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
7127 gimple_seq_add_seq (&new_body
, dlist
);
7129 new_body
= maybe_catch_exception (new_body
);
7131 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
7132 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
7133 t
= gimple_build_omp_return (nowait
);
7134 gimple_seq_add_stmt (&new_body
, t
);
7135 gimple_seq_add_seq (&new_body
, tred_dlist
);
7136 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
7139 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
7141 gimple_bind_set_body (new_stmt
, new_body
);
7145 /* A subroutine of lower_omp_single. Expand the simple form of
7146 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7148 if (GOMP_single_start ())
7150 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7152 FIXME. It may be better to delay expanding the logic of this until
7153 pass_expand_omp. The expanded logic may make the job more difficult
7154 to a synchronization analysis pass. */
7157 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
7159 location_t loc
= gimple_location (single_stmt
);
7160 tree tlabel
= create_artificial_label (loc
);
7161 tree flabel
= create_artificial_label (loc
);
7162 gimple
*call
, *cond
;
7165 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
7166 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
7167 call
= gimple_build_call (decl
, 0);
7168 gimple_call_set_lhs (call
, lhs
);
7169 gimple_seq_add_stmt (pre_p
, call
);
7171 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
7172 fold_convert_loc (loc
, TREE_TYPE (lhs
),
7175 gimple_seq_add_stmt (pre_p
, cond
);
7176 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
7177 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7178 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
7182 /* A subroutine of lower_omp_single. Expand the simple form of
7183 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7185 #pragma omp single copyprivate (a, b, c)
7187 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7190 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7196 GOMP_single_copy_end (©out);
7207 FIXME. It may be better to delay expanding the logic of this until
7208 pass_expand_omp. The expanded logic may make the job more difficult
7209 to a synchronization analysis pass. */
7212 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
7215 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
7216 gimple_seq copyin_seq
;
7217 location_t loc
= gimple_location (single_stmt
);
7219 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
7221 ptr_type
= build_pointer_type (ctx
->record_type
);
7222 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
7224 l0
= create_artificial_label (loc
);
7225 l1
= create_artificial_label (loc
);
7226 l2
= create_artificial_label (loc
);
7228 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
7229 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
7230 t
= fold_convert_loc (loc
, ptr_type
, t
);
7231 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
7233 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
7234 build_int_cst (ptr_type
, 0));
7235 t
= build3 (COND_EXPR
, void_type_node
, t
,
7236 build_and_jump (&l0
), build_and_jump (&l1
));
7237 gimplify_and_add (t
, pre_p
);
7239 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
7241 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7244 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
7247 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
7248 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
7249 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
7250 gimplify_and_add (t
, pre_p
);
7252 t
= build_and_jump (&l2
);
7253 gimplify_and_add (t
, pre_p
);
7255 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
7257 gimple_seq_add_seq (pre_p
, copyin_seq
);
7259 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
7263 /* Expand code for an OpenMP single directive. */
7266 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7269 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
7271 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
7273 push_gimplify_context ();
7275 block
= make_node (BLOCK
);
7276 bind
= gimple_build_bind (NULL
, NULL
, block
);
7277 gsi_replace (gsi_p
, bind
, true);
7280 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
7281 &bind_body
, &dlist
, ctx
, NULL
);
7282 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
7284 gimple_seq_add_stmt (&bind_body
, single_stmt
);
7286 if (ctx
->record_type
)
7287 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
7289 lower_omp_single_simple (single_stmt
, &bind_body
);
7291 gimple_omp_set_body (single_stmt
, NULL
);
7293 gimple_seq_add_seq (&bind_body
, dlist
);
7295 bind_body
= maybe_catch_exception (bind_body
);
7297 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
7298 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
7299 gimple
*g
= gimple_build_omp_return (nowait
);
7300 gimple_seq_add_stmt (&bind_body_tail
, g
);
7301 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
7302 if (ctx
->record_type
)
7304 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
7305 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
7306 TREE_THIS_VOLATILE (clobber
) = 1;
7307 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
7308 clobber
), GSI_SAME_STMT
);
7310 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
7311 gimple_bind_set_body (bind
, bind_body
);
7313 pop_gimplify_context (bind
);
7315 gimple_bind_append_vars (bind
, ctx
->block_vars
);
7316 BLOCK_VARS (block
) = ctx
->block_vars
;
7317 if (BLOCK_VARS (block
))
7318 TREE_USED (block
) = 1;
7322 /* Expand code for an OpenMP master directive. */
7325 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7327 tree block
, lab
= NULL
, x
, bfn_decl
;
7328 gimple
*stmt
= gsi_stmt (*gsi_p
);
7330 location_t loc
= gimple_location (stmt
);
7333 push_gimplify_context ();
7335 block
= make_node (BLOCK
);
7336 bind
= gimple_build_bind (NULL
, NULL
, block
);
7337 gsi_replace (gsi_p
, bind
, true);
7338 gimple_bind_add_stmt (bind
, stmt
);
7340 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
7341 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
7342 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
7343 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
7345 gimplify_and_add (x
, &tseq
);
7346 gimple_bind_add_seq (bind
, tseq
);
7348 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
7349 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
7350 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
7351 gimple_omp_set_body (stmt
, NULL
);
7353 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
7355 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
7357 pop_gimplify_context (bind
);
7359 gimple_bind_append_vars (bind
, ctx
->block_vars
);
7360 BLOCK_VARS (block
) = ctx
->block_vars
;
7363 /* Helper function for lower_omp_task_reductions. For a specific PASS
7364 find out the current clause it should be processed, or return false
7365 if all have been processed already. */
7368 omp_task_reduction_iterate (int pass
, enum tree_code code
,
7369 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
7370 tree
*type
, tree
*next
)
7372 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
7374 if (ccode
== OMP_CLAUSE_REDUCTION
7375 && code
!= OMP_TASKLOOP
7376 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
7378 *decl
= OMP_CLAUSE_DECL (*c
);
7379 *type
= TREE_TYPE (*decl
);
7380 if (TREE_CODE (*decl
) == MEM_REF
)
7387 if (omp_is_reference (*decl
))
7388 *type
= TREE_TYPE (*type
);
7389 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
7392 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
7401 /* Lower task_reduction and reduction clauses (the latter unless CODE is
7402 OMP_TASKGROUP only with task modifier). Register mapping of those in
7403 START sequence and reducing them and unregister them in the END sequence. */
7406 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
7407 gimple_seq
*start
, gimple_seq
*end
)
7409 enum omp_clause_code ccode
7410 = (code
== OMP_TASKGROUP
7411 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
7412 tree cancellable
= NULL_TREE
;
7413 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
7414 if (clauses
== NULL_TREE
)
7416 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
7418 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
7419 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
7420 && outer
->cancellable
)
7422 cancellable
= error_mark_node
;
7425 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
7428 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
7429 tree
*last
= &TYPE_FIELDS (record_type
);
7433 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
7435 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
7438 DECL_CHAIN (field
) = ifield
;
7439 last
= &DECL_CHAIN (ifield
);
7440 DECL_CONTEXT (field
) = record_type
;
7441 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
7442 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
7443 DECL_CONTEXT (ifield
) = record_type
;
7444 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
7445 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
7447 for (int pass
= 0; pass
< 2; pass
++)
7449 tree decl
, type
, next
;
7450 for (tree c
= clauses
;
7451 omp_task_reduction_iterate (pass
, code
, ccode
,
7452 &c
, &decl
, &type
, &next
); c
= next
)
7455 tree new_type
= type
;
7457 new_type
= remap_type (type
, &ctx
->outer
->cb
);
7459 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
7460 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
7462 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
7464 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
7465 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
7466 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
7469 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
7470 DECL_CONTEXT (field
) = record_type
;
7471 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
7472 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
7474 last
= &DECL_CHAIN (field
);
7476 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
7478 DECL_CONTEXT (bfield
) = record_type
;
7479 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
7480 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
7482 last
= &DECL_CHAIN (bfield
);
7486 layout_type (record_type
);
7488 /* Build up an array which registers with the runtime all the reductions
7489 and deregisters them at the end. Format documented in libgomp/task.c. */
7490 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
7491 tree avar
= create_tmp_var_raw (atype
);
7492 gimple_add_tmp_var (avar
);
7493 TREE_ADDRESSABLE (avar
) = 1;
7494 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
7495 NULL_TREE
, NULL_TREE
);
7496 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
7497 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7498 gimple_seq seq
= NULL
;
7499 tree sz
= fold_convert (pointer_sized_int_node
,
7500 TYPE_SIZE_UNIT (record_type
));
7502 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
7503 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
7504 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
7505 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
7506 ctx
->task_reductions
.create (1 + cnt
);
7507 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
7508 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
7510 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
7511 gimple_seq_add_seq (start
, seq
);
7512 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
7513 NULL_TREE
, NULL_TREE
);
7514 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
7515 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
7516 NULL_TREE
, NULL_TREE
);
7517 t
= build_int_cst (pointer_sized_int_node
,
7518 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
7519 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7520 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
7521 NULL_TREE
, NULL_TREE
);
7522 t
= build_int_cst (pointer_sized_int_node
, -1);
7523 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7524 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
7525 NULL_TREE
, NULL_TREE
);
7526 t
= build_int_cst (pointer_sized_int_node
, 0);
7527 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7529 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
7530 and for each task reduction checks a bool right after the private variable
7531 within that thread's chunk; if the bool is clear, it hasn't been
7532 initialized and thus isn't going to be reduced nor destructed, otherwise
7533 reduce and destruct it. */
7534 tree idx
= create_tmp_var (size_type_node
);
7535 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
7536 tree num_thr_sz
= create_tmp_var (size_type_node
);
7537 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
7538 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
7539 tree lab3
= NULL_TREE
;
7541 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
7543 /* For worksharing constructs, only perform it in the master thread,
7544 with the exception of cancelled implicit barriers - then only handle
7545 the current thread. */
7546 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
7547 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
7548 tree thr_num
= create_tmp_var (integer_type_node
);
7549 g
= gimple_build_call (t
, 0);
7550 gimple_call_set_lhs (g
, thr_num
);
7551 gimple_seq_add_stmt (end
, g
);
7555 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
7556 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
7557 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
7558 if (code
== OMP_FOR
)
7559 c
= gimple_omp_for_clauses (ctx
->stmt
);
7560 else /* if (code == OMP_SECTIONS) */
7561 c
= gimple_omp_sections_clauses (ctx
->stmt
);
7562 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
7564 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
7566 gimple_seq_add_stmt (end
, g
);
7567 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
7568 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
7569 gimple_seq_add_stmt (end
, g
);
7570 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
7571 build_one_cst (TREE_TYPE (idx
)));
7572 gimple_seq_add_stmt (end
, g
);
7573 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
7574 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
7576 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
7577 gimple_seq_add_stmt (end
, g
);
7578 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
7580 if (code
!= OMP_PARALLEL
)
7582 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
7583 tree num_thr
= create_tmp_var (integer_type_node
);
7584 g
= gimple_build_call (t
, 0);
7585 gimple_call_set_lhs (g
, num_thr
);
7586 gimple_seq_add_stmt (end
, g
);
7587 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
7588 gimple_seq_add_stmt (end
, g
);
7590 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
7594 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7595 OMP_CLAUSE__REDUCTEMP_
);
7596 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
7597 t
= fold_convert (size_type_node
, t
);
7598 gimplify_assign (num_thr_sz
, t
, end
);
7600 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
7601 NULL_TREE
, NULL_TREE
);
7602 tree data
= create_tmp_var (pointer_sized_int_node
);
7603 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
7604 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
7606 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
7607 ptr
= create_tmp_var (build_pointer_type (record_type
));
7609 ptr
= create_tmp_var (ptr_type_node
);
7610 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
7612 tree field
= TYPE_FIELDS (record_type
);
7615 field
= DECL_CHAIN (DECL_CHAIN (field
));
7616 for (int pass
= 0; pass
< 2; pass
++)
7618 tree decl
, type
, next
;
7619 for (tree c
= clauses
;
7620 omp_task_reduction_iterate (pass
, code
, ccode
,
7621 &c
, &decl
, &type
, &next
); c
= next
)
7623 tree var
= decl
, ref
;
7624 if (TREE_CODE (decl
) == MEM_REF
)
7626 var
= TREE_OPERAND (var
, 0);
7627 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7628 var
= TREE_OPERAND (var
, 0);
7630 if (TREE_CODE (var
) == ADDR_EXPR
)
7631 var
= TREE_OPERAND (var
, 0);
7632 else if (TREE_CODE (var
) == INDIRECT_REF
)
7633 var
= TREE_OPERAND (var
, 0);
7634 tree orig_var
= var
;
7635 if (is_variable_sized (var
))
7637 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
7638 var
= DECL_VALUE_EXPR (var
);
7639 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
7640 var
= TREE_OPERAND (var
, 0);
7641 gcc_assert (DECL_P (var
));
7643 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
7644 if (orig_var
!= var
)
7645 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
7646 else if (TREE_CODE (v
) == ADDR_EXPR
)
7647 t
= build_fold_addr_expr (t
);
7648 else if (TREE_CODE (v
) == INDIRECT_REF
)
7649 t
= build_fold_indirect_ref (t
);
7650 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
7652 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
7653 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
7654 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
7656 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
7657 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
7658 fold_convert (size_type_node
,
7659 TREE_OPERAND (decl
, 1)));
7663 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
7664 if (!omp_is_reference (decl
))
7665 t
= build_fold_addr_expr (t
);
7667 t
= fold_convert (pointer_sized_int_node
, t
);
7669 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
7670 gimple_seq_add_seq (start
, seq
);
7671 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
7672 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
7673 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7674 t
= unshare_expr (byte_position (field
));
7675 t
= fold_convert (pointer_sized_int_node
, t
);
7676 ctx
->task_reduction_map
->put (c
, cnt
);
7677 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
7680 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
7681 gimple_seq_add_seq (start
, seq
);
7682 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
7683 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
7684 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7686 tree bfield
= DECL_CHAIN (field
);
7688 if (code
== OMP_PARALLEL
|| code
== OMP_FOR
|| code
== OMP_SECTIONS
)
7689 /* In parallel or worksharing all threads unconditionally
7690 initialize all their task reduction private variables. */
7691 cond
= boolean_true_node
;
7692 else if (TREE_TYPE (ptr
) == ptr_type_node
)
7694 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
7695 unshare_expr (byte_position (bfield
)));
7697 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
7698 gimple_seq_add_seq (end
, seq
);
7699 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
7700 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
7701 build_int_cst (pbool
, 0));
7704 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
7705 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
7706 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
7707 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
7708 tree condv
= create_tmp_var (boolean_type_node
);
7709 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
7710 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
7712 gimple_seq_add_stmt (end
, g
);
7713 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
7714 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
7716 /* If this reduction doesn't need destruction and parallel
7717 has been cancelled, there is nothing to do for this
7718 reduction, so jump around the merge operation. */
7719 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
7720 g
= gimple_build_cond (NE_EXPR
, cancellable
,
7721 build_zero_cst (TREE_TYPE (cancellable
)),
7723 gimple_seq_add_stmt (end
, g
);
7724 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
7728 if (TREE_TYPE (ptr
) == ptr_type_node
)
7730 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
7731 unshare_expr (byte_position (field
)));
7733 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
7734 gimple_seq_add_seq (end
, seq
);
7735 tree pbool
= build_pointer_type (TREE_TYPE (field
));
7736 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
7737 build_int_cst (pbool
, 0));
7740 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
7741 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
7743 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
7744 if (TREE_CODE (decl
) != MEM_REF
&& omp_is_reference (decl
))
7745 ref
= build_simple_mem_ref (ref
);
7746 /* reduction(-:var) sums up the partial results, so it acts
7747 identically to reduction(+:var). */
7748 if (rcode
== MINUS_EXPR
)
7750 if (TREE_CODE (decl
) == MEM_REF
)
7752 tree type
= TREE_TYPE (new_var
);
7753 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7754 tree i
= create_tmp_var (TREE_TYPE (v
));
7755 tree ptype
= build_pointer_type (TREE_TYPE (type
));
7758 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7759 tree vv
= create_tmp_var (TREE_TYPE (v
));
7760 gimplify_assign (vv
, v
, start
);
7763 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
7764 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
7765 new_var
= build_fold_addr_expr (new_var
);
7766 new_var
= fold_convert (ptype
, new_var
);
7767 ref
= fold_convert (ptype
, ref
);
7768 tree m
= create_tmp_var (ptype
);
7769 gimplify_assign (m
, new_var
, end
);
7771 m
= create_tmp_var (ptype
);
7772 gimplify_assign (m
, ref
, end
);
7774 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
7775 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7776 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
7777 gimple_seq_add_stmt (end
, gimple_build_label (body
));
7778 tree priv
= build_simple_mem_ref (new_var
);
7779 tree out
= build_simple_mem_ref (ref
);
7780 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7782 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7783 tree decl_placeholder
7784 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
7785 tree lab6
= NULL_TREE
;
7788 /* If this reduction needs destruction and parallel
7789 has been cancelled, jump around the merge operation
7790 to the destruction. */
7791 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
7792 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
7793 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
7794 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
7796 gimple_seq_add_stmt (end
, g
);
7797 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
7799 SET_DECL_VALUE_EXPR (placeholder
, out
);
7800 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7801 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
7802 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
7803 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7804 gimple_seq_add_seq (end
,
7805 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7806 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7807 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
7809 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7810 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
7813 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
7814 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
7817 gimple_seq tseq
= NULL
;
7818 gimplify_stmt (&x
, &tseq
);
7819 gimple_seq_add_seq (end
, tseq
);
7824 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
7825 out
= unshare_expr (out
);
7826 gimplify_assign (out
, x
, end
);
7829 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
7830 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7831 gimple_seq_add_stmt (end
, g
);
7832 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
7833 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7834 gimple_seq_add_stmt (end
, g
);
7835 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
7836 build_int_cst (TREE_TYPE (i
), 1));
7837 gimple_seq_add_stmt (end
, g
);
7838 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
7839 gimple_seq_add_stmt (end
, g
);
7840 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
7842 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7844 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7845 tree oldv
= NULL_TREE
;
7846 tree lab6
= NULL_TREE
;
7849 /* If this reduction needs destruction and parallel
7850 has been cancelled, jump around the merge operation
7851 to the destruction. */
7852 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
7853 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
7854 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
7855 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
7857 gimple_seq_add_stmt (end
, g
);
7858 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
7860 if (omp_is_reference (decl
)
7861 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7863 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
7864 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
7865 tree refv
= create_tmp_var (TREE_TYPE (ref
));
7866 gimplify_assign (refv
, ref
, end
);
7867 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
7868 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7869 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7870 tree d
= maybe_lookup_decl (decl
, ctx
);
7872 if (DECL_HAS_VALUE_EXPR_P (d
))
7873 oldv
= DECL_VALUE_EXPR (d
);
7874 if (omp_is_reference (var
))
7876 tree v
= fold_convert (TREE_TYPE (d
),
7877 build_fold_addr_expr (new_var
));
7878 SET_DECL_VALUE_EXPR (d
, v
);
7881 SET_DECL_VALUE_EXPR (d
, new_var
);
7882 DECL_HAS_VALUE_EXPR_P (d
) = 1;
7883 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7885 SET_DECL_VALUE_EXPR (d
, oldv
);
7888 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
7889 DECL_HAS_VALUE_EXPR_P (d
) = 0;
7891 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7892 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7893 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
7894 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7896 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
7897 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
7900 gimple_seq tseq
= NULL
;
7901 gimplify_stmt (&x
, &tseq
);
7902 gimple_seq_add_seq (end
, tseq
);
7907 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
7908 ref
= unshare_expr (ref
);
7909 gimplify_assign (ref
, x
, end
);
7911 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
7913 field
= DECL_CHAIN (bfield
);
7917 if (code
== OMP_TASKGROUP
)
7919 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
7920 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
7921 gimple_seq_add_stmt (start
, g
);
7926 if (code
== OMP_FOR
)
7927 c
= gimple_omp_for_clauses (ctx
->stmt
);
7928 else if (code
== OMP_SECTIONS
)
7929 c
= gimple_omp_sections_clauses (ctx
->stmt
);
7931 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
7932 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
7933 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
7934 build_fold_addr_expr (avar
));
7935 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
7938 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
7939 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
7941 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
7942 gimple_seq_add_stmt (end
, g
);
7943 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
7944 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
7946 enum built_in_function bfn
7947 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
7948 t
= builtin_decl_explicit (bfn
);
7949 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
7953 arg
= create_tmp_var (c_bool_type
);
7954 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
7958 arg
= build_int_cst (c_bool_type
, 0);
7959 g
= gimple_build_call (t
, 1, arg
);
7963 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
7964 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
7966 gimple_seq_add_stmt (end
, g
);
7967 t
= build_constructor (atype
, NULL
);
7968 TREE_THIS_VOLATILE (t
) = 1;
7969 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
7972 /* Expand code for an OpenMP taskgroup directive. */
7975 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7977 gimple
*stmt
= gsi_stmt (*gsi_p
);
7980 gimple_seq dseq
= NULL
;
7981 tree block
= make_node (BLOCK
);
7983 bind
= gimple_build_bind (NULL
, NULL
, block
);
7984 gsi_replace (gsi_p
, bind
, true);
7985 gimple_bind_add_stmt (bind
, stmt
);
7987 push_gimplify_context ();
7989 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
7991 gimple_bind_add_stmt (bind
, x
);
7993 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
7994 gimple_omp_taskgroup_clauses (stmt
),
7995 gimple_bind_body_ptr (bind
), &dseq
);
7997 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
7998 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
7999 gimple_omp_set_body (stmt
, NULL
);
8001 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8002 gimple_bind_add_seq (bind
, dseq
);
8004 pop_gimplify_context (bind
);
8006 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8007 BLOCK_VARS (block
) = ctx
->block_vars
;
8011 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8014 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
8017 struct omp_for_data fd
;
8018 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
8021 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
8022 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
8023 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
8027 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
8028 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
8029 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
8030 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
8032 /* Merge depend clauses from multiple adjacent
8033 #pragma omp ordered depend(sink:...) constructs
8034 into one #pragma omp ordered depend(sink:...), so that
8035 we can optimize them together. */
8036 gimple_stmt_iterator gsi
= *gsi_p
;
8038 while (!gsi_end_p (gsi
))
8040 gimple
*stmt
= gsi_stmt (gsi
);
8041 if (is_gimple_debug (stmt
)
8042 || gimple_code (stmt
) == GIMPLE_NOP
)
8047 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
8049 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
8050 c
= gimple_omp_ordered_clauses (ord_stmt2
);
8052 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
8053 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8056 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
8058 gsi_remove (&gsi
, true);
8062 /* Canonicalize sink dependence clauses into one folded clause if
8065 The basic algorithm is to create a sink vector whose first
8066 element is the GCD of all the first elements, and whose remaining
8067 elements are the minimum of the subsequent columns.
8069 We ignore dependence vectors whose first element is zero because
8070 such dependencies are known to be executed by the same thread.
8072 We take into account the direction of the loop, so a minimum
8073 becomes a maximum if the loop is iterating forwards. We also
8074 ignore sink clauses where the loop direction is unknown, or where
8075 the offsets are clearly invalid because they are not a multiple
8076 of the loop increment.
8080 #pragma omp for ordered(2)
8081 for (i=0; i < N; ++i)
8082 for (j=0; j < M; ++j)
8084 #pragma omp ordered \
8085 depend(sink:i-8,j-2) \
8086 depend(sink:i,j-1) \ // Completely ignored because i+0.
8087 depend(sink:i-4,j-3) \
8088 depend(sink:i-6,j-4)
8089 #pragma omp ordered depend(source)
8094 depend(sink:-gcd(8,4,6),-min(2,3,4))
8099 /* FIXME: Computing GCD's where the first element is zero is
8100 non-trivial in the presence of collapsed loops. Do this later. */
8101 if (fd
.collapse
> 1)
8104 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
8106 /* wide_int is not a POD so it must be default-constructed. */
8107 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
8108 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
8110 tree folded_dep
= NULL_TREE
;
8111 /* TRUE if the first dimension's offset is negative. */
8112 bool neg_offset_p
= false;
8114 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
8116 while ((c
= *list_p
) != NULL
)
8118 bool remove
= false;
8120 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
8121 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8122 goto next_ordered_clause
;
8125 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
8126 vec
&& TREE_CODE (vec
) == TREE_LIST
;
8127 vec
= TREE_CHAIN (vec
), ++i
)
8129 gcc_assert (i
< len
);
8131 /* omp_extract_for_data has canonicalized the condition. */
8132 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
8133 || fd
.loops
[i
].cond_code
== GT_EXPR
);
8134 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
8135 bool maybe_lexically_later
= true;
8137 /* While the committee makes up its mind, bail if we have any
8138 non-constant steps. */
8139 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
8140 goto lower_omp_ordered_ret
;
8142 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
8143 if (POINTER_TYPE_P (itype
))
8145 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
8146 TYPE_PRECISION (itype
),
8149 /* Ignore invalid offsets that are not multiples of the step. */
8150 if (!wi::multiple_of_p (wi::abs (offset
),
8151 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
8154 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
8155 "ignoring sink clause with offset that is not "
8156 "a multiple of the loop step");
8158 goto next_ordered_clause
;
8161 /* Calculate the first dimension. The first dimension of
8162 the folded dependency vector is the GCD of the first
8163 elements, while ignoring any first elements whose offset
8167 /* Ignore dependence vectors whose first dimension is 0. */
8171 goto next_ordered_clause
;
8175 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
8177 error_at (OMP_CLAUSE_LOCATION (c
),
8178 "first offset must be in opposite direction "
8179 "of loop iterations");
8180 goto lower_omp_ordered_ret
;
8184 neg_offset_p
= forward
;
8185 /* Initialize the first time around. */
8186 if (folded_dep
== NULL_TREE
)
8189 folded_deps
[0] = offset
;
8192 folded_deps
[0] = wi::gcd (folded_deps
[0],
8196 /* Calculate minimum for the remaining dimensions. */
8199 folded_deps
[len
+ i
- 1] = offset
;
8200 if (folded_dep
== c
)
8201 folded_deps
[i
] = offset
;
8202 else if (maybe_lexically_later
8203 && !wi::eq_p (folded_deps
[i
], offset
))
8205 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
8209 for (j
= 1; j
<= i
; j
++)
8210 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
8213 maybe_lexically_later
= false;
8217 gcc_assert (i
== len
);
8221 next_ordered_clause
:
8223 *list_p
= OMP_CLAUSE_CHAIN (c
);
8225 list_p
= &OMP_CLAUSE_CHAIN (c
);
8231 folded_deps
[0] = -folded_deps
[0];
8233 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
8234 if (POINTER_TYPE_P (itype
))
8237 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
8238 = wide_int_to_tree (itype
, folded_deps
[0]);
8239 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
8240 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
8243 lower_omp_ordered_ret
:
8245 /* Ordered without clauses is #pragma omp threads, while we want
8246 a nop instead if we remove all clauses. */
8247 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
8248 gsi_replace (gsi_p
, gimple_build_nop (), true);
8252 /* Expand code for an OpenMP ordered directive. */
8255 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8258 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
8259 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
8262 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8264 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8267 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
8268 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8269 OMP_CLAUSE_THREADS
);
8271 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8274 /* FIXME: This is needs to be moved to the expansion to verify various
8275 conditions only testable on cfg with dominators computed, and also
8276 all the depend clauses to be merged still might need to be available
8277 for the runtime checks. */
8279 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
8283 push_gimplify_context ();
8285 block
= make_node (BLOCK
);
8286 bind
= gimple_build_bind (NULL
, NULL
, block
);
8287 gsi_replace (gsi_p
, bind
, true);
8288 gimple_bind_add_stmt (bind
, stmt
);
8292 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
8293 build_int_cst (NULL_TREE
, threads
));
8294 cfun
->has_simduid_loops
= true;
8297 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
8299 gimple_bind_add_stmt (bind
, x
);
8301 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
8304 counter
= create_tmp_var (integer_type_node
);
8305 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
8306 gimple_call_set_lhs (g
, counter
);
8307 gimple_bind_add_stmt (bind
, g
);
8309 body
= create_artificial_label (UNKNOWN_LOCATION
);
8310 test
= create_artificial_label (UNKNOWN_LOCATION
);
8311 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
8313 tree simt_pred
= create_tmp_var (integer_type_node
);
8314 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
8315 gimple_call_set_lhs (g
, simt_pred
);
8316 gimple_bind_add_stmt (bind
, g
);
8318 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
8319 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
8320 gimple_bind_add_stmt (bind
, g
);
8322 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
8324 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8325 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8326 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8327 gimple_omp_set_body (stmt
, NULL
);
8331 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
8332 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
8333 gimple_bind_add_stmt (bind
, g
);
8335 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
8336 tree nonneg
= create_tmp_var (integer_type_node
);
8337 gimple_seq tseq
= NULL
;
8338 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
8339 gimple_bind_add_seq (bind
, tseq
);
8341 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
8342 gimple_call_set_lhs (g
, nonneg
);
8343 gimple_bind_add_stmt (bind
, g
);
8345 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
8346 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
8347 gimple_bind_add_stmt (bind
, g
);
8349 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
8352 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
8353 build_int_cst (NULL_TREE
, threads
));
8355 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
8357 gimple_bind_add_stmt (bind
, x
);
8359 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8361 pop_gimplify_context (bind
);
8363 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8364 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8368 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
8369 substitution of a couple of function calls. But in the NAMED case,
8370 requires that languages coordinate a symbol name. It is therefore
8371 best put here in common code. */
8373 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
8376 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8379 tree name
, lock
, unlock
;
8380 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
8382 location_t loc
= gimple_location (stmt
);
8385 name
= gimple_omp_critical_name (stmt
);
8390 if (!critical_name_mutexes
)
8391 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
8393 tree
*n
= critical_name_mutexes
->get (name
);
8398 decl
= create_tmp_var_raw (ptr_type_node
);
8400 new_str
= ACONCAT ((".gomp_critical_user_",
8401 IDENTIFIER_POINTER (name
), NULL
));
8402 DECL_NAME (decl
) = get_identifier (new_str
);
8403 TREE_PUBLIC (decl
) = 1;
8404 TREE_STATIC (decl
) = 1;
8405 DECL_COMMON (decl
) = 1;
8406 DECL_ARTIFICIAL (decl
) = 1;
8407 DECL_IGNORED_P (decl
) = 1;
8409 varpool_node::finalize_decl (decl
);
8411 critical_name_mutexes
->put (name
, decl
);
8416 /* If '#pragma omp critical' is inside offloaded region or
8417 inside function marked as offloadable, the symbol must be
8418 marked as offloadable too. */
8420 if (cgraph_node::get (current_function_decl
)->offloadable
)
8421 varpool_node::get_create (decl
)->offloadable
= 1;
8423 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
8424 if (is_gimple_omp_offloaded (octx
->stmt
))
8426 varpool_node::get_create (decl
)->offloadable
= 1;
8430 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
8431 lock
= build_call_expr_loc (loc
, lock
, 1,
8432 build_fold_addr_expr_loc (loc
, decl
));
8434 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
8435 unlock
= build_call_expr_loc (loc
, unlock
, 1,
8436 build_fold_addr_expr_loc (loc
, decl
));
8440 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
8441 lock
= build_call_expr_loc (loc
, lock
, 0);
8443 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
8444 unlock
= build_call_expr_loc (loc
, unlock
, 0);
8447 push_gimplify_context ();
8449 block
= make_node (BLOCK
);
8450 bind
= gimple_build_bind (NULL
, NULL
, block
);
8451 gsi_replace (gsi_p
, bind
, true);
8452 gimple_bind_add_stmt (bind
, stmt
);
8454 tbody
= gimple_bind_body (bind
);
8455 gimplify_and_add (lock
, &tbody
);
8456 gimple_bind_set_body (bind
, tbody
);
8458 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8459 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8460 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8461 gimple_omp_set_body (stmt
, NULL
);
8463 tbody
= gimple_bind_body (bind
);
8464 gimplify_and_add (unlock
, &tbody
);
8465 gimple_bind_set_body (bind
, tbody
);
8467 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8469 pop_gimplify_context (bind
);
8470 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8471 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8474 /* A subroutine of lower_omp_for. Generate code to emit the predicate
8475 for a lastprivate clause. Given a loop control predicate of (V
8476 cond N2), we gate the clause on (!(V cond N2)). The lowered form
8477 is appended to *DLIST, iterator initialization is appended to
8478 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
8479 to be emitted in a critical section. */
8482 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
8483 gimple_seq
*dlist
, gimple_seq
*clist
,
8484 struct omp_context
*ctx
)
8486 tree clauses
, cond
, vinit
;
8487 enum tree_code cond_code
;
8490 cond_code
= fd
->loop
.cond_code
;
8491 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
8493 /* When possible, use a strict equality expression. This can let VRP
8494 type optimizations deduce the value and remove a copy. */
8495 if (tree_fits_shwi_p (fd
->loop
.step
))
8497 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
8498 if (step
== 1 || step
== -1)
8499 cond_code
= EQ_EXPR
;
8502 if (gimple_omp_for_kind (fd
->for_stmt
) == GF_OMP_FOR_KIND_GRID_LOOP
8503 || gimple_omp_for_grid_phony (fd
->for_stmt
))
8504 cond
= omp_grid_lastprivate_predicate (fd
);
8507 tree n2
= fd
->loop
.n2
;
8508 if (fd
->collapse
> 1
8509 && TREE_CODE (n2
) != INTEGER_CST
8510 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
8512 struct omp_context
*taskreg_ctx
= NULL
;
8513 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
8515 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
8516 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
8517 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
8519 if (gimple_omp_for_combined_into_p (gfor
))
8521 gcc_assert (ctx
->outer
->outer
8522 && is_parallel_ctx (ctx
->outer
->outer
));
8523 taskreg_ctx
= ctx
->outer
->outer
;
8527 struct omp_for_data outer_fd
;
8528 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
8529 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
8532 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
8533 taskreg_ctx
= ctx
->outer
->outer
;
8535 else if (is_taskreg_ctx (ctx
->outer
))
8536 taskreg_ctx
= ctx
->outer
;
8540 tree taskreg_clauses
8541 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
8542 tree innerc
= omp_find_clause (taskreg_clauses
,
8543 OMP_CLAUSE__LOOPTEMP_
);
8544 gcc_assert (innerc
);
8545 for (i
= 0; i
< fd
->collapse
; i
++)
8547 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
8548 OMP_CLAUSE__LOOPTEMP_
);
8549 gcc_assert (innerc
);
8551 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
8552 OMP_CLAUSE__LOOPTEMP_
);
8554 n2
= fold_convert (TREE_TYPE (n2
),
8555 lookup_decl (OMP_CLAUSE_DECL (innerc
),
8559 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
8562 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
8564 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
8565 if (!gimple_seq_empty_p (stmts
))
8567 gimple_seq_add_seq (&stmts
, *dlist
);
8570 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
8571 vinit
= fd
->loop
.n1
;
8572 if (cond_code
== EQ_EXPR
8573 && tree_fits_shwi_p (fd
->loop
.n2
)
8574 && ! integer_zerop (fd
->loop
.n2
))
8575 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
8577 vinit
= unshare_expr (vinit
);
8579 /* Initialize the iterator variable, so that threads that don't execute
8580 any iterations don't execute the lastprivate clauses by accident. */
8581 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
8586 /* Lower code for an OMP loop directive. */
8589 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8592 struct omp_for_data fd
, *fdp
= NULL
;
8593 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
8595 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
8596 gimple_seq cnt_list
= NULL
, clist
= NULL
;
8597 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
8600 push_gimplify_context ();
8602 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
8604 block
= make_node (BLOCK
);
8605 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
8606 /* Replace at gsi right away, so that 'stmt' is no member
8607 of a sequence anymore as we're going to add to a different
8609 gsi_replace (gsi_p
, new_stmt
, true);
8611 /* Move declaration of temporaries in the loop body before we make
8613 omp_for_body
= gimple_omp_body (stmt
);
8614 if (!gimple_seq_empty_p (omp_for_body
)
8615 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
8618 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
8619 tree vars
= gimple_bind_vars (inner_bind
);
8620 gimple_bind_append_vars (new_stmt
, vars
);
8621 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
8622 keep them on the inner_bind and it's block. */
8623 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
8624 if (gimple_bind_block (inner_bind
))
8625 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
8628 if (gimple_omp_for_combined_into_p (stmt
))
8630 omp_extract_for_data (stmt
, &fd
, NULL
);
8633 /* We need two temporaries with fd.loop.v type (istart/iend)
8634 and then (fd.collapse - 1) temporaries with the same
8635 type for count2 ... countN-1 vars if not constant. */
8637 tree type
= fd
.iter_type
;
8639 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
8640 count
+= fd
.collapse
- 1;
8642 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
8643 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
8644 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
8649 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
8650 OMP_CLAUSE__LOOPTEMP_
);
8652 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
8653 OMP_CLAUSE__LOOPTEMP_
);
8654 for (i
= 0; i
< count
; i
++)
8659 gcc_assert (outerc
);
8660 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
8661 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
8662 OMP_CLAUSE__LOOPTEMP_
);
8666 /* If there are 2 adjacent SIMD stmts, one with _simt_
8667 clause, another without, make sure they have the same
8668 decls in _looptemp_ clauses, because the outer stmt
8669 they are combined into will look up just one inner_stmt. */
8671 temp
= OMP_CLAUSE_DECL (simtc
);
8673 temp
= create_tmp_var (type
);
8674 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
8676 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
8677 OMP_CLAUSE_DECL (*pc
) = temp
;
8678 pc
= &OMP_CLAUSE_CHAIN (*pc
);
8680 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
8681 OMP_CLAUSE__LOOPTEMP_
);
8686 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
8690 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
8691 OMP_CLAUSE_REDUCTION
);
8692 tree rtmp
= NULL_TREE
;
8695 tree type
= build_pointer_type (pointer_sized_int_node
);
8696 tree temp
= create_tmp_var (type
);
8697 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8698 OMP_CLAUSE_DECL (c
) = temp
;
8699 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
8700 gimple_omp_for_set_clauses (stmt
, c
);
8701 lower_omp_task_reductions (ctx
, OMP_FOR
,
8702 gimple_omp_for_clauses (stmt
),
8703 &tred_ilist
, &tred_dlist
);
8705 rtmp
= make_ssa_name (type
);
8706 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
8709 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
8712 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
8714 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
8715 gimple_omp_for_pre_body (stmt
));
8717 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8719 /* Lower the header expressions. At this point, we can assume that
8720 the header is of the form:
8722 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
8724 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
8725 using the .omp_data_s mapping, if needed. */
8726 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
8728 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
8729 if (!is_gimple_min_invariant (*rhs_p
))
8730 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
8731 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
8732 recompute_tree_invariant_for_addr_expr (*rhs_p
);
8734 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
8735 if (!is_gimple_min_invariant (*rhs_p
))
8736 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
8737 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
8738 recompute_tree_invariant_for_addr_expr (*rhs_p
);
8740 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
8741 if (!is_gimple_min_invariant (*rhs_p
))
8742 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
8745 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
8747 gimple_seq_add_seq (&body
, cnt_list
);
8749 /* Once lowered, extract the bounds and clauses. */
8750 omp_extract_for_data (stmt
, &fd
, NULL
);
8752 if (is_gimple_omp_oacc (ctx
->stmt
)
8753 && !ctx_in_oacc_kernels_region (ctx
))
8754 lower_oacc_head_tail (gimple_location (stmt
),
8755 gimple_omp_for_clauses (stmt
),
8756 &oacc_head
, &oacc_tail
, ctx
);
8758 /* Add OpenACC partitioning and reduction markers just before the loop. */
8760 gimple_seq_add_seq (&body
, oacc_head
);
8762 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
8764 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
8765 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
8766 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
8767 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
8769 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
8770 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
8771 OMP_CLAUSE_LINEAR_STEP (c
)
8772 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
8776 bool phony_loop
= (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
8777 && gimple_omp_for_grid_phony (stmt
));
8779 gimple_seq_add_stmt (&body
, stmt
);
8780 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
8783 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
8786 /* After the loop, add exit clauses. */
8787 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
8791 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8792 gcall
*g
= gimple_build_call (fndecl
, 0);
8793 gimple_seq_add_stmt (&body
, g
);
8794 gimple_seq_add_seq (&body
, clist
);
8795 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8796 g
= gimple_build_call (fndecl
, 0);
8797 gimple_seq_add_stmt (&body
, g
);
8800 if (ctx
->cancellable
)
8801 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
8803 gimple_seq_add_seq (&body
, dlist
);
8807 gimple_seq_add_seq (&tred_ilist
, body
);
8811 body
= maybe_catch_exception (body
);
8815 /* Region exit marker goes at the end of the loop body. */
8816 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
8817 gimple_seq_add_stmt (&body
, g
);
8819 gimple_seq_add_seq (&body
, tred_dlist
);
8821 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
8824 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
8827 /* Add OpenACC joining and reduction markers just after the loop. */
8829 gimple_seq_add_seq (&body
, oacc_tail
);
8831 pop_gimplify_context (new_stmt
);
8833 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
8834 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
8835 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
8836 if (BLOCK_VARS (block
))
8837 TREE_USED (block
) = 1;
8839 gimple_bind_set_body (new_stmt
, body
);
8840 gimple_omp_set_body (stmt
, NULL
);
8841 gimple_omp_for_set_pre_body (stmt
, NULL
);
/* NOTE(review): this extract is lossy — the embedded original line numbers
   jump (…8856 -> 8862, 8864 -> end), so the WALK_SUBSTMTS cases, the
   default case, the break statements and the final return are not visible
   here.  Only comments have been added; code tokens are untouched.  */
8844 /* Callback for walk_stmts.  Check if the current statement only contains
8845    GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */
/* Used by lower_omp_taskreg (below) with an int counter in WI->INFO:
   each OMP_FOR/OMP_SECTIONS seen flips the counter 0 -> 1, and any second
   occurrence forces it to -1, so INFO == 1 afterwards means "exactly one
   workshare construct" (the combined-parallel case).  */
8848 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
8849 			 bool *handled_ops_p
,
8850 			 struct walk_stmt_info
*wi
)
8852   int *info
= (int *) wi
->info
;
8853   gimple
*stmt
= gsi_stmt (*gsi_p
);
/* Tell the walker we fully handled this statement ourselves.  */
8855   *handled_ops_p
= true;
8856   switch (gimple_code (stmt
))
8862     case GIMPLE_OMP_FOR
:
8863     case GIMPLE_OMP_SECTIONS
:
/* First workshare construct -> 1; any further one -> -1.  */
8864       *info
= *info
== 0 ? 1 : -1;
/* Context used while building a task copy function (see create_task_copyfn
   and task_copyfn_copy_decl/task_copyfn_remap_type below).  NOTE(review):
   the field declarations were dropped by the extraction; usage elsewhere in
   this file shows it contains at least a copy_body_data `cb` and an
   omp_context pointer `ctx` — confirm against the full source.  */
8873 struct omp_taskcopy_context
8875   /* This field must be at the beginning, as we do "inheritance": Some
8876      callback functions for tree-inline.c (e.g., omp_copy_decl)
8877      receive a copy_body_data pointer that is up-casted to an
8878      omp_context pointer.  */
/* copy_body_data::copy_decl hook installed by create_task_copyfn: when VAR
   has a field in the task's shared record (sfield_map lookup succeeds),
   replace it with a fresh temporary of the same type.  NOTE(review): the
   fall-through return for the not-found case is not visible in this lossy
   extract — confirm against the full source.  */
8884 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
/* CB is really the enclosing omp_taskcopy_context ("inheritance", see the
   struct comment above).  */
8886   struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
8888   if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
8889     return create_tmp_var (TREE_TYPE (var
));
/* Build a remapped copy of record ORIG_TYPE for the task copy function:
   a new RECORD_TYPE with the same name whose fields are copies of
   ORIG_TYPE's fields with variably-modified sizes/offsets remapped through
   TCCTX->cb.  Each old field is recorded in tcctx->cb.decl_map so later
   lookups (see create_task_copyfn) can translate field decls.
   NOTE(review): lossy extract — the loop braces, the third walk_tree
   argument list tail and the final return are not visible here.  */
8895 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
8897   tree name
, new_fields
= NULL
, type
, f
;
8899   type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
/* Give the new record the same name as the original, located at the
   task statement.  */
8900   name
= DECL_NAME (TYPE_NAME (orig_type
));
8901   name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
8902 		     TYPE_DECL
, name
, type
);
8903   TYPE_NAME (type
) = name
;
8905   for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
8907       tree new_f
= copy_node (f
);
8908       DECL_CONTEXT (new_f
) = type
;
8909       TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
/* Fields are prepended, hence the nreverse below.  */
8910       TREE_CHAIN (new_f
) = new_fields
;
/* Remap any trees referenced inside the field's size/offset
   (needed for variably modified types).  */
8911       walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
8912       walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
8913       walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
/* Remember old-field -> new-field for later lookups.  */
8916       tcctx
->cb
.decl_map
->put (f
, new_f
);
8918   TYPE_FIELDS (type
) = nreverse (new_fields
);
8923 /* Create task copyfn.  */
/* Builds the body of the task copy function attached to TASK_STMT
   (gimple_omp_task_copy_fn): a function taking a destination record
   pointer ARG and a source record pointer SARG that copies/constructs
   the data-sharing fields between them.  If either record type is
   variably modified, the types are first remapped via
   task_copyfn_remap_type using an omp_taskcopy_context.
   NOTE(review): lossy extract — braces, `break;`s between switch cases,
   several declarations (p, n, key, ind, df, ptr) and some lines are not
   visible here; only comments were added, code tokens untouched.  */
8926 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
8928   struct function
*child_cfun
;
8929   tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
8930   tree record_type
, srecord_type
, bind
, list
;
8931   bool record_needs_remap
= false, srecord_needs_remap
= false;
8933   struct omp_taskcopy_context tcctx
;
8934   location_t loc
= gimple_location (task_stmt
);
8935   size_t looptempno
= 0;
8937   child_fn
= gimple_omp_task_copy_fn (task_stmt
);
8938   child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
/* The copy function must not have been expanded yet.  */
8939   gcc_assert (child_cfun
->cfg
== NULL
);
8940   DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
8942   /* Reset DECL_CONTEXT on function arguments.  */
8943   for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
8944     DECL_CONTEXT (t
) = child_fn
;
8946   /* Populate the function.  */
8947   push_gimplify_context ();
8948   push_cfun (child_cfun
);
8950   bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
8951   TREE_SIDE_EFFECTS (bind
) = 1;
8953   DECL_SAVED_TREE (child_fn
) = bind
;
8954   DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
8956   /* Remap src and dst argument types if needed.  */
8957   record_type
= ctx
->record_type
;
8958   srecord_type
= ctx
->srecord_type
;
/* A record needs remapping iff any of its fields has a variably
   modified type relative to the source function.  */
8959   for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
8960     if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
8962 	record_needs_remap
= true;
8965   for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
8966     if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
8968 	srecord_needs_remap
= true;
8972   if (record_needs_remap
|| srecord_needs_remap
)
/* Set up a copy_body_data for remapping decls/types into CHILD_FN.  */
8974       memset (&tcctx
, '\0', sizeof (tcctx
));
8975       tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
8976       tcctx
.cb
.dst_fn
= child_fn
;
8977       tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
8978       gcc_checking_assert (tcctx
.cb
.src_node
);
8979       tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
8980       tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
8981       tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
8982       tcctx
.cb
.eh_lp_nr
= 0;
8983       tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
8984       tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
8987       if (record_needs_remap
)
8988 	record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
8989       if (srecord_needs_remap
)
8990 	srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
/* decl_map == NULL signals "no remapping happened" to the passes
   below.  */
8993     tcctx
.cb
.decl_map
= NULL
;
/* Retype the two pointer arguments to the (possibly remapped)
   record types: ARG = destination, SARG = source.  */
8995   arg
= DECL_ARGUMENTS (child_fn
);
8996   TREE_TYPE (arg
) = build_pointer_type (record_type
);
8997   sarg
= DECL_CHAIN (arg
);
8998   TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
9000   /* First pass: initialize temporaries used in record_type and srecord_type
9001      sizes and field offsets.  */
9002   if (tcctx
.cb
.decl_map
)
9003     for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
9004       if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
9008 	  decl
= OMP_CLAUSE_DECL (c
);
9009 	  p
= tcctx
.cb
.decl_map
->get (decl
);
/* Load the remapped size temporary from the source record.  */
9012 	  n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
9013 	  sf
= (tree
) n
->value
;
9014 	  sf
= *tcctx
.cb
.decl_map
->get (sf
);
9015 	  src
= build_simple_mem_ref_loc (loc
, sarg
);
9016 	  src
= omp_build_component_ref (src
, sf
);
9017 	  t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
9018 	  append_to_statement_list (t
, &list
);
9021   /* Second pass: copy shared var pointers and copy construct non-VLA
9022      firstprivate vars.  */
9023   for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
9024     switch (OMP_CLAUSE_CODE (c
))
9027       case OMP_CLAUSE_SHARED
:
9028 	decl
= OMP_CLAUSE_DECL (c
);
9029 	key
= (splay_tree_key
) decl
;
/* SHARED with FIRSTPRIVATE semantics is keyed by &DECL_UID.  */
9030 	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
9031 	  key
= (splay_tree_key
) &DECL_UID (decl
);
9032 	n
= splay_tree_lookup (ctx
->field_map
, key
);
/* Translate both field decls through decl_map if the record types
   were remapped.  */
9035 	f
= (tree
) n
->value
;
9036 	if (tcctx
.cb
.decl_map
)
9037 	  f
= *tcctx
.cb
.decl_map
->get (f
);
9038 	n
= splay_tree_lookup (ctx
->sfield_map
, key
);
9039 	sf
= (tree
) n
->value
;
9040 	if (tcctx
.cb
.decl_map
)
9041 	  sf
= *tcctx
.cb
.decl_map
->get (sf
);
/* dst->f = src->sf: copy the shared variable's pointer.  */
9042 	src
= build_simple_mem_ref_loc (loc
, sarg
);
9043 	src
= omp_build_component_ref (src
, sf
);
9044 	dst
= build_simple_mem_ref_loc (loc
, arg
);
9045 	dst
= omp_build_component_ref (dst
, f
);
9046 	t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
9047 	append_to_statement_list (t
, &list
);
9049       case OMP_CLAUSE_REDUCTION
:
9050       case OMP_CLAUSE_IN_REDUCTION
:
9051 	decl
= OMP_CLAUSE_DECL (c
);
/* Strip an array-section MEM_REF down to its base decl.  */
9052 	if (TREE_CODE (decl
) == MEM_REF
)
9054 	    decl
= TREE_OPERAND (decl
, 0);
9055 	    if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
9056 	      decl
= TREE_OPERAND (decl
, 0);
9057 	    if (TREE_CODE (decl
) == INDIRECT_REF
9058 		|| TREE_CODE (decl
) == ADDR_EXPR
)
9059 	      decl
= TREE_OPERAND (decl
, 0);
9061 	key
= (splay_tree_key
) decl
;
9062 	n
= splay_tree_lookup (ctx
->field_map
, key
);
9065 	f
= (tree
) n
->value
;
9066 	if (tcctx
.cb
.decl_map
)
9067 	  f
= *tcctx
.cb
.decl_map
->get (f
);
9068 	n
= splay_tree_lookup (ctx
->sfield_map
, key
);
9069 	sf
= (tree
) n
->value
;
9070 	if (tcctx
.cb
.decl_map
)
9071 	  sf
= *tcctx
.cb
.decl_map
->get (sf
);
9072 	src
= build_simple_mem_ref_loc (loc
, sarg
);
9073 	src
= omp_build_component_ref (src
, sf
);
/* Extra dereference when the base was stripped from a reference to
   pointer.  */
9074 	if (decl
!= OMP_CLAUSE_DECL (c
)
9075 	    && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
9076 	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
9077 	  src
= build_simple_mem_ref_loc (loc
, src
);
9078 	dst
= build_simple_mem_ref_loc (loc
, arg
);
9079 	dst
= omp_build_component_ref (dst
, f
);
9080 	t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
9081 	append_to_statement_list (t
, &list
);
9083       case OMP_CLAUSE__LOOPTEMP_
:
9084 	/* Fields for first two _looptemp_ clauses are initialized by
9085 	   GOMP_taskloop*, the rest are handled like firstprivate.  */
9092       case OMP_CLAUSE__REDUCTEMP_
:
9093       case OMP_CLAUSE_FIRSTPRIVATE
:
9094 	decl
= OMP_CLAUSE_DECL (c
);
/* VLAs are deferred to the last pass below.  */
9095 	if (is_variable_sized (decl
))
9097 	n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
9100 	    f
= (tree
) n
->value
;
9101 	    if (tcctx
.cb
.decl_map
)
9102 	      f
= *tcctx
.cb
.decl_map
->get (f
);
9103 	    n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
9106 		sf
= (tree
) n
->value
;
9107 		if (tcctx
.cb
.decl_map
)
9108 		  sf
= *tcctx
.cb
.decl_map
->get (sf
);
9109 		src
= build_simple_mem_ref_loc (loc
, sarg
);
9110 		src
= omp_build_component_ref (src
, sf
);
9111 		if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
9112 		  src
= build_simple_mem_ref_loc (loc
, src
);
9116 	dst
= build_simple_mem_ref_loc (loc
, arg
);
9117 	dst
= omp_build_component_ref (dst
, f
);
/* FIRSTPRIVATE uses the language's copy constructor hook; the other
   clauses here are plain assignments.  */
9118 	if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
9119 	  t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
9121 	  t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
9122 	append_to_statement_list (t
, &list
);
9124       case OMP_CLAUSE_PRIVATE
:
9125 	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
9127 	decl
= OMP_CLAUSE_DECL (c
);
9128 	n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
9129 	f
= (tree
) n
->value
;
9130 	if (tcctx
.cb
.decl_map
)
9131 	  f
= *tcctx
.cb
.decl_map
->get (f
);
9132 	n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
9135 	    sf
= (tree
) n
->value
;
9136 	    if (tcctx
.cb
.decl_map
)
9137 	      sf
= *tcctx
.cb
.decl_map
->get (sf
);
9138 	    src
= build_simple_mem_ref_loc (loc
, sarg
);
9139 	    src
= omp_build_component_ref (src
, sf
);
9140 	    if (use_pointer_for_field (decl
, NULL
))
9141 	      src
= build_simple_mem_ref_loc (loc
, src
);
9145 	dst
= build_simple_mem_ref_loc (loc
, arg
);
9146 	dst
= omp_build_component_ref (dst
, f
);
9147 	t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
9148 	append_to_statement_list (t
, &list
);
9154   /* Last pass: handle VLA firstprivates.  */
9155   if (tcctx
.cb
.decl_map
)
9156     for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
9157       if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
9161 	  decl
= OMP_CLAUSE_DECL (c
);
9162 	  if (!is_variable_sized (decl
))
9164 	  n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
9167 	  f
= (tree
) n
->value
;
9168 	  f
= *tcctx
.cb
.decl_map
->get (f
);
/* A VLA firstprivate is accessed through a pointer: its value expr
   is *ptr_decl; locate the fields for that underlying pointer.  */
9169 	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
9170 	  ind
= DECL_VALUE_EXPR (decl
);
9171 	  gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
9172 	  gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
9173 	  n
= splay_tree_lookup (ctx
->sfield_map
,
9174 				 (splay_tree_key
) TREE_OPERAND (ind
, 0));
9175 	  sf
= (tree
) n
->value
;
9176 	  sf
= *tcctx
.cb
.decl_map
->get (sf
);
9177 	  src
= build_simple_mem_ref_loc (loc
, sarg
);
9178 	  src
= omp_build_component_ref (src
, sf
);
9179 	  src
= build_simple_mem_ref_loc (loc
, src
);
9180 	  dst
= build_simple_mem_ref_loc (loc
, arg
);
9181 	  dst
= omp_build_component_ref (dst
, f
);
9182 	  t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
9183 	  append_to_statement_list (t
, &list
);
/* Also store the address of the copied data into the destination's
   pointer field.  */
9184 	  n
= splay_tree_lookup (ctx
->field_map
,
9185 				 (splay_tree_key
) TREE_OPERAND (ind
, 0));
9186 	  df
= (tree
) n
->value
;
9187 	  df
= *tcctx
.cb
.decl_map
->get (df
);
9188 	  ptr
= build_simple_mem_ref_loc (loc
, arg
);
9189 	  ptr
= omp_build_component_ref (ptr
, df
);
9190 	  t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
9191 		      build_fold_addr_expr_loc (loc
, dst
));
9192 	  append_to_statement_list (t
, &list
);
9195   t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
9196   append_to_statement_list (t
, &list
);
9198   if (tcctx
.cb
.decl_map
)
9199     delete tcctx
.cb
.decl_map
;
9200   pop_gimplify_context (NULL
);
9201   BIND_EXPR_BODY (bind
) = list
;
/* Lower OMP_CLAUSE_DEPEND clauses in *PCLAUSES: count the dependences by
   kind, materialize an addressable array of their addresses (with a small
   header of counts at the front), emit the initializing stores into ISEQ
   and a clobber of the array into OSEQ, and prepend a single
   OMP_CLAUSE_DEPEND_LAST clause pointing at the array.
   NOTE(review): lossy extract — declarations of c/clauses/g, the per-kind
   count increments, `break;`s and several argument-list tails are not
   visible here; only comments were added, code tokens untouched.  */
9206 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
/* cnt[] buckets the dependences by kind; idx is the header size
   (number of leading bookkeeping slots in the array).  */
9210   size_t cnt
[4] = { 0, 0, 0, 0 }, idx
= 2, i
;
9212   clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
9213   gcc_assert (clauses
);
/* First walk: classify and count each depend clause.  */
9214   for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
9215     if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
9216       switch (OMP_CLAUSE_DEPEND_KIND (c
))
9218 	case OMP_CLAUSE_DEPEND_LAST
:
9219 	  /* Lowering already done at gimplification.  */
9221 	case OMP_CLAUSE_DEPEND_IN
:
9224 	case OMP_CLAUSE_DEPEND_OUT
:
9225 	case OMP_CLAUSE_DEPEND_INOUT
:
9228 	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
9231 	case OMP_CLAUSE_DEPEND_DEPOBJ
:
9234 	case OMP_CLAUSE_DEPEND_SOURCE
:
9235 	case OMP_CLAUSE_DEPEND_SINK
:
/* Non-empty mutexinoutset/depobj buckets force the extended (5-slot
   header) layout; NOTE(review): the idx adjustment itself is in lines
   dropped from this extract.  */
9240   if (cnt
[1] || cnt
[3])
9242   size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3];
/* One pointer slot per dependence plus the header.  */
9243   tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
);
9244   tree array
= create_tmp_var (type
);
9245   TREE_ADDRESSABLE (array
) = 1;
9246   tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
/* array[0] = 0, array[1] = total, then the per-kind counts.  */
9250   g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
9251   gimple_seq_add_stmt (iseq
, g
);
9252   r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
9255   g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
9256   gimple_seq_add_stmt (iseq
, g
);
9257   for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
9259       r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
9260 		  size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
9261       g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
9262       gimple_seq_add_stmt (iseq
, g
);
/* Second walk (per bucket i): store the address of each dependence into
   the next array slot, in bucket order.  */
9264   for (i
= 0; i
< 4; i
++)
9268       for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
9269 	if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
9273 	    switch (OMP_CLAUSE_DEPEND_KIND (c
))
9275 	      case OMP_CLAUSE_DEPEND_IN
:
9279 	      case OMP_CLAUSE_DEPEND_OUT
:
9280 	      case OMP_CLAUSE_DEPEND_INOUT
:
9284 	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
9288 	      case OMP_CLAUSE_DEPEND_DEPOBJ
:
9295 	    tree t
= OMP_CLAUSE_DECL (c
);
9296 	    t
= fold_convert (ptr_type_node
, t
);
9297 	    gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
9298 	    r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
9299 			NULL_TREE
, NULL_TREE
);
9300 	    g
= gimple_build_assign (r
, t
);
9301 	    gimple_seq_add_stmt (iseq
, g
);
/* Replace the original depend clauses with a single DEPEND_LAST clause
   whose decl is the address of the array.  */
9304   c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
9305   OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
9306   OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
9307   OMP_CLAUSE_CHAIN (c
) = *pclauses
;
/* Clobber the array after the region so its stack slot can be reused.  */
9309   tree clobber
= build_constructor (type
, NULL
);
9310   TREE_THIS_VOLATILE (clobber
) = 1;
9311   g
= gimple_build_assign (array
, clobber
);
9312   gimple_seq_add_stmt (oseq
, g
);
9315 /* Lower the OpenMP parallel or task directive in the current statement
9316    in GSI_P.  CTX holds context information for the directive.  */
/* NOTE(review): lossy extract — declarations (clauses, child_fn),
   several braces and lines (e.g. the taskwait_p early-exit body, the
   sender_decl assignment head) were dropped; only comments were added,
   code tokens untouched.  */
9319 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9323   gimple
*stmt
= gsi_stmt (*gsi_p
);
9324   gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
9325   gimple_seq par_body
;
9326   location_t loc
= gimple_location (stmt
);
9328   clauses
= gimple_omp_taskreg_clauses (stmt
);
/* A bare "taskwait depend(...)" task has no body to lower.  */
9329   if (gimple_code (stmt
) == GIMPLE_OMP_TASK
9330       && gimple_omp_task_taskwait_p (stmt
))
9338     = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
9339   par_body
= gimple_bind_body (par_bind
);
9341   child_fn
= ctx
->cb
.dst_fn
;
/* Detect a parallel whose body is exactly one workshare construct and
   mark it combined (see check_combined_parallel above).  */
9342   if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
9343       && !gimple_omp_parallel_combined_p (stmt
))
9345       struct walk_stmt_info wi
;
9348       memset (&wi
, 0, sizeof (wi
));
9351       walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
9353 	gimple_omp_parallel_set_combined_p (stmt
, true);
/* Lower depend clauses of a task into an explicit dependence array,
   wrapping the construct in DEP_BIND.  */
9355   gimple_seq dep_ilist
= NULL
;
9356   gimple_seq dep_olist
= NULL
;
9357   if (gimple_code (stmt
) == GIMPLE_OMP_TASK
9358       && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
9360       push_gimplify_context ();
9361       dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
9362       lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
9363 			    &dep_ilist
, &dep_olist
);
/* For taskwait-with-depend, just emit the depend bookkeeping around the
   statement and return; there is no body to process.  */
9366   if (gimple_code (stmt
) == GIMPLE_OMP_TASK
9367       && gimple_omp_task_taskwait_p (stmt
))
9371 	  gsi_replace (gsi_p
, dep_bind
, true);
9372 	  gimple_bind_add_seq (dep_bind
, dep_ilist
);
9373 	  gimple_bind_add_stmt (dep_bind
, stmt
);
9374 	  gimple_bind_add_seq (dep_bind
, dep_olist
);
9375 	  pop_gimplify_context (dep_bind
);
/* A task with a shared-record type needs a copy function (see
   create_task_copyfn above).  */
9380   if (ctx
->srecord_type
)
9381     create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
/* Task reductions for taskloop, and _reductemp_ for parallel, get their
   own init/fini sequences placed inside DEP_BIND.  */
9383   gimple_seq tskred_ilist
= NULL
;
9384   gimple_seq tskred_olist
= NULL
;
9385   if ((is_task_ctx (ctx
)
9386        && gimple_omp_task_taskloop_p (ctx
->stmt
)
9387        && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
9388 			   OMP_CLAUSE_REDUCTION
))
9389       || (is_parallel_ctx (ctx
)
9390 	  && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
9391 			      OMP_CLAUSE__REDUCTEMP_
)))
9393       if (dep_bind
== NULL
)
9395 	  push_gimplify_context ();
9396 	  dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
9398       lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
9400 				 gimple_omp_taskreg_clauses (ctx
->stmt
),
9401 				 &tskred_ilist
, &tskred_olist
);
9404   push_gimplify_context ();
9406   gimple_seq par_olist
= NULL
;
9407   gimple_seq par_ilist
= NULL
;
9408   gimple_seq par_rlist
= NULL
;
/* Grid-phony parallels are lowered in place rather than outlined.  */
9409   bool phony_construct
= gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
9410     && gimple_omp_parallel_grid_phony (as_a
<gomp_parallel
*> (stmt
));
9411   if (phony_construct
&& ctx
->record_type
)
9413       gcc_checking_assert (!ctx
->receiver_decl
);
9414       ctx
->receiver_decl
= create_tmp_var
9415 	(build_reference_type (ctx
->record_type
), ".omp_rec");
/* Lower data-sharing clauses and the body itself.  */
9417   lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
9418   lower_omp (&par_body
, ctx
);
9419   if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
)
9420     lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
9422   /* Declare all the variables created by mapping and the variables
9423      declared in the scope of the parallel body.  */
9424   record_vars_into (ctx
->block_vars
, child_fn
);
9425   maybe_remove_omp_member_access_dummy_vars (par_bind
);
9426   record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
/* Create the sender record (.omp_data_o) used to marshal shared data to
   the outlined child function.  */
9428   if (ctx
->record_type
)
9431 	= create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
9432 			  : ctx
->record_type
, ".omp_data_o");
9433       DECL_NAMELESS (ctx
->sender_decl
) = 1;
9434       TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
9435       gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
9438   gimple_seq olist
= NULL
;
9439   gimple_seq ilist
= NULL
;
9440   lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
9441   lower_send_shared_vars (&ilist
, &olist
, ctx
);
/* Clobber the sender record after the region.  */
9443   if (ctx
->record_type
)
9445       tree clobber
= build_constructor (TREE_TYPE (ctx
->sender_decl
), NULL
);
9446       TREE_THIS_VOLATILE (clobber
) = 1;
9447       gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
9451   /* Once all the expansions are done, sequence all the different
9452      fragments inside gimple_omp_body.  */
9454   gimple_seq new_body
= NULL
;
9456   if (ctx
->record_type
)
9458       t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
9459       /* fixup_child_record_type might have changed receiver_decl's type.  */
9460       t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
9461       gimple_seq_add_stmt (&new_body
,
9462 			   gimple_build_assign (ctx
->receiver_decl
, t
));
9465   gimple_seq_add_seq (&new_body
, par_ilist
);
9466   gimple_seq_add_seq (&new_body
, par_body
);
9467   gimple_seq_add_seq (&new_body
, par_rlist
);
9468   if (ctx
->cancellable
)
9469     gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
9470   gimple_seq_add_seq (&new_body
, par_olist
);
9471   new_body
= maybe_catch_exception (new_body
);
9472   if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
9473     gimple_seq_add_stmt (&new_body
,
9474 			 gimple_build_omp_continue (integer_zero_node
,
9475 						    integer_zero_node
));
9476   if (!phony_construct
)
9478       gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
9479       gimple_omp_set_body (stmt
, new_body
);
/* Wrap everything in a bind; reuse PAR_BIND's block unless DEP_BIND
   already owns one.  */
9482   if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
9483     bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
9485     bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
9486   gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
9487   gimple_bind_add_seq (bind
, ilist
);
9488   if (!phony_construct
)
9489     gimple_bind_add_stmt (bind
, stmt
);
9491   gimple_bind_add_seq (bind
, new_body
);
9492   gimple_bind_add_seq (bind
, olist
);
9494   pop_gimplify_context (NULL
);
/* When DEP_BIND exists, nest BIND inside it between the depend/taskred
   init and fini sequences.  */
9498       gimple_bind_add_seq (dep_bind
, dep_ilist
);
9499       gimple_bind_add_seq (dep_bind
, tskred_ilist
);
9500       gimple_bind_add_stmt (dep_bind
, bind
);
9501       gimple_bind_add_seq (dep_bind
, tskred_olist
);
9502       gimple_bind_add_seq (dep_bind
, dep_olist
);
9503       pop_gimplify_context (dep_bind
);
9507 /* Lower the GIMPLE_OMP_TARGET in the current statement
9508 in GSI_P. CTX holds context information for the directive. */
9511 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9514 tree child_fn
, t
, c
;
9515 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
9516 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
9517 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
9518 location_t loc
= gimple_location (stmt
);
9519 bool offloaded
, data_region
;
9520 unsigned int map_cnt
= 0;
9522 offloaded
= is_gimple_omp_offloaded (stmt
);
9523 switch (gimple_omp_target_kind (stmt
))
9525 case GF_OMP_TARGET_KIND_REGION
:
9526 case GF_OMP_TARGET_KIND_UPDATE
:
9527 case GF_OMP_TARGET_KIND_ENTER_DATA
:
9528 case GF_OMP_TARGET_KIND_EXIT_DATA
:
9529 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
9530 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
9531 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
9532 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
9533 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
9534 data_region
= false;
9536 case GF_OMP_TARGET_KIND_DATA
:
9537 case GF_OMP_TARGET_KIND_OACC_DATA
:
9538 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
9545 clauses
= gimple_omp_target_clauses (stmt
);
9547 gimple_seq dep_ilist
= NULL
;
9548 gimple_seq dep_olist
= NULL
;
9549 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
9551 push_gimplify_context ();
9552 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
9553 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
9554 &dep_ilist
, &dep_olist
);
9561 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
9562 tgt_body
= gimple_bind_body (tgt_bind
);
9564 else if (data_region
)
9565 tgt_body
= gimple_omp_body (stmt
);
9566 child_fn
= ctx
->cb
.dst_fn
;
9568 push_gimplify_context ();
9571 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
9572 switch (OMP_CLAUSE_CODE (c
))
9578 case OMP_CLAUSE_MAP
:
9580 /* First check what we're prepared to handle in the following. */
9581 switch (OMP_CLAUSE_MAP_KIND (c
))
9583 case GOMP_MAP_ALLOC
:
9586 case GOMP_MAP_TOFROM
:
9587 case GOMP_MAP_POINTER
:
9588 case GOMP_MAP_TO_PSET
:
9589 case GOMP_MAP_DELETE
:
9590 case GOMP_MAP_RELEASE
:
9591 case GOMP_MAP_ALWAYS_TO
:
9592 case GOMP_MAP_ALWAYS_FROM
:
9593 case GOMP_MAP_ALWAYS_TOFROM
:
9594 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
9595 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
9596 case GOMP_MAP_STRUCT
:
9597 case GOMP_MAP_ALWAYS_POINTER
:
9599 case GOMP_MAP_FORCE_ALLOC
:
9600 case GOMP_MAP_FORCE_TO
:
9601 case GOMP_MAP_FORCE_FROM
:
9602 case GOMP_MAP_FORCE_TOFROM
:
9603 case GOMP_MAP_FORCE_PRESENT
:
9604 case GOMP_MAP_FORCE_DEVICEPTR
:
9605 case GOMP_MAP_DEVICE_RESIDENT
:
9607 gcc_assert (is_gimple_omp_oacc (stmt
));
9615 case OMP_CLAUSE_FROM
:
9617 var
= OMP_CLAUSE_DECL (c
);
9620 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
9621 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
9622 && (OMP_CLAUSE_MAP_KIND (c
)
9623 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
9629 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
9631 tree var2
= DECL_VALUE_EXPR (var
);
9632 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
9633 var2
= TREE_OPERAND (var2
, 0);
9634 gcc_assert (DECL_P (var2
));
9639 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9640 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9641 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9643 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
9645 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
9646 && varpool_node::get_create (var
)->offloadable
)
9649 tree type
= build_pointer_type (TREE_TYPE (var
));
9650 tree new_var
= lookup_decl (var
, ctx
);
9651 x
= create_tmp_var_raw (type
, get_name (new_var
));
9652 gimple_add_tmp_var (x
);
9653 x
= build_simple_mem_ref (x
);
9654 SET_DECL_VALUE_EXPR (new_var
, x
);
9655 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9660 if (!maybe_lookup_field (var
, ctx
))
9663 /* Don't remap oacc parallel reduction variables, because the
9664 intermediate result must be local to each gang. */
9665 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9666 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
9668 x
= build_receiver_ref (var
, true, ctx
);
9669 tree new_var
= lookup_decl (var
, ctx
);
9671 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9672 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
9673 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
9674 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
9675 x
= build_simple_mem_ref (x
);
9676 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
9678 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
9679 if (omp_is_reference (new_var
)
9680 && TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
)
9682 /* Create a local object to hold the instance
9684 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
9685 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
9686 tree inst
= create_tmp_var (type
, id
);
9687 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
9688 x
= build_fold_addr_expr (inst
);
9690 gimplify_assign (new_var
, x
, &fplist
);
9692 else if (DECL_P (new_var
))
9694 SET_DECL_VALUE_EXPR (new_var
, x
);
9695 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9703 case OMP_CLAUSE_FIRSTPRIVATE
:
9704 if (is_oacc_parallel (ctx
))
9705 goto oacc_firstprivate
;
9707 var
= OMP_CLAUSE_DECL (c
);
9708 if (!omp_is_reference (var
)
9709 && !is_gimple_reg_type (TREE_TYPE (var
)))
9711 tree new_var
= lookup_decl (var
, ctx
);
9712 if (is_variable_sized (var
))
9714 tree pvar
= DECL_VALUE_EXPR (var
);
9715 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
9716 pvar
= TREE_OPERAND (pvar
, 0);
9717 gcc_assert (DECL_P (pvar
));
9718 tree new_pvar
= lookup_decl (pvar
, ctx
);
9719 x
= build_fold_indirect_ref (new_pvar
);
9720 TREE_THIS_NOTRAP (x
) = 1;
9723 x
= build_receiver_ref (var
, true, ctx
);
9724 SET_DECL_VALUE_EXPR (new_var
, x
);
9725 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9729 case OMP_CLAUSE_PRIVATE
:
9730 if (is_gimple_omp_oacc (ctx
->stmt
))
9732 var
= OMP_CLAUSE_DECL (c
);
9733 if (is_variable_sized (var
))
9735 tree new_var
= lookup_decl (var
, ctx
);
9736 tree pvar
= DECL_VALUE_EXPR (var
);
9737 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
9738 pvar
= TREE_OPERAND (pvar
, 0);
9739 gcc_assert (DECL_P (pvar
));
9740 tree new_pvar
= lookup_decl (pvar
, ctx
);
9741 x
= build_fold_indirect_ref (new_pvar
);
9742 TREE_THIS_NOTRAP (x
) = 1;
9743 SET_DECL_VALUE_EXPR (new_var
, x
);
9744 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9748 case OMP_CLAUSE_USE_DEVICE_PTR
:
9749 case OMP_CLAUSE_IS_DEVICE_PTR
:
9750 var
= OMP_CLAUSE_DECL (c
);
9752 if (is_variable_sized (var
))
9754 tree new_var
= lookup_decl (var
, ctx
);
9755 tree pvar
= DECL_VALUE_EXPR (var
);
9756 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
9757 pvar
= TREE_OPERAND (pvar
, 0);
9758 gcc_assert (DECL_P (pvar
));
9759 tree new_pvar
= lookup_decl (pvar
, ctx
);
9760 x
= build_fold_indirect_ref (new_pvar
);
9761 TREE_THIS_NOTRAP (x
) = 1;
9762 SET_DECL_VALUE_EXPR (new_var
, x
);
9763 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9765 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
9767 tree new_var
= lookup_decl (var
, ctx
);
9768 tree type
= build_pointer_type (TREE_TYPE (var
));
9769 x
= create_tmp_var_raw (type
, get_name (new_var
));
9770 gimple_add_tmp_var (x
);
9771 x
= build_simple_mem_ref (x
);
9772 SET_DECL_VALUE_EXPR (new_var
, x
);
9773 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9777 tree new_var
= lookup_decl (var
, ctx
);
9778 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
9779 gimple_add_tmp_var (x
);
9780 SET_DECL_VALUE_EXPR (new_var
, x
);
9781 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9788 target_nesting_level
++;
9789 lower_omp (&tgt_body
, ctx
);
9790 target_nesting_level
--;
9792 else if (data_region
)
9793 lower_omp (&tgt_body
, ctx
);
9797 /* Declare all the variables created by mapping and the variables
9798 declared in the scope of the target body. */
9799 record_vars_into (ctx
->block_vars
, child_fn
);
9800 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
9801 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
9806 if (ctx
->record_type
)
9809 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
9810 DECL_NAMELESS (ctx
->sender_decl
) = 1;
9811 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
9812 t
= make_tree_vec (3);
9813 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
9815 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
9817 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
9818 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
9819 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
9820 tree tkind_type
= short_unsigned_type_node
;
9821 int talign_shift
= 8;
9823 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
9825 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
9826 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
9827 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
9828 gimple_omp_target_set_data_arg (stmt
, t
);
9830 vec
<constructor_elt
, va_gc
> *vsize
;
9831 vec
<constructor_elt
, va_gc
> *vkind
;
9832 vec_alloc (vsize
, map_cnt
);
9833 vec_alloc (vkind
, map_cnt
);
9834 unsigned int map_idx
= 0;
9836 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
9837 switch (OMP_CLAUSE_CODE (c
))
9839 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
9840 unsigned int talign
;
9845 case OMP_CLAUSE_MAP
:
9847 case OMP_CLAUSE_FROM
:
9848 oacc_firstprivate_map
:
9850 ovar
= OMP_CLAUSE_DECL (c
);
9851 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9852 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9853 || (OMP_CLAUSE_MAP_KIND (c
)
9854 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
9858 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9859 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
9861 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
9862 == get_base_address (ovar
));
9863 nc
= OMP_CLAUSE_CHAIN (c
);
9864 ovar
= OMP_CLAUSE_DECL (nc
);
9868 tree x
= build_sender_ref (ovar
, ctx
);
9870 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
9871 gimplify_assign (x
, v
, &ilist
);
9877 if (DECL_SIZE (ovar
)
9878 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
9880 tree ovar2
= DECL_VALUE_EXPR (ovar
);
9881 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
9882 ovar2
= TREE_OPERAND (ovar2
, 0);
9883 gcc_assert (DECL_P (ovar2
));
9886 if (!maybe_lookup_field (ovar
, ctx
))
9890 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
9891 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
9892 talign
= DECL_ALIGN_UNIT (ovar
);
9895 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
9896 x
= build_sender_ref (ovar
, ctx
);
9898 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9899 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
9900 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
9901 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
9903 gcc_assert (offloaded
);
9905 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
9906 mark_addressable (avar
);
9907 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
9908 talign
= DECL_ALIGN_UNIT (avar
);
9909 avar
= build_fold_addr_expr (avar
);
9910 gimplify_assign (x
, avar
, &ilist
);
9912 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
9914 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
9915 if (!omp_is_reference (var
))
9917 if (is_gimple_reg (var
)
9918 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
9919 TREE_NO_WARNING (var
) = 1;
9920 var
= build_fold_addr_expr (var
);
9923 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
9924 gimplify_assign (x
, var
, &ilist
);
9926 else if (is_gimple_reg (var
))
9928 gcc_assert (offloaded
);
9929 tree avar
= create_tmp_var (TREE_TYPE (var
));
9930 mark_addressable (avar
);
9931 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
9932 if (GOMP_MAP_COPY_TO_P (map_kind
)
9933 || map_kind
== GOMP_MAP_POINTER
9934 || map_kind
== GOMP_MAP_TO_PSET
9935 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
9937 /* If we need to initialize a temporary
9938 with VAR because it is not addressable, and
9939 the variable hasn't been initialized yet, then
9940 we'll get a warning for the store to avar.
9941 Don't warn in that case, the mapping might
9943 TREE_NO_WARNING (var
) = 1;
9944 gimplify_assign (avar
, var
, &ilist
);
9946 avar
= build_fold_addr_expr (avar
);
9947 gimplify_assign (x
, avar
, &ilist
);
9948 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
9949 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
9950 && !TYPE_READONLY (TREE_TYPE (var
)))
9952 x
= unshare_expr (x
);
9953 x
= build_simple_mem_ref (x
);
9954 gimplify_assign (var
, x
, &olist
);
9959 var
= build_fold_addr_expr (var
);
9960 gimplify_assign (x
, var
, &ilist
);
9964 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
9966 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
9967 s
= TREE_TYPE (ovar
);
9968 if (TREE_CODE (s
) == REFERENCE_TYPE
)
9970 s
= TYPE_SIZE_UNIT (s
);
9973 s
= OMP_CLAUSE_SIZE (c
);
9975 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
9976 s
= fold_convert (size_type_node
, s
);
9977 purpose
= size_int (map_idx
++);
9978 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
9979 if (TREE_CODE (s
) != INTEGER_CST
)
9980 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
9982 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
9983 switch (OMP_CLAUSE_CODE (c
))
9985 case OMP_CLAUSE_MAP
:
9986 tkind
= OMP_CLAUSE_MAP_KIND (c
);
9988 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
9991 case GOMP_MAP_ALLOC
:
9994 case GOMP_MAP_TOFROM
:
9995 case GOMP_MAP_ALWAYS_TO
:
9996 case GOMP_MAP_ALWAYS_FROM
:
9997 case GOMP_MAP_ALWAYS_TOFROM
:
9998 case GOMP_MAP_RELEASE
:
9999 case GOMP_MAP_FORCE_TO
:
10000 case GOMP_MAP_FORCE_FROM
:
10001 case GOMP_MAP_FORCE_TOFROM
:
10002 case GOMP_MAP_FORCE_PRESENT
:
10003 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
10005 case GOMP_MAP_DELETE
:
10006 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
10010 if (tkind_zero
!= tkind
)
10012 if (integer_zerop (s
))
10013 tkind
= tkind_zero
;
10014 else if (integer_nonzerop (s
))
10015 tkind_zero
= tkind
;
10018 case OMP_CLAUSE_FIRSTPRIVATE
:
10019 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
10020 tkind
= GOMP_MAP_TO
;
10021 tkind_zero
= tkind
;
10023 case OMP_CLAUSE_TO
:
10024 tkind
= GOMP_MAP_TO
;
10025 tkind_zero
= tkind
;
10027 case OMP_CLAUSE_FROM
:
10028 tkind
= GOMP_MAP_FROM
;
10029 tkind_zero
= tkind
;
10032 gcc_unreachable ();
10034 gcc_checking_assert (tkind
10035 < (HOST_WIDE_INT_C (1U) << talign_shift
));
10036 gcc_checking_assert (tkind_zero
10037 < (HOST_WIDE_INT_C (1U) << talign_shift
));
10038 talign
= ceil_log2 (talign
);
10039 tkind
|= talign
<< talign_shift
;
10040 tkind_zero
|= talign
<< talign_shift
;
10041 gcc_checking_assert (tkind
10042 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
10043 gcc_checking_assert (tkind_zero
10044 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
10045 if (tkind
== tkind_zero
)
10046 x
= build_int_cstu (tkind_type
, tkind
);
10049 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
10050 x
= build3 (COND_EXPR
, tkind_type
,
10051 fold_build2 (EQ_EXPR
, boolean_type_node
,
10052 unshare_expr (s
), size_zero_node
),
10053 build_int_cstu (tkind_type
, tkind_zero
),
10054 build_int_cstu (tkind_type
, tkind
));
10056 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
10061 case OMP_CLAUSE_FIRSTPRIVATE
:
10062 if (is_oacc_parallel (ctx
))
10063 goto oacc_firstprivate_map
;
10064 ovar
= OMP_CLAUSE_DECL (c
);
10065 if (omp_is_reference (ovar
))
10066 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
10068 talign
= DECL_ALIGN_UNIT (ovar
);
10069 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
10070 x
= build_sender_ref (ovar
, ctx
);
10071 tkind
= GOMP_MAP_FIRSTPRIVATE
;
10072 type
= TREE_TYPE (ovar
);
10073 if (omp_is_reference (ovar
))
10074 type
= TREE_TYPE (type
);
10075 if ((INTEGRAL_TYPE_P (type
)
10076 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
10077 || TREE_CODE (type
) == POINTER_TYPE
)
10079 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
10081 if (omp_is_reference (var
))
10082 t
= build_simple_mem_ref (var
);
10083 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
10084 TREE_NO_WARNING (var
) = 1;
10085 if (TREE_CODE (type
) != POINTER_TYPE
)
10086 t
= fold_convert (pointer_sized_int_node
, t
);
10087 t
= fold_convert (TREE_TYPE (x
), t
);
10088 gimplify_assign (x
, t
, &ilist
);
10090 else if (omp_is_reference (var
))
10091 gimplify_assign (x
, var
, &ilist
);
10092 else if (is_gimple_reg (var
))
10094 tree avar
= create_tmp_var (TREE_TYPE (var
));
10095 mark_addressable (avar
);
10096 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
10097 TREE_NO_WARNING (var
) = 1;
10098 gimplify_assign (avar
, var
, &ilist
);
10099 avar
= build_fold_addr_expr (avar
);
10100 gimplify_assign (x
, avar
, &ilist
);
10104 var
= build_fold_addr_expr (var
);
10105 gimplify_assign (x
, var
, &ilist
);
10107 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
10109 else if (omp_is_reference (ovar
))
10110 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
10112 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
10113 s
= fold_convert (size_type_node
, s
);
10114 purpose
= size_int (map_idx
++);
10115 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
10116 if (TREE_CODE (s
) != INTEGER_CST
)
10117 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
10119 gcc_checking_assert (tkind
10120 < (HOST_WIDE_INT_C (1U) << talign_shift
));
10121 talign
= ceil_log2 (talign
);
10122 tkind
|= talign
<< talign_shift
;
10123 gcc_checking_assert (tkind
10124 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
10125 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
10126 build_int_cstu (tkind_type
, tkind
));
10129 case OMP_CLAUSE_USE_DEVICE_PTR
:
10130 case OMP_CLAUSE_IS_DEVICE_PTR
:
10131 ovar
= OMP_CLAUSE_DECL (c
);
10132 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
10133 x
= build_sender_ref (ovar
, ctx
);
10134 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
10135 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
10137 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
10138 type
= TREE_TYPE (ovar
);
10139 if (TREE_CODE (type
) == ARRAY_TYPE
)
10140 var
= build_fold_addr_expr (var
);
10143 if (omp_is_reference (ovar
))
10145 type
= TREE_TYPE (type
);
10146 if (TREE_CODE (type
) != ARRAY_TYPE
)
10147 var
= build_simple_mem_ref (var
);
10148 var
= fold_convert (TREE_TYPE (x
), var
);
10151 gimplify_assign (x
, var
, &ilist
);
10153 purpose
= size_int (map_idx
++);
10154 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
10155 gcc_checking_assert (tkind
10156 < (HOST_WIDE_INT_C (1U) << talign_shift
));
10157 gcc_checking_assert (tkind
10158 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
10159 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
10160 build_int_cstu (tkind_type
, tkind
));
10164 gcc_assert (map_idx
== map_cnt
);
10166 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
10167 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
10168 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
10169 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
10170 for (int i
= 1; i
<= 2; i
++)
10171 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
10173 gimple_seq initlist
= NULL
;
10174 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
10175 TREE_VEC_ELT (t
, i
)),
10176 &initlist
, true, NULL_TREE
);
10177 gimple_seq_add_seq (&ilist
, initlist
);
10179 tree clobber
= build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, i
)),
10181 TREE_THIS_VOLATILE (clobber
) = 1;
10182 gimple_seq_add_stmt (&olist
,
10183 gimple_build_assign (TREE_VEC_ELT (t
, i
),
10187 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
10188 TREE_THIS_VOLATILE (clobber
) = 1;
10189 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
10193 /* Once all the expansions are done, sequence all the different
10194 fragments inside gimple_omp_body. */
10199 && ctx
->record_type
)
10201 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
10202 /* fixup_child_record_type might have changed receiver_decl's type. */
10203 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
10204 gimple_seq_add_stmt (&new_body
,
10205 gimple_build_assign (ctx
->receiver_decl
, t
));
10207 gimple_seq_add_seq (&new_body
, fplist
);
10209 if (offloaded
|| data_region
)
10211 tree prev
= NULL_TREE
;
10212 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10213 switch (OMP_CLAUSE_CODE (c
))
10218 case OMP_CLAUSE_FIRSTPRIVATE
:
10219 if (is_gimple_omp_oacc (ctx
->stmt
))
10221 var
= OMP_CLAUSE_DECL (c
);
10222 if (omp_is_reference (var
)
10223 || is_gimple_reg_type (TREE_TYPE (var
)))
10225 tree new_var
= lookup_decl (var
, ctx
);
10227 type
= TREE_TYPE (var
);
10228 if (omp_is_reference (var
))
10229 type
= TREE_TYPE (type
);
10230 if ((INTEGRAL_TYPE_P (type
)
10231 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
10232 || TREE_CODE (type
) == POINTER_TYPE
)
10234 x
= build_receiver_ref (var
, false, ctx
);
10235 if (TREE_CODE (type
) != POINTER_TYPE
)
10236 x
= fold_convert (pointer_sized_int_node
, x
);
10237 x
= fold_convert (type
, x
);
10238 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
10240 if (omp_is_reference (var
))
10242 tree v
= create_tmp_var_raw (type
, get_name (var
));
10243 gimple_add_tmp_var (v
);
10244 TREE_ADDRESSABLE (v
) = 1;
10245 gimple_seq_add_stmt (&new_body
,
10246 gimple_build_assign (v
, x
));
10247 x
= build_fold_addr_expr (v
);
10249 gimple_seq_add_stmt (&new_body
,
10250 gimple_build_assign (new_var
, x
));
10254 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
10255 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
10257 gimple_seq_add_stmt (&new_body
,
10258 gimple_build_assign (new_var
, x
));
10261 else if (is_variable_sized (var
))
10263 tree pvar
= DECL_VALUE_EXPR (var
);
10264 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
10265 pvar
= TREE_OPERAND (pvar
, 0);
10266 gcc_assert (DECL_P (pvar
));
10267 tree new_var
= lookup_decl (pvar
, ctx
);
10268 x
= build_receiver_ref (var
, false, ctx
);
10269 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
10270 gimple_seq_add_stmt (&new_body
,
10271 gimple_build_assign (new_var
, x
));
10274 case OMP_CLAUSE_PRIVATE
:
10275 if (is_gimple_omp_oacc (ctx
->stmt
))
10277 var
= OMP_CLAUSE_DECL (c
);
10278 if (omp_is_reference (var
))
10280 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10281 tree new_var
= lookup_decl (var
, ctx
);
10282 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
10283 if (TREE_CONSTANT (x
))
10285 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
10287 gimple_add_tmp_var (x
);
10288 TREE_ADDRESSABLE (x
) = 1;
10289 x
= build_fold_addr_expr_loc (clause_loc
, x
);
10294 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
10295 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
10296 gimple_seq_add_stmt (&new_body
,
10297 gimple_build_assign (new_var
, x
));
10300 case OMP_CLAUSE_USE_DEVICE_PTR
:
10301 case OMP_CLAUSE_IS_DEVICE_PTR
:
10302 var
= OMP_CLAUSE_DECL (c
);
10303 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
10304 x
= build_sender_ref (var
, ctx
);
10306 x
= build_receiver_ref (var
, false, ctx
);
10307 if (is_variable_sized (var
))
10309 tree pvar
= DECL_VALUE_EXPR (var
);
10310 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
10311 pvar
= TREE_OPERAND (pvar
, 0);
10312 gcc_assert (DECL_P (pvar
));
10313 tree new_var
= lookup_decl (pvar
, ctx
);
10314 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
10315 gimple_seq_add_stmt (&new_body
,
10316 gimple_build_assign (new_var
, x
));
10318 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
10320 tree new_var
= lookup_decl (var
, ctx
);
10321 new_var
= DECL_VALUE_EXPR (new_var
);
10322 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
10323 new_var
= TREE_OPERAND (new_var
, 0);
10324 gcc_assert (DECL_P (new_var
));
10325 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
10326 gimple_seq_add_stmt (&new_body
,
10327 gimple_build_assign (new_var
, x
));
10331 tree type
= TREE_TYPE (var
);
10332 tree new_var
= lookup_decl (var
, ctx
);
10333 if (omp_is_reference (var
))
10335 type
= TREE_TYPE (type
);
10336 if (TREE_CODE (type
) != ARRAY_TYPE
)
10338 tree v
= create_tmp_var_raw (type
, get_name (var
));
10339 gimple_add_tmp_var (v
);
10340 TREE_ADDRESSABLE (v
) = 1;
10341 x
= fold_convert (type
, x
);
10342 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
10344 gimple_seq_add_stmt (&new_body
,
10345 gimple_build_assign (v
, x
));
10346 x
= build_fold_addr_expr (v
);
10349 new_var
= DECL_VALUE_EXPR (new_var
);
10350 x
= fold_convert (TREE_TYPE (new_var
), x
);
10351 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
10352 gimple_seq_add_stmt (&new_body
,
10353 gimple_build_assign (new_var
, x
));
10357 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
10358 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
10359 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
10360 or references to VLAs. */
10361 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10362 switch (OMP_CLAUSE_CODE (c
))
10367 case OMP_CLAUSE_MAP
:
10368 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
10369 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
10371 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10372 poly_int64 offset
= 0;
10374 var
= OMP_CLAUSE_DECL (c
);
10376 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
10377 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
10379 && varpool_node::get_create (var
)->offloadable
)
10381 if (TREE_CODE (var
) == INDIRECT_REF
10382 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
10383 var
= TREE_OPERAND (var
, 0);
10384 if (TREE_CODE (var
) == COMPONENT_REF
)
10386 var
= get_addr_base_and_unit_offset (var
, &offset
);
10387 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
10389 else if (DECL_SIZE (var
)
10390 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
10392 tree var2
= DECL_VALUE_EXPR (var
);
10393 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
10394 var2
= TREE_OPERAND (var2
, 0);
10395 gcc_assert (DECL_P (var2
));
10398 tree new_var
= lookup_decl (var
, ctx
), x
;
10399 tree type
= TREE_TYPE (new_var
);
10401 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
10402 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
10405 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
10407 new_var
= build2 (MEM_REF
, type
,
10408 build_fold_addr_expr (new_var
),
10409 build_int_cst (build_pointer_type (type
),
10412 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
10414 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
10415 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
10416 new_var
= build2 (MEM_REF
, type
,
10417 build_fold_addr_expr (new_var
),
10418 build_int_cst (build_pointer_type (type
),
10422 is_ref
= omp_is_reference (var
);
10423 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
10425 bool ref_to_array
= false;
10428 type
= TREE_TYPE (type
);
10429 if (TREE_CODE (type
) == ARRAY_TYPE
)
10431 type
= build_pointer_type (type
);
10432 ref_to_array
= true;
10435 else if (TREE_CODE (type
) == ARRAY_TYPE
)
10437 tree decl2
= DECL_VALUE_EXPR (new_var
);
10438 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
10439 decl2
= TREE_OPERAND (decl2
, 0);
10440 gcc_assert (DECL_P (decl2
));
10442 type
= TREE_TYPE (new_var
);
10444 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
10445 x
= fold_convert_loc (clause_loc
, type
, x
);
10446 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
10448 tree bias
= OMP_CLAUSE_SIZE (c
);
10450 bias
= lookup_decl (bias
, ctx
);
10451 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
10452 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
10454 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
10455 TREE_TYPE (x
), x
, bias
);
10458 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
10459 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
10460 if (is_ref
&& !ref_to_array
)
10462 tree t
= create_tmp_var_raw (type
, get_name (var
));
10463 gimple_add_tmp_var (t
);
10464 TREE_ADDRESSABLE (t
) = 1;
10465 gimple_seq_add_stmt (&new_body
,
10466 gimple_build_assign (t
, x
));
10467 x
= build_fold_addr_expr_loc (clause_loc
, t
);
10469 gimple_seq_add_stmt (&new_body
,
10470 gimple_build_assign (new_var
, x
));
10473 else if (OMP_CLAUSE_CHAIN (c
)
10474 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
10476 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
10477 == GOMP_MAP_FIRSTPRIVATE_POINTER
10478 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
10479 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
10482 case OMP_CLAUSE_PRIVATE
:
10483 var
= OMP_CLAUSE_DECL (c
);
10484 if (is_variable_sized (var
))
10486 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10487 tree new_var
= lookup_decl (var
, ctx
);
10488 tree pvar
= DECL_VALUE_EXPR (var
);
10489 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
10490 pvar
= TREE_OPERAND (pvar
, 0);
10491 gcc_assert (DECL_P (pvar
));
10492 tree new_pvar
= lookup_decl (pvar
, ctx
);
10493 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
10494 tree al
= size_int (DECL_ALIGN (var
));
10495 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
10496 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
10497 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
10498 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
10499 gimple_seq_add_stmt (&new_body
,
10500 gimple_build_assign (new_pvar
, x
));
10502 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
10504 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10505 tree new_var
= lookup_decl (var
, ctx
);
10506 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
10507 if (TREE_CONSTANT (x
))
10512 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
10513 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
10514 tree al
= size_int (TYPE_ALIGN (rtype
));
10515 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
10518 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
10519 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
10520 gimple_seq_add_stmt (&new_body
,
10521 gimple_build_assign (new_var
, x
));
10526 gimple_seq fork_seq
= NULL
;
10527 gimple_seq join_seq
= NULL
;
10529 if (is_oacc_parallel (ctx
))
10531 /* If there are reductions on the offloaded region itself, treat
10532 them as a dummy GANG loop. */
10533 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
10535 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
10536 false, NULL
, NULL
, &fork_seq
, &join_seq
, ctx
);
10539 gimple_seq_add_seq (&new_body
, fork_seq
);
10540 gimple_seq_add_seq (&new_body
, tgt_body
);
10541 gimple_seq_add_seq (&new_body
, join_seq
);
10544 new_body
= maybe_catch_exception (new_body
);
10546 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
10547 gimple_omp_set_body (stmt
, new_body
);
10550 bind
= gimple_build_bind (NULL
, NULL
,
10551 tgt_bind
? gimple_bind_block (tgt_bind
)
10553 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
10554 gimple_bind_add_seq (bind
, ilist
);
10555 gimple_bind_add_stmt (bind
, stmt
);
10556 gimple_bind_add_seq (bind
, olist
);
10558 pop_gimplify_context (NULL
);
10562 gimple_bind_add_seq (dep_bind
, dep_ilist
);
10563 gimple_bind_add_stmt (dep_bind
, bind
);
10564 gimple_bind_add_seq (dep_bind
, dep_olist
);
10565 pop_gimplify_context (dep_bind
);
10569 /* Expand code for an OpenMP teams directive. */
static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  /* Replace the GIMPLE_OMP_TEAMS statement at *GSI_P with a GIMPLE_BIND
     that evaluates the num_teams/thread_limit clause expressions, lowers
     the data-sharing clauses, and (unless this is a gridified phony body)
     emits a call to the GOMP_teams runtime entry point.
     NOTE(review): recovered from a line-mangled dump; lines dropped by the
     extraction (else branches, braces) were reconstructed -- verify against
     upstream omp-low.c.  */
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* num_teams defaults to 0 ("let the runtime decide") when the clause
     is absent; otherwise gimplify the clause expression into BIND_BODY.  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise thread_limit: 0 means "no limit requested".  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   ctx);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      /* Keep the teams statement itself and prepend the GOMP_TEAMS
	 runtime call that communicates num_teams/thread_limit.  */
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
10637 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
static void
lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  /* Lower the body of an artificial GIMPLE_OMP_GRID_BODY construct and
     append the terminating OMP return (non-nowait) to its body sequence.
     NOTE(review): closing brace reconstructed from a line-mangled dump.  */
  gimple *stmt = gsi_stmt (*gsi_p);
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
		       gimple_build_omp_return (false));
}
10649 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
10650 regimplified. If DATA is non-NULL, lower_omp_1 is outside
10651 of OMP context, but with task_shared_vars set. */
static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  /* walk_tree callback: return non-NULL (the offending tree) if *TP needs
     to be regimplified.  DATA is non-NULL when walking outside of an OMP
     context but with task_shared_vars set.
     NOTE(review): recovered from a line-mangled dump; the dropped "return
     t;" statements and braces were reconstructed -- verify upstream.  */
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  /* Variables shared with a task also need regimplification.  */
  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  /* Do not descend into types or declarations.  */
  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
10677 /* Data to be communicated between lower_omp_regimplify_operands and
10678 lower_omp_regimplify_operands_p. */
struct lower_omp_regimplify_operands_data
{
  /* NOTE(review): the struct members were dropped by the extraction and
     reconstructed from the uses in lower_omp_regimplify_operands{,_p}
     (ldata->ctx, ldata->decls->safe_push) -- verify against upstream.  */
  omp_context *ctx;
  /* Pairs of (saved DECL_VALUE_EXPR, decl) pushed while temporarily
     remapping omp_member_access_dummy_var decls; popped to restore.  */
  vec<tree> *decls;
};
10686 /* Helper function for lower_omp_regimplify_operands. Find
10687 omp_member_access_dummy_var vars and adjust temporarily their
10688 DECL_VALUE_EXPRs if needed. */
static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  /* Helper for lower_omp_regimplify_operands.  Find
     omp_member_access_dummy_var vars and adjust temporarily their
     DECL_VALUE_EXPRs if needed: the original DECL_VALUE_EXPR and the decl
     are saved on ldata->decls so the caller can restore them afterwards.
     NOTE(review): recovered from a line-mangled dump; dropped braces and
     the "if (o != t)"/return lines reconstructed -- verify upstream.  */
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  /* Save the old value expr and the decl, then install a remapped
	     copy that refers to the context-local O instead of T.  */
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
10713 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
10714 of omp_member_access_dummy_var vars during regimplification. */
static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
     of omp_member_access_dummy_var vars during regimplification.  The
     adjusted value exprs are restored (in reverse order) once STMT has been
     regimplified.
     NOTE(review): recovered from a line-mangled dump; the "if (ctx)" guard,
     "data.ctx = ctx;" and "wi.info" lines were dropped by the extraction
     and reconstructed -- verify against upstream omp-low.c.  */
  auto_vec <tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = (void *) &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  /* Restore the saved DECL_VALUE_EXPRs; decls holds (value, decl) pairs
     pushed value-first, so pop decl then value.  */
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  /* Lower one statement at *GSI_P inside OMP context CTX (CTX may be NULL
     when lowering outside any OMP context but with task_shared_vars set).
     OMP directives dispatch to their lower_omp_* expander, container
     statements recurse into their bodies, GOMP cancellation builtins are
     rewritten, and anything mentioning remapped variables is regimplified.
     NOTE(review): recovered from a line-mangled dump; dropped lines (case
     labels, braces, break/return statements, the gcall declaration) were
     reconstructed -- verify against upstream omp-low.c before relying on
     exact fallthrough structure.  */
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* wi is only consulted when CTX is NULL; see the walk_tree calls.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Host teams (not inside target) are lowered like a task region.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    /* Cancellation applies to the construct enclosing a section.  */
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* No cancellation in scope: a cancellation point becomes a
		   no-op; a plain barrier is left alone.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		/* Inside a cancellable region, use the _cancel variant
		   which reports whether cancellation was observed.  */
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Branch to the cancel label when the builtin returns true.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      /* Stores inside constructs with conditional lastprivate need to also
	 update the _condtemp_ tracking variable.  */
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen0)
		  up = up->outer;
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  /* Lower every statement in *BODY within OMP context CTX, preserving and
     restoring input_location around the walk.
     NOTE(review): the body of the second loop was dropped by the extraction;
     "fold_stmt (&gsi);" reconstructed per the comment below -- verify.  */
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statments inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}
10999 /* Main entry point. */
/* Execute callback of the "omplower" pass: scan the current function
   for OMP constructs, then lower them to runtime calls and explicit
   data-sharing code.  Always runs so PROP_gimple_lomp is provided even
   when no OMP features are enabled.
   NOTE(review): local declarations (body, i, ctx), the early return for
   the no-OMP case, and the trailing return value are missing from this
   extraction — confirm against the upstream file.  */
11001 static unsigned int
11002 execute_lower_omp (void)
11008 /* This pass always runs, to provide PROP_gimple_lomp.
11009 But often, there is nothing to do. */
11010 if (flag_openacc
== 0 && flag_openmp
== 0
11011 && flag_openmp_simd
== 0)
/* Phase 1: build the map of OMP contexts for the function.  */
11014 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
11015 delete_omp_context
);
11017 body
= gimple_body (current_function_decl
);
11019 if (hsa_gen_requested_p ())
11020 omp_grid_gridify_all_targets (&body
);
11022 scan_omp (&body
, NULL
);
11023 gcc_assert (taskreg_nesting_level
== 0);
/* Finalize the record types built for each parallel/task region.  */
11024 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
11025 finish_taskreg_scan (ctx
);
11026 taskreg_contexts
.release ();
/* Phase 2: lower the constructs, re-gimplifying under a fresh
   gimplify context when shared variables were remapped.  */
11028 if (all_contexts
->root
)
11030 if (task_shared_vars
)
11031 push_gimplify_context ();
11032 lower_omp (&body
, NULL
);
11033 if (task_shared_vars
)
11034 pop_gimplify_context (NULL
);
11039 splay_tree_delete (all_contexts
);
11040 all_contexts
= NULL
;
11042 BITMAP_FREE (task_shared_vars
);
11044 /* If current function is a method, remove artificial dummy VAR_DECL created
11045 for non-static data member privatization, they aren't needed for
11046 debuginfo nor anything else, have been already replaced everywhere in the
11047 IL and cause problems with LTO. */
11048 if (DECL_ARGUMENTS (current_function_decl
)
11049 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
11050 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
/* NOTE(review): the comparison target of this TREE_CODE test (original
   line 11051, presumably == POINTER_TYPE) is missing here — confirm.  */
11052 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
/* Pass descriptor for "omplower"; consumed by the pass manager through
   pass_lower_omp below.  Requires any GIMPLE, provides lowered-OMP
   (and device-lowered) GIMPLE.  */
11058 const pass_data pass_data_lower_omp
=
11060 GIMPLE_PASS
, /* type */
11061 "omplower", /* name */
11062 OPTGROUP_OMP
, /* optinfo_flags */
11063 TV_NONE
, /* tv_id */
11064 PROP_gimple_any
, /* properties_required */
11065 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
11066 0, /* properties_destroyed */
11067 0, /* todo_flags_start */
11068 0, /* todo_flags_finish */
/* The "omplower" pass object: unconditionally gated (no gate method),
   so execute_lower_omp runs for every function.  */
11071 class pass_lower_omp
: public gimple_opt_pass
11074 pass_lower_omp (gcc::context
*ctxt
)
11075 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
11078 /* opt_pass methods: */
11079 virtual unsigned int execute (function
*) { return execute_lower_omp (); }
11081 }; // class pass_lower_omp
11083 } // anon namespace
11086 make_pass_lower_omp (gcc::context
*ctxt
)
11088 return new pass_lower_omp (ctxt
);
11091 /* The following is a utility to diagnose structured block violations.
11092 It is not part of the "omplower" pass, as that's invoked too late. It
11093 should be invoked by the respective front ends after gimplification. */
/* Map from LABEL_DECL to the innermost enclosing OMP region statement;
   filled by diagnose_sb_1, queried by diagnose_sb_2, and torn down in
   diagnose_omp_structured_block_errors.  */
11095 static splay_tree all_labels
;
11097 /* Check for mismatched contexts and generate an error if needed. Return
11098 true if an error is detected. */
/* Diagnose one branch: BRANCH_CTX is the OMP construct containing the
   branch at *GSI_P (NULL when outside any construct), LABEL_CTX the
   construct containing the destination label.  Emits an error and
   replaces the offending statement with a nop when they differ.
   NOTE(review): the return-type line (presumably static bool), the
   selection of KIND ("OpenACC"/"OpenMP"), the early returns, and the
   braces are missing from this extraction — confirm upstream.  */
11101 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
11102 gimple
*branch_ctx
, gimple
*label_ctx
)
11104 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
11105 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
/* Branch and label live in the same construct: nothing to diagnose.  */
11107 if (label_ctx
== branch_ctx
)
11110 const char* kind
= NULL
;
/* Pick the wording for the diagnostic: OpenACC if either side is an
   OpenACC construct, otherwise OpenMP.  */
11114 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
11115 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
11117 gcc_checking_assert (kind
== NULL
);
11123 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
11127 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
11128 so we could traverse it and issue a correct "exit" or "enter" error
11129 message upon a structured block violation.
11131 We built the context by building a list with tree_cons'ing, but there is
11132 no easy counterpart in gimple tuples. It seems like far too much work
11133 for issuing exit/enter error messages. If someone really misses the
11134 distinct error message... patches welcome. */
11137 /* Try to avoid confusing the user by producing an error message
11138 with correct "exit" or "enter" verbiage. We prefer "exit"
11139 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
/* NOTE(review): in upstream this exit/enter-detection region appears to
   be disabled (#if 0); the preprocessor lines are not visible here.  */
11140 if (branch_ctx
== NULL
)
11146 if (TREE_VALUE (label_ctx
) == branch_ctx
)
11151 label_ctx
= TREE_CHAIN (label_ctx
);
11156 error ("invalid exit from %s structured block", kind
);
11158 error ("invalid entry to %s structured block", kind
);
11161 /* If it's obvious we have an invalid entry, be specific about the error. */
11162 if (branch_ctx
== NULL
)
11163 error ("invalid entry to %s structured block", kind
);
11166 /* Otherwise, be vague and lazy, but efficient. */
11167 error ("invalid branch to/from %s structured block", kind
);
/* Drop the bogus statement so later passes don't trip over it.  */
11170 gsi_replace (gsi_p
, gimple_build_nop (), false);
11174 /* Pass 1: Create a minimal tree of structured blocks, and record
11175 where each label is found. */
/* walk_gimple_seq callback, pass 1 of the structured-block checker:
   record in ALL_LABELS, for every label, the innermost enclosing OMP
   construct (carried through WI->info).
   NOTE(review): the return-type line (presumably static tree), braces,
   break statements, and the default/GIMPLE_LABEL case labels are
   missing from this extraction — confirm upstream.  */
11178 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
11179 struct walk_stmt_info
*wi
)
11181 gimple
*context
= (gimple
*) wi
->info
;
11182 gimple
*inner_context
;
11183 gimple
*stmt
= gsi_stmt (*gsi_p
);
11185 *handled_ops_p
= true;
11187 switch (gimple_code (stmt
))
11191 case GIMPLE_OMP_PARALLEL
:
11192 case GIMPLE_OMP_TASK
:
11193 case GIMPLE_OMP_SECTIONS
:
11194 case GIMPLE_OMP_SINGLE
:
11195 case GIMPLE_OMP_SECTION
:
11196 case GIMPLE_OMP_MASTER
:
11197 case GIMPLE_OMP_ORDERED
:
11198 case GIMPLE_OMP_CRITICAL
:
11199 case GIMPLE_OMP_TARGET
:
11200 case GIMPLE_OMP_TEAMS
:
11201 case GIMPLE_OMP_TASKGROUP
:
11202 /* The minimal context here is just the current OMP construct. */
11203 inner_context
= stmt
;
11204 wi
->info
= inner_context
;
11205 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
11206 wi
->info
= context
;
/* OMP for additionally carries a pre-body sequence to walk.  */
11209 case GIMPLE_OMP_FOR
:
11210 inner_context
= stmt
;
11211 wi
->info
= inner_context
;
11212 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
11214 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
11215 diagnose_sb_1
, NULL
, wi
);
11216 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
11217 wi
->info
= context
;
/* Label statement: remember which construct encloses this label.  */
11221 splay_tree_insert (all_labels
,
11222 (splay_tree_key
) gimple_label_label (
11223 as_a
<glabel
*> (stmt
)),
11224 (splay_tree_value
) context
);
11234 /* Pass 2: Check each branch and see if its context differs from that of
11235 the destination label's context. */
/* walk_gimple_seq callback, pass 2 of the structured-block checker:
   for every branch (cond, goto, switch, return), compare the branch's
   OMP context (WI->info) with the destination label's recorded context
   in ALL_LABELS, diagnosing mismatches via diagnose_sb_0.
   NOTE(review): the return-type line (presumably static tree), braces,
   breaks, local declarations of N and I, and some case labels
   (GIMPLE_COND/GOTO and default) are missing from this extraction —
   confirm upstream.  */
11238 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
11239 struct walk_stmt_info
*wi
)
11241 gimple
*context
= (gimple
*) wi
->info
;
11243 gimple
*stmt
= gsi_stmt (*gsi_p
);
11245 *handled_ops_p
= true;
11247 switch (gimple_code (stmt
))
11251 case GIMPLE_OMP_PARALLEL
:
11252 case GIMPLE_OMP_TASK
:
11253 case GIMPLE_OMP_SECTIONS
:
11254 case GIMPLE_OMP_SINGLE
:
11255 case GIMPLE_OMP_SECTION
:
11256 case GIMPLE_OMP_MASTER
:
11257 case GIMPLE_OMP_ORDERED
:
11258 case GIMPLE_OMP_CRITICAL
:
11259 case GIMPLE_OMP_TARGET
:
11260 case GIMPLE_OMP_TEAMS
:
11261 case GIMPLE_OMP_TASKGROUP
:
/* Descend into the construct with it as the new context.  */
11263 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
11264 wi
->info
= context
;
11267 case GIMPLE_OMP_FOR
:
11269 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
11271 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
11272 diagnose_sb_2
, NULL
, wi
);
11273 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
11274 wi
->info
= context
;
/* Conditional: check both the true and the false destination.  */
11279 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
11280 tree lab
= gimple_cond_true_label (cond_stmt
);
11283 n
= splay_tree_lookup (all_labels
,
11284 (splay_tree_key
) lab
);
11285 diagnose_sb_0 (gsi_p
, context
,
11286 n
? (gimple
*) n
->value
: NULL
);
11288 lab
= gimple_cond_false_label (cond_stmt
);
11291 n
= splay_tree_lookup (all_labels
,
11292 (splay_tree_key
) lab
);
11293 diagnose_sb_0 (gsi_p
, context
,
11294 n
? (gimple
*) n
->value
: NULL
);
/* Direct goto: only LABEL_DECL destinations can be checked; computed
   gotos are skipped.  */
11301 tree lab
= gimple_goto_dest (stmt
);
11302 if (TREE_CODE (lab
) != LABEL_DECL
)
11305 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
11306 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
/* Switch: check every case destination; stop at the first error.  */
11310 case GIMPLE_SWITCH
:
11312 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
11314 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
11316 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
11317 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
11318 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
/* A return inside any OMP construct is always an invalid exit.  */
11324 case GIMPLE_RETURN
:
11325 diagnose_sb_0 (gsi_p
, context
, NULL
);
/* Driver for the structured-block checker: run pass 1 to record label
   contexts, then pass 2 to diagnose branches, write the (possibly
   nop-patched) body back, and free ALL_LABELS.
   NOTE(review): the trailing return value (presumably return 0;) is
   missing from this extraction — confirm upstream.  */
11335 static unsigned int
11336 diagnose_omp_structured_block_errors (void)
11338 struct walk_stmt_info wi
;
11339 gimple_seq body
= gimple_body (current_function_decl
);
11341 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
11343 memset (&wi
, 0, sizeof (wi
));
11344 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
/* Second walk may replace statements, so use the _mod variant and
   request locations for the diagnostics.  */
11346 memset (&wi
, 0, sizeof (wi
));
11347 wi
.want_locations
= true;
11348 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
11350 gimple_set_body (current_function_decl
, body
);
11352 splay_tree_delete (all_labels
);
/* Pass descriptor for "*diagnose_omp_blocks" (leading '*' hides it
   from -fdump-passes); consumed by pass_diagnose_omp_blocks below.  */
11360 const pass_data pass_data_diagnose_omp_blocks
=
11362 GIMPLE_PASS
, /* type */
11363 "*diagnose_omp_blocks", /* name */
11364 OPTGROUP_OMP
, /* optinfo_flags */
11365 TV_NONE
, /* tv_id */
11366 PROP_gimple_any
, /* properties_required */
11367 0, /* properties_provided */
11368 0, /* properties_destroyed */
11369 0, /* todo_flags_start */
11370 0, /* todo_flags_finish */
/* The structured-block diagnostic pass: gated on any of the OMP-family
   flags, runs diagnose_omp_structured_block_errors.  */
11373 class pass_diagnose_omp_blocks
: public gimple_opt_pass
11376 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
11377 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
11380 /* opt_pass methods: */
11381 virtual bool gate (function
*)
11383 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
11385 virtual unsigned int execute (function
*)
11387 return diagnose_omp_structured_block_errors ();
11390 }; // class pass_diagnose_omp_blocks
11392 } // anon namespace
11395 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
11397 return new pass_diagnose_omp_blocks (ctxt
);
11401 #include "gt-omp-low.h"