1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec
<tree
> task_reductions
;
122 /* And a hash map from the reduction clauses to the registered array
124 hash_map
<tree
, unsigned> *task_reduction_map
;
126 /* Nesting depth of this context. Used to beautify error messages re
127 invalid gotos. The outermost ctx is depth 1, with depth 0 being
128 reserved for the main body of the function. */
131 /* True if this parallel directive is nested within another. */
134 /* True if this construct can be cancelled. */
138 static splay_tree all_contexts
;
139 static int taskreg_nesting_level
;
140 static int target_nesting_level
;
141 static bitmap task_shared_vars
;
142 static vec
<omp_context
*> taskreg_contexts
;
144 static void scan_omp (gimple_seq
*, omp_context
*);
145 static tree
scan_omp_1_op (tree
*, int *, void *);
147 #define WALK_SUBSTMTS \
151 case GIMPLE_EH_FILTER: \
152 case GIMPLE_TRANSACTION: \
153 /* The sub-statements for these should be walked. */ \
154 *handled_ops_p = false; \
157 /* Return true if CTX corresponds to an oacc parallel region. */
160 is_oacc_parallel (omp_context
*ctx
)
162 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
163 return ((outer_type
== GIMPLE_OMP_TARGET
)
164 && (gimple_omp_target_kind (ctx
->stmt
)
165 == GF_OMP_TARGET_KIND_OACC_PARALLEL
));
168 /* Return true if CTX corresponds to an oacc kernels region. */
171 is_oacc_kernels (omp_context
*ctx
)
173 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
174 return ((outer_type
== GIMPLE_OMP_TARGET
)
175 && (gimple_omp_target_kind (ctx
->stmt
)
176 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
179 /* If DECL is the artificial dummy VAR_DECL created for non-static
180 data member privatization, return the underlying "this" parameter,
181 otherwise return NULL. */
184 omp_member_access_dummy_var (tree decl
)
187 || !DECL_ARTIFICIAL (decl
)
188 || !DECL_IGNORED_P (decl
)
189 || !DECL_HAS_VALUE_EXPR_P (decl
)
190 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
193 tree v
= DECL_VALUE_EXPR (decl
);
194 if (TREE_CODE (v
) != COMPONENT_REF
)
198 switch (TREE_CODE (v
))
204 case POINTER_PLUS_EXPR
:
205 v
= TREE_OPERAND (v
, 0);
208 if (DECL_CONTEXT (v
) == current_function_decl
209 && DECL_ARTIFICIAL (v
)
210 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
218 /* Helper for unshare_and_remap, called through walk_tree. */
221 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
223 tree
*pair
= (tree
*) data
;
226 *tp
= unshare_expr (pair
[1]);
229 else if (IS_TYPE_OR_DECL_P (*tp
))
234 /* Return unshare_expr (X) with all occurrences of FROM
238 unshare_and_remap (tree x
, tree from
, tree to
)
240 tree pair
[2] = { from
, to
};
241 x
= unshare_expr (x
);
242 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
246 /* Convenience function for calling scan_omp_1_op on tree operands. */
249 scan_omp_op (tree
*tp
, omp_context
*ctx
)
251 struct walk_stmt_info wi
;
253 memset (&wi
, 0, sizeof (wi
));
255 wi
.want_locations
= true;
257 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
260 static void lower_omp (gimple_seq
*, omp_context
*);
261 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
262 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
264 /* Return true if CTX is for an omp parallel. */
267 is_parallel_ctx (omp_context
*ctx
)
269 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
273 /* Return true if CTX is for an omp task. */
276 is_task_ctx (omp_context
*ctx
)
278 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
282 /* Return true if CTX is for an omp taskloop. */
285 is_taskloop_ctx (omp_context
*ctx
)
287 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
288 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
292 /* Return true if CTX is for a host omp teams. */
295 is_host_teams_ctx (omp_context
*ctx
)
297 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
298 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
301 /* Return true if CTX is for an omp parallel or omp task or host omp teams
302 (the last one is strictly not a task region in OpenMP speak, but we
303 need to treat it similarly). */
306 is_taskreg_ctx (omp_context
*ctx
)
308 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
311 /* Return true if EXPR is variable sized. */
314 is_variable_sized (const_tree expr
)
316 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
319 /* Lookup variables. The "maybe" form
320 allows for the variable form to not have been entered, otherwise we
321 assert that the variable must have been entered. */
324 lookup_decl (tree var
, omp_context
*ctx
)
326 tree
*n
= ctx
->cb
.decl_map
->get (var
);
331 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
333 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
334 return n
? *n
: NULL_TREE
;
338 lookup_field (tree var
, omp_context
*ctx
)
341 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
342 return (tree
) n
->value
;
346 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
349 n
= splay_tree_lookup (ctx
->sfield_map
350 ? ctx
->sfield_map
: ctx
->field_map
, key
);
351 return (tree
) n
->value
;
355 lookup_sfield (tree var
, omp_context
*ctx
)
357 return lookup_sfield ((splay_tree_key
) var
, ctx
);
361 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
364 n
= splay_tree_lookup (ctx
->field_map
, key
);
365 return n
? (tree
) n
->value
: NULL_TREE
;
369 maybe_lookup_field (tree var
, omp_context
*ctx
)
371 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
374 /* Return true if DECL should be copied by pointer. SHARED_CTX is
375 the parallel context if DECL is to be shared. */
378 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
380 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
381 || TYPE_ATOMIC (TREE_TYPE (decl
)))
384 /* We can only use copy-in/copy-out semantics for shared variables
385 when we know the value is not accessible from an outer scope. */
388 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
390 /* ??? Trivially accessible from anywhere. But why would we even
391 be passing an address in this case? Should we simply assert
392 this to be false, or should we have a cleanup pass that removes
393 these from the list of mappings? */
394 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
397 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
398 without analyzing the expression whether or not its location
399 is accessible to anyone else. In the case of nested parallel
400 regions it certainly may be. */
401 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
404 /* Do not use copy-in/copy-out for variables that have their
406 if (TREE_ADDRESSABLE (decl
))
409 /* lower_send_shared_vars only uses copy-in, but not copy-out
411 if (TREE_READONLY (decl
)
412 || ((TREE_CODE (decl
) == RESULT_DECL
413 || TREE_CODE (decl
) == PARM_DECL
)
414 && DECL_BY_REFERENCE (decl
)))
417 /* Disallow copy-in/out in nested parallel if
418 decl is shared in outer parallel, otherwise
419 each thread could store the shared variable
420 in its own copy-in location, making the
421 variable no longer really shared. */
422 if (shared_ctx
->is_nested
)
426 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
427 if (is_taskreg_ctx (up
) && maybe_lookup_decl (decl
, up
))
434 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
435 c
; c
= OMP_CLAUSE_CHAIN (c
))
436 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
437 && OMP_CLAUSE_DECL (c
) == decl
)
441 goto maybe_mark_addressable_and_ret
;
445 /* For tasks avoid using copy-in/out. As tasks can be
446 deferred or executed in different thread, when GOMP_task
447 returns, the task hasn't necessarily terminated. */
448 if (is_task_ctx (shared_ctx
))
451 maybe_mark_addressable_and_ret
:
452 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
453 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
455 /* Taking address of OUTER in lower_send_shared_vars
456 might need regimplification of everything that uses the
458 if (!task_shared_vars
)
459 task_shared_vars
= BITMAP_ALLOC (NULL
);
460 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
461 TREE_ADDRESSABLE (outer
) = 1;
470 /* Construct a new automatic decl similar to VAR. */
473 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
475 tree copy
= copy_var_decl (var
, name
, type
);
477 DECL_CONTEXT (copy
) = current_function_decl
;
478 DECL_CHAIN (copy
) = ctx
->block_vars
;
479 /* If VAR is listed in task_shared_vars, it means it wasn't
480 originally addressable and is just because task needs to take
481 it's address. But we don't need to take address of privatizations
483 if (TREE_ADDRESSABLE (var
)
485 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
486 TREE_ADDRESSABLE (copy
) = 0;
487 ctx
->block_vars
= copy
;
493 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
495 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
498 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
501 omp_build_component_ref (tree obj
, tree field
)
503 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
504 if (TREE_THIS_VOLATILE (field
))
505 TREE_THIS_VOLATILE (ret
) |= 1;
506 if (TREE_READONLY (field
))
507 TREE_READONLY (ret
) |= 1;
511 /* Build tree nodes to access the field for VAR on the receiver side. */
514 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
516 tree x
, field
= lookup_field (var
, ctx
);
518 /* If the receiver record type was remapped in the child function,
519 remap the field into the new record type. */
520 x
= maybe_lookup_field (field
, ctx
);
524 x
= build_simple_mem_ref (ctx
->receiver_decl
);
525 TREE_THIS_NOTRAP (x
) = 1;
526 x
= omp_build_component_ref (x
, field
);
529 x
= build_simple_mem_ref (x
);
530 TREE_THIS_NOTRAP (x
) = 1;
536 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
537 of a parallel, this is a component reference; for workshare constructs
538 this is some variable. */
541 build_outer_var_ref (tree var
, omp_context
*ctx
,
542 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
545 omp_context
*outer
= ctx
->outer
;
546 while (outer
&& gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
547 outer
= outer
->outer
;
549 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
551 else if (is_variable_sized (var
))
553 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
554 x
= build_outer_var_ref (x
, ctx
, code
);
555 x
= build_simple_mem_ref (x
);
557 else if (is_taskreg_ctx (ctx
))
559 bool by_ref
= use_pointer_for_field (var
, NULL
);
560 x
= build_receiver_ref (var
, by_ref
, ctx
);
562 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
563 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
564 || (code
== OMP_CLAUSE_PRIVATE
565 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
566 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
567 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
569 /* #pragma omp simd isn't a worksharing construct, and can reference
570 even private vars in its linear etc. clauses.
571 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
572 to private vars in all worksharing constructs. */
574 if (outer
&& is_taskreg_ctx (outer
))
575 x
= lookup_decl (var
, outer
);
577 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
581 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
585 = splay_tree_lookup (outer
->field_map
,
586 (splay_tree_key
) &DECL_UID (var
));
589 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
592 x
= lookup_decl (var
, outer
);
596 tree field
= (tree
) n
->value
;
597 /* If the receiver record type was remapped in the child function,
598 remap the field into the new record type. */
599 x
= maybe_lookup_field (field
, outer
);
603 x
= build_simple_mem_ref (outer
->receiver_decl
);
604 x
= omp_build_component_ref (x
, field
);
605 if (use_pointer_for_field (var
, outer
))
606 x
= build_simple_mem_ref (x
);
611 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_GRID_BODY
)
613 outer
= outer
->outer
;
615 && gimple_code (outer
->stmt
) != GIMPLE_OMP_GRID_BODY
);
617 x
= lookup_decl (var
, outer
);
619 else if (omp_is_reference (var
))
620 /* This can happen with orphaned constructs. If var is reference, it is
621 possible it is shared and as such valid. */
623 else if (omp_member_access_dummy_var (var
))
630 tree t
= omp_member_access_dummy_var (var
);
633 x
= DECL_VALUE_EXPR (var
);
634 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
636 x
= unshare_and_remap (x
, t
, o
);
638 x
= unshare_expr (x
);
642 if (omp_is_reference (var
))
643 x
= build_simple_mem_ref (x
);
648 /* Build tree nodes to access the field for VAR on the sender side. */
651 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
653 tree field
= lookup_sfield (key
, ctx
);
654 return omp_build_component_ref (ctx
->sender_decl
, field
);
658 build_sender_ref (tree var
, omp_context
*ctx
)
660 return build_sender_ref ((splay_tree_key
) var
, ctx
);
663 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
664 BASE_POINTERS_RESTRICT, declare the field with restrict. */
667 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
669 tree field
, type
, sfield
= NULL_TREE
;
670 splay_tree_key key
= (splay_tree_key
) var
;
674 key
= (splay_tree_key
) &DECL_UID (var
);
675 gcc_checking_assert (key
!= (splay_tree_key
) var
);
677 gcc_assert ((mask
& 1) == 0
678 || !splay_tree_lookup (ctx
->field_map
, key
));
679 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
680 || !splay_tree_lookup (ctx
->sfield_map
, key
));
681 gcc_assert ((mask
& 3) == 3
682 || !is_gimple_omp_oacc (ctx
->stmt
));
684 type
= TREE_TYPE (var
);
685 /* Prevent redeclaring the var in the split-off function with a restrict
686 pointer type. Note that we only clear type itself, restrict qualifiers in
687 the pointed-to type will be ignored by points-to analysis. */
688 if (POINTER_TYPE_P (type
)
689 && TYPE_RESTRICT (type
))
690 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
694 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
695 type
= build_pointer_type (build_pointer_type (type
));
698 type
= build_pointer_type (type
);
699 else if ((mask
& 3) == 1 && omp_is_reference (var
))
700 type
= TREE_TYPE (type
);
702 field
= build_decl (DECL_SOURCE_LOCATION (var
),
703 FIELD_DECL
, DECL_NAME (var
), type
);
705 /* Remember what variable this field was created for. This does have a
706 side effect of making dwarf2out ignore this member, so for helpful
707 debugging we clear it later in delete_omp_context. */
708 DECL_ABSTRACT_ORIGIN (field
) = var
;
709 if (type
== TREE_TYPE (var
))
711 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
712 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
713 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
716 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
720 insert_field_into_struct (ctx
->record_type
, field
);
721 if (ctx
->srecord_type
)
723 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
724 FIELD_DECL
, DECL_NAME (var
), type
);
725 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
726 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
727 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
728 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
729 insert_field_into_struct (ctx
->srecord_type
, sfield
);
734 if (ctx
->srecord_type
== NULL_TREE
)
738 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
739 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
740 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
742 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
743 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
744 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
745 insert_field_into_struct (ctx
->srecord_type
, sfield
);
746 splay_tree_insert (ctx
->sfield_map
,
747 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
748 (splay_tree_value
) sfield
);
752 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
753 : ctx
->srecord_type
, field
);
757 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
758 if ((mask
& 2) && ctx
->sfield_map
)
759 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
763 install_var_local (tree var
, omp_context
*ctx
)
765 tree new_var
= omp_copy_decl_1 (var
, ctx
);
766 insert_decl_map (&ctx
->cb
, var
, new_var
);
770 /* Adjust the replacement for DECL in CTX for the new context. This means
771 copying the DECL_VALUE_EXPR, and fixing up the type. */
774 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
778 new_decl
= lookup_decl (decl
, ctx
);
780 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
782 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
783 && DECL_HAS_VALUE_EXPR_P (decl
))
785 tree ve
= DECL_VALUE_EXPR (decl
);
786 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
787 SET_DECL_VALUE_EXPR (new_decl
, ve
);
788 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
791 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
793 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
794 if (size
== error_mark_node
)
795 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
796 DECL_SIZE (new_decl
) = size
;
798 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
799 if (size
== error_mark_node
)
800 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
801 DECL_SIZE_UNIT (new_decl
) = size
;
805 /* The callback for remap_decl. Search all containing contexts for a
806 mapping of the variable; this avoids having to duplicate the splay
807 tree ahead of time. We know a mapping doesn't already exist in the
808 given context. Create new mappings to implement default semantics. */
811 omp_copy_decl (tree var
, copy_body_data
*cb
)
813 omp_context
*ctx
= (omp_context
*) cb
;
816 if (TREE_CODE (var
) == LABEL_DECL
)
818 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
820 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
821 DECL_CONTEXT (new_var
) = current_function_decl
;
822 insert_decl_map (&ctx
->cb
, var
, new_var
);
826 while (!is_taskreg_ctx (ctx
))
831 new_var
= maybe_lookup_decl (var
, ctx
);
836 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
839 return error_mark_node
;
842 /* Create a new context, with OUTER_CTX being the surrounding context. */
845 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
847 omp_context
*ctx
= XCNEW (omp_context
);
849 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
850 (splay_tree_value
) ctx
);
855 ctx
->outer
= outer_ctx
;
856 ctx
->cb
= outer_ctx
->cb
;
857 ctx
->cb
.block
= NULL
;
858 ctx
->depth
= outer_ctx
->depth
+ 1;
862 ctx
->cb
.src_fn
= current_function_decl
;
863 ctx
->cb
.dst_fn
= current_function_decl
;
864 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
865 gcc_checking_assert (ctx
->cb
.src_node
);
866 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
867 ctx
->cb
.src_cfun
= cfun
;
868 ctx
->cb
.copy_decl
= omp_copy_decl
;
869 ctx
->cb
.eh_lp_nr
= 0;
870 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
871 ctx
->cb
.adjust_array_error_bounds
= true;
872 ctx
->cb
.dont_remap_vla_if_no_change
= true;
876 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
881 static gimple_seq
maybe_catch_exception (gimple_seq
);
883 /* Finalize task copyfn. */
886 finalize_task_copyfn (gomp_task
*task_stmt
)
888 struct function
*child_cfun
;
890 gimple_seq seq
= NULL
, new_seq
;
893 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
894 if (child_fn
== NULL_TREE
)
897 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
898 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
900 push_cfun (child_cfun
);
901 bind
= gimplify_body (child_fn
, false);
902 gimple_seq_add_stmt (&seq
, bind
);
903 new_seq
= maybe_catch_exception (seq
);
906 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
908 gimple_seq_add_stmt (&seq
, bind
);
910 gimple_set_body (child_fn
, seq
);
913 /* Inform the callgraph about the new function. */
914 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
915 node
->parallelized_function
= 1;
916 cgraph_node::add_new_function (child_fn
, false);
919 /* Destroy a omp_context data structures. Called through the splay tree
920 value delete callback. */
923 delete_omp_context (splay_tree_value value
)
925 omp_context
*ctx
= (omp_context
*) value
;
927 delete ctx
->cb
.decl_map
;
930 splay_tree_delete (ctx
->field_map
);
932 splay_tree_delete (ctx
->sfield_map
);
934 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
935 it produces corrupt debug information. */
936 if (ctx
->record_type
)
939 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
940 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
942 if (ctx
->srecord_type
)
945 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
946 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
949 if (is_task_ctx (ctx
))
950 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
952 if (ctx
->task_reduction_map
)
954 ctx
->task_reductions
.release ();
955 delete ctx
->task_reduction_map
;
961 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
965 fixup_child_record_type (omp_context
*ctx
)
967 tree f
, type
= ctx
->record_type
;
969 if (!ctx
->receiver_decl
)
971 /* ??? It isn't sufficient to just call remap_type here, because
972 variably_modified_type_p doesn't work the way we expect for
973 record types. Testing each field for whether it needs remapping
974 and creating a new record by hand works, however. */
975 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
976 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
980 tree name
, new_fields
= NULL
;
982 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
983 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
984 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
985 TYPE_DECL
, name
, type
);
986 TYPE_NAME (type
) = name
;
988 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
990 tree new_f
= copy_node (f
);
991 DECL_CONTEXT (new_f
) = type
;
992 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
993 DECL_CHAIN (new_f
) = new_fields
;
994 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
995 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
997 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1001 /* Arrange to be able to look up the receiver field
1002 given the sender field. */
1003 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1004 (splay_tree_value
) new_f
);
1006 TYPE_FIELDS (type
) = nreverse (new_fields
);
1010 /* In a target region we never modify any of the pointers in *.omp_data_i,
1011 so attempt to help the optimizers. */
1012 if (is_gimple_omp_offloaded (ctx
->stmt
))
1013 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1015 TREE_TYPE (ctx
->receiver_decl
)
1016 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1019 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1020 specified by CLAUSES. */
1023 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1026 bool scan_array_reductions
= false;
1028 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1032 switch (OMP_CLAUSE_CODE (c
))
1034 case OMP_CLAUSE_PRIVATE
:
1035 decl
= OMP_CLAUSE_DECL (c
);
1036 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1038 else if (!is_variable_sized (decl
))
1039 install_var_local (decl
, ctx
);
1042 case OMP_CLAUSE_SHARED
:
1043 decl
= OMP_CLAUSE_DECL (c
);
1044 /* Ignore shared directives in teams construct inside of
1045 target construct. */
1046 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1047 && !is_host_teams_ctx (ctx
))
1049 /* Global variables don't need to be copied,
1050 the receiver side will use them directly. */
1051 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1052 if (is_global_var (odecl
))
1054 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1057 gcc_assert (is_taskreg_ctx (ctx
));
1058 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1059 || !is_variable_sized (decl
));
1060 /* Global variables don't need to be copied,
1061 the receiver side will use them directly. */
1062 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1064 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1066 use_pointer_for_field (decl
, ctx
);
1069 by_ref
= use_pointer_for_field (decl
, NULL
);
1070 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1071 || TREE_ADDRESSABLE (decl
)
1073 || omp_is_reference (decl
))
1075 by_ref
= use_pointer_for_field (decl
, ctx
);
1076 install_var_field (decl
, by_ref
, 3, ctx
);
1077 install_var_local (decl
, ctx
);
1080 /* We don't need to copy const scalar vars back. */
1081 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1084 case OMP_CLAUSE_REDUCTION
:
1085 case OMP_CLAUSE_IN_REDUCTION
:
1086 decl
= OMP_CLAUSE_DECL (c
);
1087 if (TREE_CODE (decl
) == MEM_REF
)
1089 tree t
= TREE_OPERAND (decl
, 0);
1090 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1091 t
= TREE_OPERAND (t
, 0);
1092 if (TREE_CODE (t
) == INDIRECT_REF
1093 || TREE_CODE (t
) == ADDR_EXPR
)
1094 t
= TREE_OPERAND (t
, 0);
1095 install_var_local (t
, ctx
);
1096 if (is_taskreg_ctx (ctx
)
1097 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1098 || (is_task_ctx (ctx
)
1099 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1100 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1101 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1102 == POINTER_TYPE
)))))
1103 && !is_variable_sized (t
)
1104 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1105 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1106 && !is_task_ctx (ctx
))))
1108 by_ref
= use_pointer_for_field (t
, NULL
);
1109 if (is_task_ctx (ctx
)
1110 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1111 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1113 install_var_field (t
, false, 1, ctx
);
1114 install_var_field (t
, by_ref
, 2, ctx
);
1117 install_var_field (t
, by_ref
, 3, ctx
);
1121 if (is_task_ctx (ctx
)
1122 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1123 && OMP_CLAUSE_REDUCTION_TASK (c
)
1124 && is_parallel_ctx (ctx
)))
1126 /* Global variables don't need to be copied,
1127 the receiver side will use them directly. */
1128 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1130 by_ref
= use_pointer_for_field (decl
, ctx
);
1131 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1132 install_var_field (decl
, by_ref
, 3, ctx
);
1134 install_var_local (decl
, ctx
);
1137 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1138 && OMP_CLAUSE_REDUCTION_TASK (c
))
1140 install_var_local (decl
, ctx
);
1145 case OMP_CLAUSE_LASTPRIVATE
:
1146 /* Let the corresponding firstprivate clause create
1148 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1152 case OMP_CLAUSE_FIRSTPRIVATE
:
1153 case OMP_CLAUSE_LINEAR
:
1154 decl
= OMP_CLAUSE_DECL (c
);
1156 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1157 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1158 && is_gimple_omp_offloaded (ctx
->stmt
))
1160 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1161 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1162 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1163 install_var_field (decl
, true, 3, ctx
);
1165 install_var_field (decl
, false, 3, ctx
);
1167 if (is_variable_sized (decl
))
1169 if (is_task_ctx (ctx
))
1170 install_var_field (decl
, false, 1, ctx
);
1173 else if (is_taskreg_ctx (ctx
))
1176 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1177 by_ref
= use_pointer_for_field (decl
, NULL
);
1179 if (is_task_ctx (ctx
)
1180 && (global
|| by_ref
|| omp_is_reference (decl
)))
1182 install_var_field (decl
, false, 1, ctx
);
1184 install_var_field (decl
, by_ref
, 2, ctx
);
1187 install_var_field (decl
, by_ref
, 3, ctx
);
1189 install_var_local (decl
, ctx
);
1192 case OMP_CLAUSE_USE_DEVICE_PTR
:
1193 decl
= OMP_CLAUSE_DECL (c
);
1194 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1195 install_var_field (decl
, true, 3, ctx
);
1197 install_var_field (decl
, false, 3, ctx
);
1198 if (DECL_SIZE (decl
)
1199 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1201 tree decl2
= DECL_VALUE_EXPR (decl
);
1202 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1203 decl2
= TREE_OPERAND (decl2
, 0);
1204 gcc_assert (DECL_P (decl2
));
1205 install_var_local (decl2
, ctx
);
1207 install_var_local (decl
, ctx
);
1210 case OMP_CLAUSE_IS_DEVICE_PTR
:
1211 decl
= OMP_CLAUSE_DECL (c
);
1214 case OMP_CLAUSE__LOOPTEMP_
:
1215 case OMP_CLAUSE__REDUCTEMP_
:
1216 gcc_assert (is_taskreg_ctx (ctx
));
1217 decl
= OMP_CLAUSE_DECL (c
);
1218 install_var_field (decl
, false, 3, ctx
);
1219 install_var_local (decl
, ctx
);
1222 case OMP_CLAUSE_COPYPRIVATE
:
1223 case OMP_CLAUSE_COPYIN
:
1224 decl
= OMP_CLAUSE_DECL (c
);
1225 by_ref
= use_pointer_for_field (decl
, NULL
);
1226 install_var_field (decl
, by_ref
, 3, ctx
);
1229 case OMP_CLAUSE_FINAL
:
1231 case OMP_CLAUSE_NUM_THREADS
:
1232 case OMP_CLAUSE_NUM_TEAMS
:
1233 case OMP_CLAUSE_THREAD_LIMIT
:
1234 case OMP_CLAUSE_DEVICE
:
1235 case OMP_CLAUSE_SCHEDULE
:
1236 case OMP_CLAUSE_DIST_SCHEDULE
:
1237 case OMP_CLAUSE_DEPEND
:
1238 case OMP_CLAUSE_PRIORITY
:
1239 case OMP_CLAUSE_GRAINSIZE
:
1240 case OMP_CLAUSE_NUM_TASKS
:
1241 case OMP_CLAUSE_NUM_GANGS
:
1242 case OMP_CLAUSE_NUM_WORKERS
:
1243 case OMP_CLAUSE_VECTOR_LENGTH
:
1245 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1249 case OMP_CLAUSE_FROM
:
1250 case OMP_CLAUSE_MAP
:
1252 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1253 decl
= OMP_CLAUSE_DECL (c
);
1254 /* Global variables with "omp declare target" attribute
1255 don't need to be copied, the receiver side will use them
1256 directly. However, global variables with "omp declare target link"
1257 attribute need to be copied. Or when ALWAYS modifier is used. */
1258 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1260 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1261 && (OMP_CLAUSE_MAP_KIND (c
)
1262 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1263 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1264 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1265 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1266 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1267 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1268 && varpool_node::get_create (decl
)->offloadable
1269 && !lookup_attribute ("omp declare target link",
1270 DECL_ATTRIBUTES (decl
)))
1272 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1273 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1275 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1276 not offloaded; there is nothing to map for those. */
1277 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1278 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1279 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1282 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1283 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1284 || (OMP_CLAUSE_MAP_KIND (c
)
1285 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1287 if (TREE_CODE (decl
) == COMPONENT_REF
1288 || (TREE_CODE (decl
) == INDIRECT_REF
1289 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1290 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1291 == REFERENCE_TYPE
)))
1293 if (DECL_SIZE (decl
)
1294 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1296 tree decl2
= DECL_VALUE_EXPR (decl
);
1297 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1298 decl2
= TREE_OPERAND (decl2
, 0);
1299 gcc_assert (DECL_P (decl2
));
1300 install_var_local (decl2
, ctx
);
1302 install_var_local (decl
, ctx
);
1307 if (DECL_SIZE (decl
)
1308 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1310 tree decl2
= DECL_VALUE_EXPR (decl
);
1311 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1312 decl2
= TREE_OPERAND (decl2
, 0);
1313 gcc_assert (DECL_P (decl2
));
1314 install_var_field (decl2
, true, 3, ctx
);
1315 install_var_local (decl2
, ctx
);
1316 install_var_local (decl
, ctx
);
1320 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1321 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1322 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1323 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1324 install_var_field (decl
, true, 7, ctx
);
1326 install_var_field (decl
, true, 3, ctx
);
1327 if (is_gimple_omp_offloaded (ctx
->stmt
)
1328 && !OMP_CLAUSE_MAP_IN_REDUCTION (c
))
1329 install_var_local (decl
, ctx
);
1334 tree base
= get_base_address (decl
);
1335 tree nc
= OMP_CLAUSE_CHAIN (c
);
1338 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1339 && OMP_CLAUSE_DECL (nc
) == base
1340 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1341 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1343 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1344 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1350 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1351 decl
= OMP_CLAUSE_DECL (c
);
1353 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1354 (splay_tree_key
) decl
));
1356 = build_decl (OMP_CLAUSE_LOCATION (c
),
1357 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1358 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1359 insert_field_into_struct (ctx
->record_type
, field
);
1360 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1361 (splay_tree_value
) field
);
1366 case OMP_CLAUSE__GRIDDIM_
:
1369 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c
), ctx
->outer
);
1370 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c
), ctx
->outer
);
1374 case OMP_CLAUSE_NOWAIT
:
1375 case OMP_CLAUSE_ORDERED
:
1376 case OMP_CLAUSE_COLLAPSE
:
1377 case OMP_CLAUSE_UNTIED
:
1378 case OMP_CLAUSE_MERGEABLE
:
1379 case OMP_CLAUSE_PROC_BIND
:
1380 case OMP_CLAUSE_SAFELEN
:
1381 case OMP_CLAUSE_SIMDLEN
:
1382 case OMP_CLAUSE_THREADS
:
1383 case OMP_CLAUSE_SIMD
:
1384 case OMP_CLAUSE_NOGROUP
:
1385 case OMP_CLAUSE_DEFAULTMAP
:
1386 case OMP_CLAUSE_ASYNC
:
1387 case OMP_CLAUSE_WAIT
:
1388 case OMP_CLAUSE_GANG
:
1389 case OMP_CLAUSE_WORKER
:
1390 case OMP_CLAUSE_VECTOR
:
1391 case OMP_CLAUSE_INDEPENDENT
:
1392 case OMP_CLAUSE_AUTO
:
1393 case OMP_CLAUSE_SEQ
:
1394 case OMP_CLAUSE_TILE
:
1395 case OMP_CLAUSE__SIMT_
:
1396 case OMP_CLAUSE_DEFAULT
:
1397 case OMP_CLAUSE_NONTEMPORAL
:
1398 case OMP_CLAUSE_IF_PRESENT
:
1399 case OMP_CLAUSE_FINALIZE
:
1400 case OMP_CLAUSE_TASK_REDUCTION
:
1403 case OMP_CLAUSE_ALIGNED
:
1404 decl
= OMP_CLAUSE_DECL (c
);
1405 if (is_global_var (decl
)
1406 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1407 install_var_local (decl
, ctx
);
1410 case OMP_CLAUSE__CACHE_
:
1416 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1418 switch (OMP_CLAUSE_CODE (c
))
1420 case OMP_CLAUSE_LASTPRIVATE
:
1421 /* Let the corresponding firstprivate clause create
1423 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1424 scan_array_reductions
= true;
1425 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1429 case OMP_CLAUSE_FIRSTPRIVATE
:
1430 case OMP_CLAUSE_PRIVATE
:
1431 case OMP_CLAUSE_LINEAR
:
1432 case OMP_CLAUSE_IS_DEVICE_PTR
:
1433 decl
= OMP_CLAUSE_DECL (c
);
1434 if (is_variable_sized (decl
))
1436 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1437 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1438 && is_gimple_omp_offloaded (ctx
->stmt
))
1440 tree decl2
= DECL_VALUE_EXPR (decl
);
1441 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1442 decl2
= TREE_OPERAND (decl2
, 0);
1443 gcc_assert (DECL_P (decl2
));
1444 install_var_local (decl2
, ctx
);
1445 fixup_remapped_decl (decl2
, ctx
, false);
1447 install_var_local (decl
, ctx
);
1449 fixup_remapped_decl (decl
, ctx
,
1450 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1451 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1452 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1453 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1454 scan_array_reductions
= true;
1457 case OMP_CLAUSE_REDUCTION
:
1458 case OMP_CLAUSE_IN_REDUCTION
:
1459 decl
= OMP_CLAUSE_DECL (c
);
1460 if (TREE_CODE (decl
) != MEM_REF
)
1462 if (is_variable_sized (decl
))
1463 install_var_local (decl
, ctx
);
1464 fixup_remapped_decl (decl
, ctx
, false);
1466 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1467 scan_array_reductions
= true;
1470 case OMP_CLAUSE_TASK_REDUCTION
:
1471 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1472 scan_array_reductions
= true;
1475 case OMP_CLAUSE_SHARED
:
1476 /* Ignore shared directives in teams construct inside of
1477 target construct. */
1478 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1479 && !is_host_teams_ctx (ctx
))
1481 decl
= OMP_CLAUSE_DECL (c
);
1482 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1484 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1486 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1489 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1490 install_var_field (decl
, by_ref
, 11, ctx
);
1493 fixup_remapped_decl (decl
, ctx
, false);
1496 case OMP_CLAUSE_MAP
:
1497 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1499 decl
= OMP_CLAUSE_DECL (c
);
1501 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1502 && (OMP_CLAUSE_MAP_KIND (c
)
1503 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1504 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1505 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1506 && varpool_node::get_create (decl
)->offloadable
)
1510 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1511 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1512 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1513 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1515 tree new_decl
= lookup_decl (decl
, ctx
);
1516 TREE_TYPE (new_decl
)
1517 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1519 else if (DECL_SIZE (decl
)
1520 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1522 tree decl2
= DECL_VALUE_EXPR (decl
);
1523 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1524 decl2
= TREE_OPERAND (decl2
, 0);
1525 gcc_assert (DECL_P (decl2
));
1526 fixup_remapped_decl (decl2
, ctx
, false);
1527 fixup_remapped_decl (decl
, ctx
, true);
1530 fixup_remapped_decl (decl
, ctx
, false);
1534 case OMP_CLAUSE_COPYPRIVATE
:
1535 case OMP_CLAUSE_COPYIN
:
1536 case OMP_CLAUSE_DEFAULT
:
1538 case OMP_CLAUSE_NUM_THREADS
:
1539 case OMP_CLAUSE_NUM_TEAMS
:
1540 case OMP_CLAUSE_THREAD_LIMIT
:
1541 case OMP_CLAUSE_DEVICE
:
1542 case OMP_CLAUSE_SCHEDULE
:
1543 case OMP_CLAUSE_DIST_SCHEDULE
:
1544 case OMP_CLAUSE_NOWAIT
:
1545 case OMP_CLAUSE_ORDERED
:
1546 case OMP_CLAUSE_COLLAPSE
:
1547 case OMP_CLAUSE_UNTIED
:
1548 case OMP_CLAUSE_FINAL
:
1549 case OMP_CLAUSE_MERGEABLE
:
1550 case OMP_CLAUSE_PROC_BIND
:
1551 case OMP_CLAUSE_SAFELEN
:
1552 case OMP_CLAUSE_SIMDLEN
:
1553 case OMP_CLAUSE_ALIGNED
:
1554 case OMP_CLAUSE_DEPEND
:
1555 case OMP_CLAUSE__LOOPTEMP_
:
1556 case OMP_CLAUSE__REDUCTEMP_
:
1558 case OMP_CLAUSE_FROM
:
1559 case OMP_CLAUSE_PRIORITY
:
1560 case OMP_CLAUSE_GRAINSIZE
:
1561 case OMP_CLAUSE_NUM_TASKS
:
1562 case OMP_CLAUSE_THREADS
:
1563 case OMP_CLAUSE_SIMD
:
1564 case OMP_CLAUSE_NOGROUP
:
1565 case OMP_CLAUSE_DEFAULTMAP
:
1566 case OMP_CLAUSE_USE_DEVICE_PTR
:
1567 case OMP_CLAUSE_NONTEMPORAL
:
1568 case OMP_CLAUSE_ASYNC
:
1569 case OMP_CLAUSE_WAIT
:
1570 case OMP_CLAUSE_NUM_GANGS
:
1571 case OMP_CLAUSE_NUM_WORKERS
:
1572 case OMP_CLAUSE_VECTOR_LENGTH
:
1573 case OMP_CLAUSE_GANG
:
1574 case OMP_CLAUSE_WORKER
:
1575 case OMP_CLAUSE_VECTOR
:
1576 case OMP_CLAUSE_INDEPENDENT
:
1577 case OMP_CLAUSE_AUTO
:
1578 case OMP_CLAUSE_SEQ
:
1579 case OMP_CLAUSE_TILE
:
1580 case OMP_CLAUSE__GRIDDIM_
:
1581 case OMP_CLAUSE__SIMT_
:
1582 case OMP_CLAUSE_IF_PRESENT
:
1583 case OMP_CLAUSE_FINALIZE
:
1586 case OMP_CLAUSE__CACHE_
:
1592 gcc_checking_assert (!scan_array_reductions
1593 || !is_gimple_omp_oacc (ctx
->stmt
));
1594 if (scan_array_reductions
)
1596 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1597 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1598 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1599 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1600 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1602 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), ctx
);
1603 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
1605 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1606 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1607 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1608 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1609 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1610 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1614 /* Create a new name for omp child function. Returns an identifier. */
1617 create_omp_child_function_name (bool task_copy
)
1619 return clone_function_name_numbered (current_function_decl
,
1620 task_copy
? "_omp_cpyfn" : "_omp_fn");
1623 /* Return true if CTX may belong to offloaded code: either if current function
1624 is offloaded, or any enclosing context corresponds to a target region. */
1627 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1629 if (cgraph_node::get (current_function_decl
)->offloadable
)
1631 for (; ctx
; ctx
= ctx
->outer
)
1632 if (is_gimple_omp_offloaded (ctx
->stmt
))
1637 /* Build a decl for the omp child function. It'll not contain a body
1638 yet, just the bare decl. */
1641 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1643 tree decl
, type
, name
, t
;
1645 name
= create_omp_child_function_name (task_copy
);
1647 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1648 ptr_type_node
, NULL_TREE
);
1650 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1652 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1654 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1657 ctx
->cb
.dst_fn
= decl
;
1659 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1661 TREE_STATIC (decl
) = 1;
1662 TREE_USED (decl
) = 1;
1663 DECL_ARTIFICIAL (decl
) = 1;
1664 DECL_IGNORED_P (decl
) = 0;
1665 TREE_PUBLIC (decl
) = 0;
1666 DECL_UNINLINABLE (decl
) = 1;
1667 DECL_EXTERNAL (decl
) = 0;
1668 DECL_CONTEXT (decl
) = NULL_TREE
;
1669 DECL_INITIAL (decl
) = make_node (BLOCK
);
1670 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1671 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1672 /* Remove omp declare simd attribute from the new attributes. */
1673 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1675 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1678 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1679 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1680 *p
= TREE_CHAIN (*p
);
1683 tree chain
= TREE_CHAIN (*p
);
1684 *p
= copy_node (*p
);
1685 p
= &TREE_CHAIN (*p
);
1689 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1690 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1691 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
1692 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
1693 DECL_FUNCTION_VERSIONED (decl
)
1694 = DECL_FUNCTION_VERSIONED (current_function_decl
);
1696 if (omp_maybe_offloaded_ctx (ctx
))
1698 cgraph_node::get_create (decl
)->offloadable
= 1;
1699 if (ENABLE_OFFLOADING
)
1700 g
->have_offload
= true;
1703 if (cgraph_node::get_create (decl
)->offloadable
1704 && !lookup_attribute ("omp declare target",
1705 DECL_ATTRIBUTES (current_function_decl
)))
1707 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1708 ? "omp target entrypoint"
1709 : "omp declare target");
1710 DECL_ATTRIBUTES (decl
)
1711 = tree_cons (get_identifier (target_attr
),
1712 NULL_TREE
, DECL_ATTRIBUTES (decl
));
1715 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1716 RESULT_DECL
, NULL_TREE
, void_type_node
);
1717 DECL_ARTIFICIAL (t
) = 1;
1718 DECL_IGNORED_P (t
) = 1;
1719 DECL_CONTEXT (t
) = decl
;
1720 DECL_RESULT (decl
) = t
;
1722 tree data_name
= get_identifier (".omp_data_i");
1723 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
1725 DECL_ARTIFICIAL (t
) = 1;
1726 DECL_NAMELESS (t
) = 1;
1727 DECL_ARG_TYPE (t
) = ptr_type_node
;
1728 DECL_CONTEXT (t
) = current_function_decl
;
1730 TREE_READONLY (t
) = 1;
1731 DECL_ARGUMENTS (decl
) = t
;
1733 ctx
->receiver_decl
= t
;
1736 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1737 PARM_DECL
, get_identifier (".omp_data_o"),
1739 DECL_ARTIFICIAL (t
) = 1;
1740 DECL_NAMELESS (t
) = 1;
1741 DECL_ARG_TYPE (t
) = ptr_type_node
;
1742 DECL_CONTEXT (t
) = current_function_decl
;
1744 TREE_ADDRESSABLE (t
) = 1;
1745 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1746 DECL_ARGUMENTS (decl
) = t
;
1749 /* Allocate memory for the function structure. The call to
1750 allocate_struct_function clobbers CFUN, so we need to restore
1752 push_struct_function (decl
);
1753 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
1754 init_tree_ssa (cfun
);
1758 /* Callback for walk_gimple_seq. Check if combined parallel
1759 contains gimple_omp_for_combined_into_p OMP_FOR. */
1762 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
1763 bool *handled_ops_p
,
1764 struct walk_stmt_info
*wi
)
1766 gimple
*stmt
= gsi_stmt (*gsi_p
);
1768 *handled_ops_p
= true;
1769 switch (gimple_code (stmt
))
1773 case GIMPLE_OMP_FOR
:
1774 if (gimple_omp_for_combined_into_p (stmt
)
1775 && gimple_omp_for_kind (stmt
)
1776 == *(const enum gf_mask
*) (wi
->info
))
1779 return integer_zero_node
;
1788 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1791 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
1792 omp_context
*outer_ctx
)
1794 struct walk_stmt_info wi
;
1796 memset (&wi
, 0, sizeof (wi
));
1798 wi
.info
= (void *) &msk
;
1799 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
1800 if (wi
.info
!= (void *) &msk
)
1802 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
1803 struct omp_for_data fd
;
1804 omp_extract_for_data (for_stmt
, &fd
, NULL
);
1805 /* We need two temporaries with fd.loop.v type (istart/iend)
1806 and then (fd.collapse - 1) temporaries with the same
1807 type for count2 ... countN-1 vars if not constant. */
1808 size_t count
= 2, i
;
1809 tree type
= fd
.iter_type
;
1811 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
1813 count
+= fd
.collapse
- 1;
1814 /* If there are lastprivate clauses on the inner
1815 GIMPLE_OMP_FOR, add one more temporaries for the total number
1816 of iterations (product of count1 ... countN-1). */
1817 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
1818 OMP_CLAUSE_LASTPRIVATE
))
1820 else if (msk
== GF_OMP_FOR_KIND_FOR
1821 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1822 OMP_CLAUSE_LASTPRIVATE
))
1825 for (i
= 0; i
< count
; i
++)
1827 tree temp
= create_tmp_var (type
);
1828 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
1829 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1830 OMP_CLAUSE_DECL (c
) = temp
;
1831 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1832 gimple_omp_taskreg_set_clauses (stmt
, c
);
1835 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
1836 && omp_find_clause (gimple_omp_task_clauses (stmt
),
1837 OMP_CLAUSE_REDUCTION
))
1839 tree type
= build_pointer_type (pointer_sized_int_node
);
1840 tree temp
= create_tmp_var (type
);
1841 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1842 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1843 OMP_CLAUSE_DECL (c
) = temp
;
1844 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
1845 gimple_omp_task_set_clauses (stmt
, c
);
1849 /* Scan an OpenMP parallel directive. */
1852 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1856 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
1858 /* Ignore parallel directives with empty bodies, unless there
1859 are copyin clauses. */
1861 && empty_body_p (gimple_omp_body (stmt
))
1862 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1863 OMP_CLAUSE_COPYIN
) == NULL
)
1865 gsi_replace (gsi
, gimple_build_nop (), false);
1869 if (gimple_omp_parallel_combined_p (stmt
))
1870 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
1871 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1872 OMP_CLAUSE_REDUCTION
);
1873 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
1874 if (OMP_CLAUSE_REDUCTION_TASK (c
))
1876 tree type
= build_pointer_type (pointer_sized_int_node
);
1877 tree temp
= create_tmp_var (type
);
1878 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1880 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1881 OMP_CLAUSE_DECL (c
) = temp
;
1882 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
1883 gimple_omp_parallel_set_clauses (stmt
, c
);
1886 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
1889 ctx
= new_omp_context (stmt
, outer_ctx
);
1890 taskreg_contexts
.safe_push (ctx
);
1891 if (taskreg_nesting_level
> 1)
1892 ctx
->is_nested
= true;
1893 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1894 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1895 name
= create_tmp_var_name (".omp_data_s");
1896 name
= build_decl (gimple_location (stmt
),
1897 TYPE_DECL
, name
, ctx
->record_type
);
1898 DECL_ARTIFICIAL (name
) = 1;
1899 DECL_NAMELESS (name
) = 1;
1900 TYPE_NAME (ctx
->record_type
) = name
;
1901 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1902 if (!gimple_omp_parallel_grid_phony (stmt
))
1904 create_omp_child_function (ctx
, false);
1905 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1908 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
1909 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
1911 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
1912 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
1915 /* Scan an OpenMP task directive. */
1918 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1922 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
1924 /* Ignore task directives with empty bodies, unless they have depend
1927 && gimple_omp_body (stmt
)
1928 && empty_body_p (gimple_omp_body (stmt
))
1929 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
1931 gsi_replace (gsi
, gimple_build_nop (), false);
1935 if (gimple_omp_task_taskloop_p (stmt
))
1936 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
1938 ctx
= new_omp_context (stmt
, outer_ctx
);
1940 if (gimple_omp_task_taskwait_p (stmt
))
1942 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
1946 taskreg_contexts
.safe_push (ctx
);
1947 if (taskreg_nesting_level
> 1)
1948 ctx
->is_nested
= true;
1949 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1950 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1951 name
= create_tmp_var_name (".omp_data_s");
1952 name
= build_decl (gimple_location (stmt
),
1953 TYPE_DECL
, name
, ctx
->record_type
);
1954 DECL_ARTIFICIAL (name
) = 1;
1955 DECL_NAMELESS (name
) = 1;
1956 TYPE_NAME (ctx
->record_type
) = name
;
1957 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1958 create_omp_child_function (ctx
, false);
1959 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1961 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
1963 if (ctx
->srecord_type
)
1965 name
= create_tmp_var_name (".omp_data_a");
1966 name
= build_decl (gimple_location (stmt
),
1967 TYPE_DECL
, name
, ctx
->srecord_type
);
1968 DECL_ARTIFICIAL (name
) = 1;
1969 DECL_NAMELESS (name
) = 1;
1970 TYPE_NAME (ctx
->srecord_type
) = name
;
1971 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
1972 create_omp_child_function (ctx
, true);
1975 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
1977 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
1979 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
1980 t
= build_int_cst (long_integer_type_node
, 0);
1981 gimple_omp_task_set_arg_size (stmt
, t
);
1982 t
= build_int_cst (long_integer_type_node
, 1);
1983 gimple_omp_task_set_arg_align (stmt
, t
);
1987 /* Helper function for finish_taskreg_scan, called through walk_tree.
1988 If maybe_lookup_decl_in_outer_context returns non-NULL for some
1989 tree, replace it in the expression. */
1992 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
1996 omp_context
*ctx
= (omp_context
*) data
;
1997 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
2000 if (DECL_HAS_VALUE_EXPR_P (t
))
2001 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2006 else if (IS_TYPE_OR_DECL_P (*tp
))
2011 /* If any decls have been made addressable during scan_omp,
2012 adjust their fields if needed, and layout record types
2013 of parallel/task constructs. */
2016 finish_taskreg_scan (omp_context
*ctx
)
2018 if (ctx
->record_type
== NULL_TREE
)
2021 /* If any task_shared_vars were needed, verify all
2022 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2023 statements if use_pointer_for_field hasn't changed
2024 because of that. If it did, update field types now. */
2025 if (task_shared_vars
)
2029 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2030 c
; c
= OMP_CLAUSE_CHAIN (c
))
2031 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2032 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2034 tree decl
= OMP_CLAUSE_DECL (c
);
2036 /* Global variables don't need to be copied,
2037 the receiver side will use them directly. */
2038 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2040 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
2041 || !use_pointer_for_field (decl
, ctx
))
2043 tree field
= lookup_field (decl
, ctx
);
2044 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2045 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2047 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2048 TREE_THIS_VOLATILE (field
) = 0;
2049 DECL_USER_ALIGN (field
) = 0;
2050 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2051 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2052 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
2053 if (ctx
->srecord_type
)
2055 tree sfield
= lookup_sfield (decl
, ctx
);
2056 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2057 TREE_THIS_VOLATILE (sfield
) = 0;
2058 DECL_USER_ALIGN (sfield
) = 0;
2059 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2060 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2061 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
2066 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2068 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2069 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2072 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2073 expects to find it at the start of data. */
2074 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2075 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2079 *p
= DECL_CHAIN (*p
);
2083 p
= &DECL_CHAIN (*p
);
2084 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2085 TYPE_FIELDS (ctx
->record_type
) = f
;
2087 layout_type (ctx
->record_type
);
2088 fixup_child_record_type (ctx
);
2090 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2092 layout_type (ctx
->record_type
);
2093 fixup_child_record_type (ctx
);
2097 location_t loc
= gimple_location (ctx
->stmt
);
2098 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2099 /* Move VLA fields to the end. */
2100 p
= &TYPE_FIELDS (ctx
->record_type
);
2102 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2103 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2106 *p
= TREE_CHAIN (*p
);
2107 TREE_CHAIN (*q
) = NULL_TREE
;
2108 q
= &TREE_CHAIN (*q
);
2111 p
= &DECL_CHAIN (*p
);
2113 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2115 /* Move fields corresponding to first and second _looptemp_
2116 clause first. There are filled by GOMP_taskloop
2117 and thus need to be in specific positions. */
2118 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2119 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2120 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2121 OMP_CLAUSE__LOOPTEMP_
);
2122 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2123 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2124 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2125 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
2126 p
= &TYPE_FIELDS (ctx
->record_type
);
2128 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2129 *p
= DECL_CHAIN (*p
);
2131 p
= &DECL_CHAIN (*p
);
2132 DECL_CHAIN (f1
) = f2
;
2135 DECL_CHAIN (f2
) = f3
;
2136 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2139 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2140 TYPE_FIELDS (ctx
->record_type
) = f1
;
2141 if (ctx
->srecord_type
)
2143 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2144 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2146 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2147 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2149 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2150 *p
= DECL_CHAIN (*p
);
2152 p
= &DECL_CHAIN (*p
);
2153 DECL_CHAIN (f1
) = f2
;
2154 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2157 DECL_CHAIN (f2
) = f3
;
2158 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2161 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2162 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2165 layout_type (ctx
->record_type
);
2166 fixup_child_record_type (ctx
);
2167 if (ctx
->srecord_type
)
2168 layout_type (ctx
->srecord_type
);
2169 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2170 TYPE_SIZE_UNIT (ctx
->record_type
));
2171 if (TREE_CODE (t
) != INTEGER_CST
)
2173 t
= unshare_expr (t
);
2174 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2176 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2177 t
= build_int_cst (long_integer_type_node
,
2178 TYPE_ALIGN_UNIT (ctx
->record_type
));
2179 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2183 /* Find the enclosing offload context. */
2185 static omp_context
*
2186 enclosing_target_ctx (omp_context
*ctx
)
2188 for (; ctx
; ctx
= ctx
->outer
)
2189 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2195 /* Return true if ctx is part of an oacc kernels region. */
2198 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2200 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2202 gimple
*stmt
= ctx
->stmt
;
2203 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2204 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2211 /* Check the parallelism clauses inside a kernels regions.
2212 Until kernels handling moves to use the same loop indirection
2213 scheme as parallel, we need to do this checking early. */
2216 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2218 bool checking
= true;
2219 unsigned outer_mask
= 0;
2220 unsigned this_mask
= 0;
2221 bool has_seq
= false, has_auto
= false;
2224 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2228 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2230 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2233 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2235 switch (OMP_CLAUSE_CODE (c
))
2237 case OMP_CLAUSE_GANG
:
2238 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2240 case OMP_CLAUSE_WORKER
:
2241 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2243 case OMP_CLAUSE_VECTOR
:
2244 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2246 case OMP_CLAUSE_SEQ
:
2249 case OMP_CLAUSE_AUTO
:
2259 if (has_seq
&& (this_mask
|| has_auto
))
2260 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2261 " OpenACC loop specifiers");
2262 else if (has_auto
&& this_mask
)
2263 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2264 " OpenACC loop specifiers");
2266 if (this_mask
& outer_mask
)
2267 error_at (gimple_location (stmt
), "inner loop uses same"
2268 " OpenACC parallelism as containing loop");
2271 return outer_mask
| this_mask
;
2274 /* Scan a GIMPLE_OMP_FOR. */
/* Create a new omp_context for STMT nested in OUTER_CTX, validate and
   (for OpenACC kernels) prune its clauses, then scan clauses, pre-body,
   loop-control operands and body.  Returns the new context.
   NOTE(review): interior lines were lost in extraction; the visible
   code is documented as-is.  */
2276 static omp_context
*
2277 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2281 tree clauses
= gimple_omp_for_clauses (stmt
);
2283 ctx
= new_omp_context (stmt
, outer_ctx
);
2285 if (is_gimple_omp_oacc (stmt
))
2287 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
/* Inside an OpenACC parallel (or orphaned), gang/worker/vector clauses
   must not carry an argument.  */
2289 if (!tgt
|| is_oacc_parallel (tgt
))
2290 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2292 char const *check
= NULL
;
2294 switch (OMP_CLAUSE_CODE (c
))
2296 case OMP_CLAUSE_GANG
:
2300 case OMP_CLAUSE_WORKER
:
2304 case OMP_CLAUSE_VECTOR
:
2312 if (check
&& OMP_CLAUSE_OPERAND (c
, 0))
2313 error_at (gimple_location (stmt
),
2314 "argument not permitted on %qs clause in"
2315 " OpenACC %<parallel%>", check
);
2318 if (tgt
&& is_oacc_kernels (tgt
))
2320 /* Strip out reductions, as they are not handled yet. */
2321 tree
*prev_ptr
= &clauses
;
/* Unlink every REDUCTION clause from the chain in place.  */
2323 while (tree probe
= *prev_ptr
)
2325 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2327 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2328 *prev_ptr
= *next_ptr
;
2330 prev_ptr
= next_ptr
;
2333 gimple_omp_for_set_clauses (stmt
, clauses
);
2334 check_oacc_kernel_gwv (stmt
, ctx
);
2338 scan_sharing_clauses (clauses
, ctx
);
2340 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
/* Scan index, initial, final and increment operands of every collapsed
   loop dimension.  */
2341 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2343 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2344 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2345 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2346 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2348 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2352 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
/* Replace the simd loop STMT at GSI with a GIMPLE_BIND containing
   "if (GOMP_USE_SIMT ()) <SIMT copy of the loop> else <original loop>".
   The SIMT copy gets an extra _SIMT_ clause; both copies are then
   scanned and the SIMD context records its SIMT twin.  */
2355 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2356 omp_context
*outer_ctx
)
2358 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
)
;
2359 gsi_replace (gsi
, bind
, false);
2360 gimple_seq seq
= NULL
;
/* COND = GOMP_USE_SIMT (); decided at runtime/offload time.  */
2361 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2362 tree cond
= create_tmp_var_raw (integer_type_node
);
2363 DECL_CONTEXT (cond
) = current_function_decl
;
2364 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2365 gimple_bind_set_vars (bind
, cond
);
2366 gimple_call_set_lhs (g
, cond
);
2367 gimple_seq_add_stmt (&seq
, g
);
/* lab1 = SIMT branch, lab2 = SIMD branch, lab3 = join.  */
2368 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2369 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2370 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2371 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2372 gimple_seq_add_stmt (&seq
, g
);
2373 g
= gimple_build_label (lab1
);
2374 gimple_seq_add_stmt (&seq
, g
);
/* Deep-copy the loop for the SIMT path and tag it with _SIMT_.  */
2375 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2376 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2377 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2378 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2379 gimple_omp_for_set_clauses (new_stmt
, clause
);
2380 gimple_seq_add_stmt (&seq
, new_stmt
);
2381 g
= gimple_build_goto (lab3
);
2382 gimple_seq_add_stmt (&seq
, g
);
2383 g
= gimple_build_label (lab2
);
2384 gimple_seq_add_stmt (&seq
, g
);
/* Original loop is the non-SIMT (plain SIMD) path.  */
2385 gimple_seq_add_stmt (&seq
, stmt
);
2386 g
= gimple_build_label (lab3
);
2387 gimple_seq_add_stmt (&seq
, g
);
2388 gimple_bind_set_body (bind
, seq
);
/* Scan both copies; link the SIMD context to its SIMT twin.  */
2390 scan_omp_for (new_stmt
, outer_ctx
);
2391 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2394 /* Scan an OpenMP sections directive. */
/* Create a context for the sections STMT under OUTER_CTX, then scan
   its clauses and body.  */
2397 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2401 ctx
= new_omp_context (stmt
, outer_ctx
);
2402 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2403 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2406 /* Scan an OpenMP single directive. */
/* Create a context for the single STMT with a fresh record type
   ".omp_copy_s" used for copyprivate data marshalling; scan clauses
   and body, then discard the record if no fields were added, else
   lay it out.  */
2409 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2414 ctx
= new_omp_context (stmt
, outer_ctx
);
2415 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2416 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2417 name
= create_tmp_var_name (".omp_copy_s");
2418 name
= build_decl (gimple_location (stmt
),
2419 TYPE_DECL
, name
, ctx
->record_type
);
2420 TYPE_NAME (ctx
->record_type
) = name
;
2422 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2423 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* No copyprivate fields were needed: drop the record entirely.  */
2425 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2426 ctx
->record_type
= NULL
;
2428 layout_type (ctx
->record_type
);
2431 /* Scan a GIMPLE_OMP_TARGET. */
/* Create a context for the target STMT with the ".omp_data_t" record
   used to marshal mapped data; for offloaded regions also create the
   child function.  After scanning, either drop an empty record or
   finalize it (reverse fields, verify alignment, lay out, and fix the
   child's receiver record).
   NOTE(review): some interior lines (braces, an "else", loop header
   pieces) were lost in extraction.  */
2434 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
2438 bool offloaded
= is_gimple_omp_offloaded (stmt
);
2439 tree clauses
= gimple_omp_target_clauses (stmt
);
2441 ctx
= new_omp_context (stmt
, outer_ctx
);
2442 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2443 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2444 name
= create_tmp_var_name (".omp_data_t");
2445 name
= build_decl (gimple_location (stmt
),
2446 TYPE_DECL
, name
, ctx
->record_type
);
2447 DECL_ARTIFICIAL (name
) = 1;
2448 DECL_NAMELESS (name
) = 1;
2449 TYPE_NAME (ctx
->record_type
) = name
;
2450 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
/* Offloaded regions get an outlined child function.  */
2454 create_omp_child_function (ctx
, false);
2455 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2458 scan_sharing_clauses (clauses
, ctx
);
2459 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* Nothing was mapped: discard the record and the receiver decl.  */
2461 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2462 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
/* Fields were chained in reverse during scanning; restore order.  */
2465 TYPE_FIELDS (ctx
->record_type
)
2466 = nreverse (TYPE_FIELDS (ctx
->record_type
));
/* All map fields are expected to share one alignment (checked).  */
2469 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
2470 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
2472 field
= DECL_CHAIN (field
))
2473 gcc_assert (DECL_ALIGN (field
) == align
);
2475 layout_type (ctx
->record_type
);
2477 fixup_child_record_type (ctx
);
2481 /* Scan an OpenMP teams directive. */
/* Create a context for the teams STMT.  Non-host teams just scan
   clauses and body; host teams additionally behave like a taskreg
   construct: they are pushed on taskreg_contexts, get an
   ".omp_data_s" record plus an outlined child function, and an empty
   record is discarded after scanning.  */
2484 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
2486 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
/* Offloaded (non-host) teams: plain scan, no child function.  */
2488 if (!gimple_omp_teams_host (stmt
))
2490 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2491 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* Host teams path: treated as a task-region construct.  */
2494 taskreg_contexts
.safe_push (ctx
);
2495 gcc_assert (taskreg_nesting_level
== 1);
2496 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2497 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2498 tree name
= create_tmp_var_name (".omp_data_s");
2499 name
= build_decl (gimple_location (stmt
),
2500 TYPE_DECL
, name
, ctx
->record_type
);
2501 DECL_ARTIFICIAL (name
) = 1;
2502 DECL_NAMELESS (name
) = 1;
2503 TYPE_NAME (ctx
->record_type
) = name
;
2504 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2505 create_omp_child_function (ctx
, false);
2506 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2508 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2509 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* No data-sharing fields were needed: drop record and receiver.  */
2511 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2512 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2515 /* Check nesting restrictions. */
/* Diagnose illegal nestings of the OMP/OpenACC statement STMT inside
   the context chain CTX per the OpenMP/OpenACC specs.  Returns false
   (causing the caller to remove STMT) when a hard nesting error was
   emitted, true otherwise.
   NOTE(review): many interior lines (braces, breaks, returns, some
   conditions) were lost in extraction; comments describe only the
   visible checks.  */
2517 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
2521 if (ctx
&& gimple_code (ctx
->stmt
) == GIMPLE_OMP_GRID_BODY
)
2522 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2523 the original copy of its contents. */
2526 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2527 inside an OpenACC CTX. */
2528 if (!(is_gimple_omp (stmt
)
2529 && is_gimple_omp_oacc (stmt
))
2530 /* Except for atomic codes that we share with OpenMP. */
2531 && !(gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2532 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2534 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2536 error_at (gimple_location (stmt
),
2537 "non-OpenACC construct inside of OpenACC routine");
/* Walk outward looking for any enclosing OpenACC construct.  */
2541 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
2542 if (is_gimple_omp (octx
->stmt
)
2543 && is_gimple_omp_oacc (octx
->stmt
))
2545 error_at (gimple_location (stmt
),
2546 "non-OpenACC construct inside of OpenACC region");
/* Restrictions on what may appear directly inside a simd region.  */
2553 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
2554 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
2557 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
2559 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2560 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
/* ordered threads simd requires a combined for-simd parent.  */
2562 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
2563 && (ctx
->outer
== NULL
2564 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
2565 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
2566 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
2567 != GF_OMP_FOR_KIND_FOR
)
2568 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
2570 error_at (gimple_location (stmt
),
2571 "%<ordered simd threads%> must be closely "
2572 "nested inside of %<for simd%> region");
2578 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2579 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
)
2581 error_at (gimple_location (stmt
),
2582 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2583 " or %<#pragma omp atomic%> may not be nested inside"
2584 " %<simd%> region");
/* Restrictions on what may be strictly nested inside teams.  */
2587 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2589 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
2590 || ((gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
)
2591 && (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
)))
2592 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
2594 error_at (gimple_location (stmt
),
2595 "only %<distribute%> or %<parallel%> regions are "
2596 "allowed to be strictly nested inside %<teams%> "
/* Per-construct checks, dispatched on STMT's code.  */
2602 switch (gimple_code (stmt
))
2604 case GIMPLE_OMP_FOR
:
2605 if (gimple_omp_for_kind (stmt
) & GF_OMP_FOR_SIMD
)
/* distribute must sit directly inside teams.  */
2607 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
2609 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
2611 error_at (gimple_location (stmt
),
2612 "%<distribute%> region must be strictly nested "
2613 "inside %<teams%> construct");
2618 /* We split taskloop into task and nested taskloop in it. */
2619 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
/* OpenACC loops must sit inside an OpenACC compute construct or
   another OpenACC loop (or an OpenACC routine).  */
2621 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
2626 switch (gimple_code (ctx
->stmt
))
2628 case GIMPLE_OMP_FOR
:
2629 ok
= (gimple_omp_for_kind (ctx
->stmt
)
2630 == GF_OMP_FOR_KIND_OACC_LOOP
);
2633 case GIMPLE_OMP_TARGET
:
2634 switch (gimple_omp_target_kind (ctx
->stmt
))
2636 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
2637 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
2648 else if (oacc_get_fn_attrib (current_function_decl
))
2652 error_at (gimple_location (stmt
),
2653 "OpenACC loop directive must be associated with"
2654 " an OpenACC compute region");
/* GOMP_cancel / GOMP_cancellation_point builtins: validate the
   construct argument against the enclosing region.  */
2660 if (is_gimple_call (stmt
)
2661 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2662 == BUILT_IN_GOMP_CANCEL
2663 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2664 == BUILT_IN_GOMP_CANCELLATION_POINT
))
2666 const char *bad
= NULL
;
2667 const char *kind
= NULL
;
2668 const char *construct
2669 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2670 == BUILT_IN_GOMP_CANCEL
)
2671 ? "#pragma omp cancel"
2672 : "#pragma omp cancellation point";
2675 error_at (gimple_location (stmt
), "orphaned %qs construct",
/* First call argument selects which construct is being cancelled.  */
2679 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
2680 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
2684 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
2685 bad
= "#pragma omp parallel";
2686 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2687 == BUILT_IN_GOMP_CANCEL
2688 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2689 ctx
->cancellable
= true;
2693 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2694 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
2695 bad
= "#pragma omp for";
2696 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2697 == BUILT_IN_GOMP_CANCEL
2698 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2700 ctx
->cancellable
= true;
/* cancel for is useless under nowait/ordered; warn only.  */
2701 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2703 warning_at (gimple_location (stmt
), 0,
2704 "%<#pragma omp cancel for%> inside "
2705 "%<nowait%> for construct");
2706 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2707 OMP_CLAUSE_ORDERED
))
2708 warning_at (gimple_location (stmt
), 0,
2709 "%<#pragma omp cancel for%> inside "
2710 "%<ordered%> for construct");
2715 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
2716 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
2717 bad
= "#pragma omp sections";
2718 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2719 == BUILT_IN_GOMP_CANCEL
2720 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2722 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
2724 ctx
->cancellable
= true;
2725 if (omp_find_clause (gimple_omp_sections_clauses
2728 warning_at (gimple_location (stmt
), 0,
2729 "%<#pragma omp cancel sections%> inside "
2730 "%<nowait%> sections construct");
/* Inside a SECTION: the cancellable flag goes on the enclosing
   SECTIONS context.  */
2734 gcc_assert (ctx
->outer
2735 && gimple_code (ctx
->outer
->stmt
)
2736 == GIMPLE_OMP_SECTIONS
);
2737 ctx
->outer
->cancellable
= true;
2738 if (omp_find_clause (gimple_omp_sections_clauses
2741 warning_at (gimple_location (stmt
), 0,
2742 "%<#pragma omp cancel sections%> inside "
2743 "%<nowait%> sections construct");
/* cancel taskgroup: must be in a task, or a taskloop's task.  */
2749 if (!is_task_ctx (ctx
)
2750 && (!is_taskloop_ctx (ctx
)
2751 || ctx
->outer
== NULL
2752 || !is_task_ctx (ctx
->outer
)))
2753 bad
= "#pragma omp task";
/* Verify an enclosing taskgroup region exists.  */
2756 for (omp_context
*octx
= ctx
->outer
;
2757 octx
; octx
= octx
->outer
)
2759 switch (gimple_code (octx
->stmt
))
2761 case GIMPLE_OMP_TASKGROUP
:
2763 case GIMPLE_OMP_TARGET
:
2764 if (gimple_omp_target_kind (octx
->stmt
)
2765 != GF_OMP_TARGET_KIND_REGION
)
2768 case GIMPLE_OMP_PARALLEL
:
2769 case GIMPLE_OMP_TEAMS
:
2770 error_at (gimple_location (stmt
),
2771 "%<%s taskgroup%> construct not closely "
2772 "nested inside of %<taskgroup%> region",
2775 case GIMPLE_OMP_TASK
:
2776 if (gimple_omp_task_taskloop_p (octx
->stmt
)
2778 && is_taskloop_ctx (octx
->outer
))
2781 = gimple_omp_for_clauses (octx
->outer
->stmt
);
2782 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
2791 ctx
->cancellable
= true;
2796 error_at (gimple_location (stmt
), "invalid arguments");
2801 error_at (gimple_location (stmt
),
2802 "%<%s %s%> construct not closely nested inside of %qs",
2803 construct
, kind
, bad
);
/* Worksharing / barrier nesting checks.  */
2808 case GIMPLE_OMP_SECTIONS
:
2809 case GIMPLE_OMP_SINGLE
:
2810 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2811 switch (gimple_code (ctx
->stmt
))
2813 case GIMPLE_OMP_FOR
:
2814 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
2815 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
2818 case GIMPLE_OMP_SECTIONS
:
2819 case GIMPLE_OMP_SINGLE
:
2820 case GIMPLE_OMP_ORDERED
:
2821 case GIMPLE_OMP_MASTER
:
2822 case GIMPLE_OMP_TASK
:
2823 case GIMPLE_OMP_CRITICAL
:
2824 if (is_gimple_call (stmt
))
/* Only the GOMP_barrier builtin reaches here as a call.  */
2826 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2827 != BUILT_IN_GOMP_BARRIER
)
2829 error_at (gimple_location (stmt
),
2830 "barrier region may not be closely nested inside "
2831 "of work-sharing, %<critical%>, %<ordered%>, "
2832 "%<master%>, explicit %<task%> or %<taskloop%> "
2836 error_at (gimple_location (stmt
),
2837 "work-sharing region may not be closely nested inside "
2838 "of work-sharing, %<critical%>, %<ordered%>, "
2839 "%<master%>, explicit %<task%> or %<taskloop%> region");
2841 case GIMPLE_OMP_PARALLEL
:
2842 case GIMPLE_OMP_TEAMS
:
2844 case GIMPLE_OMP_TARGET
:
2845 if (gimple_omp_target_kind (ctx
->stmt
)
2846 == GF_OMP_TARGET_KIND_REGION
)
/* master nesting checks.  */
2853 case GIMPLE_OMP_MASTER
:
2854 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2855 switch (gimple_code (ctx
->stmt
))
2857 case GIMPLE_OMP_FOR
:
2858 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
2859 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
2862 case GIMPLE_OMP_SECTIONS
:
2863 case GIMPLE_OMP_SINGLE
:
2864 case GIMPLE_OMP_TASK
:
2865 error_at (gimple_location (stmt
),
2866 "%<master%> region may not be closely nested inside "
2867 "of work-sharing, explicit %<task%> or %<taskloop%> "
2870 case GIMPLE_OMP_PARALLEL
:
2871 case GIMPLE_OMP_TEAMS
:
2873 case GIMPLE_OMP_TARGET
:
2874 if (gimple_omp_target_kind (ctx
->stmt
)
2875 == GF_OMP_TARGET_KIND_REGION
)
/* task: depend(source)/depend(sink) only valid on ordered.  */
2882 case GIMPLE_OMP_TASK
:
2883 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2884 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
2885 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
2886 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
2888 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
2889 error_at (OMP_CLAUSE_LOCATION (c
),
2890 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2891 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
/* ordered: clause validation plus enclosing-region checks.  */
2895 case GIMPLE_OMP_ORDERED
:
2896 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2897 c
; c
= OMP_CLAUSE_CHAIN (c
))
2899 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
2901 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
2902 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
2905 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
2906 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
2907 || kind
== OMP_CLAUSE_DEPEND_SINK
)
2910 /* Look for containing ordered(N) loop. */
2912 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2914 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2915 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
2917 error_at (OMP_CLAUSE_LOCATION (c
),
2918 "%<ordered%> construct with %<depend%> clause "
2919 "must be closely nested inside an %<ordered%> "
2923 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
2925 error_at (OMP_CLAUSE_LOCATION (c
),
2926 "%<ordered%> construct with %<depend%> clause "
2927 "must be closely nested inside a loop with "
2928 "%<ordered%> clause with a parameter");
2934 error_at (OMP_CLAUSE_LOCATION (c
),
2935 "invalid depend kind in omp %<ordered%> %<depend%>");
2939 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2940 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2942 /* ordered simd must be closely nested inside of simd region,
2943 and simd region must not encounter constructs other than
2944 ordered simd, therefore ordered simd may be either orphaned,
2945 or ctx->stmt must be simd. The latter case is handled already
2949 error_at (gimple_location (stmt
),
2950 "%<ordered%> %<simd%> must be closely nested inside "
2955 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2956 switch (gimple_code (ctx
->stmt
))
2958 case GIMPLE_OMP_CRITICAL
:
2959 case GIMPLE_OMP_TASK
:
2960 case GIMPLE_OMP_ORDERED
:
2961 ordered_in_taskloop
:
2962 error_at (gimple_location (stmt
),
2963 "%<ordered%> region may not be closely nested inside "
2964 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2965 "%<taskloop%> region");
2967 case GIMPLE_OMP_FOR
:
2968 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
2969 goto ordered_in_taskloop
;
2971 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2972 OMP_CLAUSE_ORDERED
);
2975 error_at (gimple_location (stmt
),
2976 "%<ordered%> region must be closely nested inside "
2977 "a loop region with an %<ordered%> clause");
2980 if (OMP_CLAUSE_ORDERED_EXPR (o
) != NULL_TREE
2981 && omp_find_clause (c
, OMP_CLAUSE_DEPEND
) == NULL_TREE
)
2983 error_at (gimple_location (stmt
),
2984 "%<ordered%> region without %<depend%> clause may "
2985 "not be closely nested inside a loop region with "
2986 "an %<ordered%> clause with a parameter");
2990 case GIMPLE_OMP_TARGET
:
2991 if (gimple_omp_target_kind (ctx
->stmt
)
2992 != GF_OMP_TARGET_KIND_REGION
)
2995 case GIMPLE_OMP_PARALLEL
:
2996 case GIMPLE_OMP_TEAMS
:
2997 error_at (gimple_location (stmt
),
2998 "%<ordered%> region must be closely nested inside "
2999 "a loop region with an %<ordered%> clause");
/* critical: reject nesting in a same-named critical region.  */
3005 case GIMPLE_OMP_CRITICAL
:
3008 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3009 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3010 if (gomp_critical
*other_crit
3011 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3012 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3014 error_at (gimple_location (stmt
),
3015 "%<critical%> region may not be nested inside "
3016 "a %<critical%> region with the same name");
/* teams: only directly inside target, or outside any OMP region.  */
3021 case GIMPLE_OMP_TEAMS
:
3024 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3025 || (gimple_omp_target_kind (ctx
->stmt
)
3026 != GF_OMP_TARGET_KIND_REGION
))
3028 /* Teams construct can appear either strictly nested inside of
3029 target construct with no intervening stmts, or can be encountered
3030 only by initial task (so must not appear inside any OpenMP
3032 error_at (gimple_location (stmt
),
3033 "%<teams%> construct must be closely nested inside of "
3034 "%<target%> construct or not nested in any OpenMP "
/* target: depend kind checks, OpenACC routine/region nesting, and
   target-in-target diagnostics with per-kind construct names.  */
3039 case GIMPLE_OMP_TARGET
:
3040 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3041 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3042 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3043 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3045 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3046 error_at (OMP_CLAUSE_LOCATION (c
),
3047 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3048 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3051 if (is_gimple_omp_offloaded (stmt
)
3052 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3054 error_at (gimple_location (stmt
),
3055 "OpenACC region inside of OpenACC routine, nested "
3056 "parallelism not supported yet");
3059 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3061 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3063 if (is_gimple_omp (stmt
)
3064 && is_gimple_omp_oacc (stmt
)
3065 && is_gimple_omp (ctx
->stmt
))
3067 error_at (gimple_location (stmt
),
3068 "OpenACC construct inside of non-OpenACC region");
/* Map the target kinds of STMT and CTX to human-readable names
   for the mismatch diagnostics below.  */
3074 const char *stmt_name
, *ctx_stmt_name
;
3075 switch (gimple_omp_target_kind (stmt
))
3077 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3078 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3079 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3080 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3081 stmt_name
= "target enter data"; break;
3082 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3083 stmt_name
= "target exit data"; break;
3084 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3085 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3086 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3087 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3088 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
3089 stmt_name
= "enter/exit data"; break;
3090 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3092 default: gcc_unreachable ();
3094 switch (gimple_omp_target_kind (ctx
->stmt
))
3096 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3097 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3098 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3099 ctx_stmt_name
= "parallel"; break;
3100 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3101 ctx_stmt_name
= "kernels"; break;
3102 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3103 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3104 ctx_stmt_name
= "host_data"; break;
3105 default: gcc_unreachable ();
3108 /* OpenACC/OpenMP mismatch? */
3109 if (is_gimple_omp_oacc (stmt
)
3110 != is_gimple_omp_oacc (ctx
->stmt
))
3112 error_at (gimple_location (stmt
),
3113 "%s %qs construct inside of %s %qs region",
3114 (is_gimple_omp_oacc (stmt
)
3115 ? "OpenACC" : "OpenMP"), stmt_name
,
3116 (is_gimple_omp_oacc (ctx
->stmt
)
3117 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3120 if (is_gimple_omp_offloaded (ctx
->stmt
))
3122 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3123 if (is_gimple_omp_oacc (ctx
->stmt
))
3125 error_at (gimple_location (stmt
),
3126 "%qs construct inside of %qs region",
3127 stmt_name
, ctx_stmt_name
);
/* Non-OpenACC offloaded parent: only warn, not a hard error.  */
3132 warning_at (gimple_location (stmt
), 0,
3133 "%qs construct inside of %qs region",
3134 stmt_name
, ctx_stmt_name
);
3146 /* Helper function scan_omp.
3148 Callback for walk_tree or operators in walk_gimple_stmt used to
3149 scan for OMP directives in TP. */
/* Remaps decls and types referenced by the walked operand tree
   through the context's copy-body callbacks (CTX->cb).  DATA is the
   walk_stmt_info whose INFO field carries the current omp_context.  */
3152 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3154 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3155 omp_context
*ctx
= (omp_context
*) wi
->info
;
3158 switch (TREE_CODE (t
))
/* Decl case: replace with the context's remapped decl.  */
3166 tree repl
= remap_decl (t
, &ctx
->cb
);
3167 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3173 if (ctx
&& TYPE_P (t
))
3174 *tp
= remap_type (t
, &ctx
->cb
);
3175 else if (!DECL_P (t
))
/* Non-decl expression: remap its type; constants must be rebuilt
   rather than having TREE_TYPE overwritten (they are shared).  */
3180 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3181 if (tem
!= TREE_TYPE (t
))
3183 if (TREE_CODE (t
) == INTEGER_CST
)
3184 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3186 TREE_TYPE (t
) = tem
;
3196 /* Return true if FNDECL is a setjmp or a longjmp. */
/* Matches both the recognized builtins and plain extern functions
   literally named "setjmp"/"longjmp".  */
3199 setjmp_or_longjmp_p (const_tree fndecl
)
3201 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3202 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
/* Fall back to a name comparison for non-builtin declarations.  */
3205 tree declname
= DECL_NAME (fndecl
);
3208 const char *name
= IDENTIFIER_POINTER (declname
);
3209 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
3213 /* Helper function for scan_omp.
3215 Callback for walk_gimple_stmt used to scan for OMP directives in
3216 the current statement in GSI. */
/* Checks nesting restrictions (replacing invalid statements with a
   nop), then dispatches each OMP statement kind to its dedicated
   scan_omp_* routine, creating nested contexts as needed.  */
3219 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3220 struct walk_stmt_info
*wi
)
3222 gimple
*stmt
= gsi_stmt (*gsi
);
3223 omp_context
*ctx
= (omp_context
*) wi
->info
;
3225 if (gimple_has_location (stmt
))
3226 input_location
= gimple_location (stmt
);
3228 /* Check the nesting restrictions. */
3229 bool remove
= false;
3230 if (is_gimple_omp (stmt
))
3231 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3232 else if (is_gimple_call (stmt
))
3234 tree fndecl
= gimple_call_fndecl (stmt
);
/* setjmp/longjmp cannot appear inside a simd region.  */
3237 if (setjmp_or_longjmp_p (fndecl
)
3239 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3240 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
3243 error_at (gimple_location (stmt
),
3244 "setjmp/longjmp inside simd construct");
/* GOMP runtime builtins are subject to nesting checks too.  */
3246 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3247 switch (DECL_FUNCTION_CODE (fndecl
))
3249 case BUILT_IN_GOMP_BARRIER
:
3250 case BUILT_IN_GOMP_CANCEL
:
3251 case BUILT_IN_GOMP_CANCELLATION_POINT
:
3252 case BUILT_IN_GOMP_TASKYIELD
:
3253 case BUILT_IN_GOMP_TASKWAIT
:
3254 case BUILT_IN_GOMP_TASKGROUP_START
:
3255 case BUILT_IN_GOMP_TASKGROUP_END
:
3256 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
/* Invalidly nested statement: replace it with a nop.  */
3265 stmt
= gimple_build_nop ();
3266 gsi_replace (gsi
, stmt
, false);
3269 *handled_ops_p
= true;
/* Dispatch on the statement kind.  */
3271 switch (gimple_code (stmt
))
3273 case GIMPLE_OMP_PARALLEL
:
3274 taskreg_nesting_level
++;
3275 scan_omp_parallel (gsi
, ctx
);
3276 taskreg_nesting_level
--;
3279 case GIMPLE_OMP_TASK
:
3280 taskreg_nesting_level
++;
3281 scan_omp_task (gsi
, ctx
);
3282 taskreg_nesting_level
--;
3285 case GIMPLE_OMP_FOR
:
/* simd loops that may be offloaded to SIMT targets are duplicated
   (one SIMT and one SIMD copy); see scan_omp_simd.  */
3286 if (((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3287 & GF_OMP_FOR_KIND_MASK
) == GF_OMP_FOR_KIND_SIMD
)
3288 && omp_maybe_offloaded_ctx (ctx
)
3289 && omp_max_simt_vf ())
3290 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3292 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
3295 case GIMPLE_OMP_SECTIONS
:
3296 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
3299 case GIMPLE_OMP_SINGLE
:
3300 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
/* Simple constructs: just a fresh context plus a body scan.  */
3303 case GIMPLE_OMP_SECTION
:
3304 case GIMPLE_OMP_MASTER
:
3305 case GIMPLE_OMP_ORDERED
:
3306 case GIMPLE_OMP_CRITICAL
:
3307 case GIMPLE_OMP_GRID_BODY
:
3308 ctx
= new_omp_context (stmt
, ctx
);
3309 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3312 case GIMPLE_OMP_TASKGROUP
:
3313 ctx
= new_omp_context (stmt
, ctx
);
3314 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
3315 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3318 case GIMPLE_OMP_TARGET
:
3319 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
/* Host teams count as a taskreg nesting level; device teams don't.  */
3322 case GIMPLE_OMP_TEAMS
:
3323 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
3325 taskreg_nesting_level
++;
3326 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3327 taskreg_nesting_level
--;
3330 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
/* GIMPLE_BIND: record its vars in the context's decl map and let the
   walker descend into the body itself.  */
3337 *handled_ops_p
= false;
3339 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
3341 var
= DECL_CHAIN (var
))
3342 insert_decl_map (&ctx
->cb
, var
, var
);
3346 *handled_ops_p
= false;
3354 /* Scan all the statements starting at the current statement. CTX
3355 contains context information about the OMP directives and
3356 clauses found during the scan. */
/* Walks the gimple sequence *BODY_P with scan_omp_1_stmt /
   scan_omp_1_op, preserving input_location across the walk.  */
3359 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
3361 location_t saved_location
;
3362 struct walk_stmt_info wi
;
3364 memset (&wi
, 0, sizeof (wi
));
3366 wi
.want_locations
= true;
3368 saved_location
= input_location
;
3369 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
3370 input_location
= saved_location
;
3373 /* Re-gimplification and code generation routines. */
3375 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3376 of BIND if in a method. */
/* Only acts when the current function's first argument is an
   artificial pointer/reference (i.e. a `this`-style parameter);
   dummy member-access vars are unlinked from BIND's var chain.  */
3379 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
3381 if (DECL_ARGUMENTS (current_function_decl
)
3382 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
3383 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
/* Splice matching vars out of the chain in place.  */
3386 tree vars
= gimple_bind_vars (bind
);
3387 for (tree
*pvar
= &vars
; *pvar
; )
3388 if (omp_member_access_dummy_var (*pvar
))
3389 *pvar
= DECL_CHAIN (*pvar
);
3391 pvar
= &DECL_CHAIN (*pvar
);
3392 gimple_bind_set_vars (bind
, vars
);
3396 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3397 block and its subblocks. */
/* Recursively walks BLOCK's subblock chain, unlinking dummy
   member-access vars from each BLOCK_VARS list in place.  */
3400 remove_member_access_dummy_vars (tree block
)
3402 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
3403 if (omp_member_access_dummy_var (*pvar
))
3404 *pvar
= DECL_CHAIN (*pvar
);
3406 pvar
= &DECL_CHAIN (*pvar
);
/* Recurse into subblocks.  */
3408 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
3409 remove_member_access_dummy_vars (block
);
3412 /* If a context was created for STMT when it was scanned, return it. */
/* Looks STMT up in the global all_contexts splay tree; returns NULL
   when no context was recorded.  */
3414 static omp_context
*
3415 maybe_lookup_ctx (gimple
*stmt
)
3418 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
3419 return n
? (omp_context
*) n
->value
: NULL
;
3423 /* Find the mapping for DECL in CTX or the immediately enclosing
3424 context that has a mapping for DECL.
3426 If CTX is a nested parallel directive, we may have to use the decl
3427 mappings created in CTX's parent context. Suppose that we have the
3428 following parallel nesting (variable UIDs showed for clarity):
3431 #omp parallel shared(iD.1562) -> outer parallel
3432 iD.1562 = iD.1562 + 1;
3434 #omp parallel shared (iD.1562) -> inner parallel
3435 iD.1562 = iD.1562 - 1;
3437 Each parallel structure will create a distinct .omp_data_s structure
3438 for copying iD.1562 in/out of the directive:
3440 outer parallel .omp_data_s.1.i -> iD.1562
3441 inner parallel .omp_data_s.2.i -> iD.1562
3443 A shared variable mapping will produce a copy-out operation before
3444 the parallel directive and a copy-in operation after it. So, in
3445 this case we would have:
3448 .omp_data_o.1.i = iD.1562;
3449 #omp parallel shared(iD.1562) -> outer parallel
3450 .omp_data_i.1 = &.omp_data_o.1
3451 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3453 .omp_data_o.2.i = iD.1562; -> **
3454 #omp parallel shared(iD.1562) -> inner parallel
3455 .omp_data_i.2 = &.omp_data_o.2
3456 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3459 ** This is a problem. The symbol iD.1562 cannot be referenced
3460 inside the body of the outer parallel region. But since we are
3461 emitting this copy operation while expanding the inner parallel
3462 directive, we need to access the CTX structure of the outer
3463 parallel directive to get the correct mapping:
3465 .omp_data_o.2.i = .omp_data_i.1->i
3467 Since there may be other workshare or parallel directives enclosing
3468 the parallel directive, it may be necessary to walk up the context
3469 parent chain. This is not a problem in general because nested
3470 parallelism happens only rarely. */
3473 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
/* Walk outward until some enclosing context maps DECL.  */
3478 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3479 t
= maybe_lookup_decl (decl
, up
);
/* A nested context must find a mapping unless DECL is global.  */
3481 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
3483 return t
? t
: decl
;
3487 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3488 in outer contexts. */
/* Same outward walk as lookup_decl_in_outer_ctx, minus the
   nested-context assertion.  */
3491 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3496 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3497 t
= maybe_lookup_decl (decl
, up
);
3499 return t
? t
: decl
;
3503 /* Construct the initialization value for reduction operation OP. */
3506 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
3515 case TRUTH_ORIF_EXPR
:
3516 case TRUTH_XOR_EXPR
:
3518 return build_zero_cst (type
);
3521 case TRUTH_AND_EXPR
:
3522 case TRUTH_ANDIF_EXPR
:
3524 return fold_convert_loc (loc
, type
, integer_one_node
);
3527 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
3530 if (SCALAR_FLOAT_TYPE_P (type
))
3532 REAL_VALUE_TYPE max
, min
;
3533 if (HONOR_INFINITIES (type
))
3536 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
3539 real_maxval (&min
, 1, TYPE_MODE (type
));
3540 return build_real (type
, min
);
3542 else if (POINTER_TYPE_P (type
))
3545 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3546 return wide_int_to_tree (type
, min
);
3550 gcc_assert (INTEGRAL_TYPE_P (type
));
3551 return TYPE_MIN_VALUE (type
);
3555 if (SCALAR_FLOAT_TYPE_P (type
))
3557 REAL_VALUE_TYPE max
;
3558 if (HONOR_INFINITIES (type
))
3561 real_maxval (&max
, 0, TYPE_MODE (type
));
3562 return build_real (type
, max
);
3564 else if (POINTER_TYPE_P (type
))
3567 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3568 return wide_int_to_tree (type
, max
);
3572 gcc_assert (INTEGRAL_TYPE_P (type
));
3573 return TYPE_MAX_VALUE (type
);
3581 /* Construct the initialization value for reduction CLAUSE. */
3584 omp_reduction_init (tree clause
, tree type
)
3586 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
3587 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
3590 /* Return alignment to be assumed for var in CLAUSE, which should be
3591 OMP_CLAUSE_ALIGNED. */
3594 omp_clause_aligned_alignment (tree clause
)
3596 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
3597 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
3599 /* Otherwise return implementation defined alignment. */
3600 unsigned int al
= 1;
3601 opt_scalar_mode mode_iter
;
3602 auto_vector_sizes sizes
;
3603 targetm
.vectorize
.autovectorize_vector_sizes (&sizes
);
3605 for (unsigned int i
= 0; i
< sizes
.length (); ++i
)
3606 vs
= ordered_max (vs
, sizes
[i
]);
3607 static enum mode_class classes
[]
3608 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
3609 for (int i
= 0; i
< 4; i
+= 2)
3610 /* The for loop above dictates that we only walk through scalar classes. */
3611 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
3613 scalar_mode mode
= mode_iter
.require ();
3614 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
3615 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
3617 while (maybe_ne (vs
, 0U)
3618 && known_lt (GET_MODE_SIZE (vmode
), vs
)
3619 && GET_MODE_2XWIDER_MODE (vmode
).exists ())
3620 vmode
= GET_MODE_2XWIDER_MODE (vmode
).require ();
3622 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
3623 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
3625 poly_uint64 nelts
= exact_div (GET_MODE_SIZE (vmode
),
3626 GET_MODE_SIZE (mode
));
3627 type
= build_vector_type (type
, nelts
);
3628 if (TYPE_MODE (type
) != vmode
)
3630 if (TYPE_ALIGN_UNIT (type
) > al
)
3631 al
= TYPE_ALIGN_UNIT (type
);
3633 return build_int_cst (integer_type_node
, al
);
3637 /* This structure is part of the interface between lower_rec_simd_input_clauses
3638 and lower_rec_input_clauses. */
3640 struct omplow_simd_context
{
3641 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3644 vec
<tree
, va_heap
> simt_eargs
;
3645 gimple_seq simt_dlist
;
3646 poly_uint64_pod max_vf
;
3650 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3654 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
3655 omplow_simd_context
*sctx
, tree
&ivar
, tree
&lvar
)
3657 if (known_eq (sctx
->max_vf
, 0U))
3659 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
3660 if (maybe_gt (sctx
->max_vf
, 1U))
3662 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3663 OMP_CLAUSE_SAFELEN
);
3666 poly_uint64 safe_len
;
3667 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
3668 || maybe_lt (safe_len
, 1U))
3671 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
3674 if (maybe_gt (sctx
->max_vf
, 1U))
3676 sctx
->idx
= create_tmp_var (unsigned_type_node
);
3677 sctx
->lane
= create_tmp_var (unsigned_type_node
);
3680 if (known_eq (sctx
->max_vf
, 1U))
3685 if (is_gimple_reg (new_var
))
3687 ivar
= lvar
= new_var
;
3690 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
3691 ivar
= lvar
= create_tmp_var (type
);
3692 TREE_ADDRESSABLE (ivar
) = 1;
3693 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
3694 NULL
, DECL_ATTRIBUTES (ivar
));
3695 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
3696 tree clobber
= build_constructor (type
, NULL
);
3697 TREE_THIS_VOLATILE (clobber
) = 1;
3698 gimple
*g
= gimple_build_assign (ivar
, clobber
);
3699 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
3703 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
3704 tree avar
= create_tmp_var_raw (atype
);
3705 if (TREE_ADDRESSABLE (new_var
))
3706 TREE_ADDRESSABLE (avar
) = 1;
3707 DECL_ATTRIBUTES (avar
)
3708 = tree_cons (get_identifier ("omp simd array"), NULL
,
3709 DECL_ATTRIBUTES (avar
));
3710 gimple_add_tmp_var (avar
);
3711 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->idx
,
3712 NULL_TREE
, NULL_TREE
);
3713 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
3714 NULL_TREE
, NULL_TREE
);
3716 if (DECL_P (new_var
))
3718 SET_DECL_VALUE_EXPR (new_var
, lvar
);
3719 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3724 /* Helper function of lower_rec_input_clauses. For a reference
3725 in simd reduction, add an underlying variable it will reference. */
3728 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
3730 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
3731 if (TREE_CONSTANT (z
))
3733 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
3734 get_name (new_vard
));
3735 gimple_add_tmp_var (z
);
3736 TREE_ADDRESSABLE (z
) = 1;
3737 z
= build_fold_addr_expr_loc (loc
, z
);
3738 gimplify_assign (new_vard
, z
, ilist
);
3742 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
3743 code to emit (type) (tskred_temp[idx]). */
3746 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
3749 unsigned HOST_WIDE_INT sz
3750 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
3751 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
3752 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
3754 tree v
= create_tmp_var (pointer_sized_int_node
);
3755 gimple
*g
= gimple_build_assign (v
, r
);
3756 gimple_seq_add_stmt (ilist
, g
);
3757 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
3759 v
= create_tmp_var (type
);
3760 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
3761 gimple_seq_add_stmt (ilist
, g
);
3766 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3767 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3768 private variables. Initialization statements go in ILIST, while calls
3769 to destructors go in DLIST. */
3772 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
3773 omp_context
*ctx
, struct omp_for_data
*fd
)
3775 tree c
, dtor
, copyin_seq
, x
, ptr
;
3776 bool copyin_by_ref
= false;
3777 bool lastprivate_firstprivate
= false;
3778 bool reduction_omp_orig_ref
= false;
3780 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3781 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
);
3782 omplow_simd_context sctx
= omplow_simd_context ();
3783 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
3784 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
3785 gimple_seq llist
[3] = { };
3788 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
3790 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3791 with data sharing clauses referencing variable sized vars. That
3792 is unnecessarily hard to support and very unlikely to result in
3793 vectorized code anyway. */
3795 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3796 switch (OMP_CLAUSE_CODE (c
))
3798 case OMP_CLAUSE_LINEAR
:
3799 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
3802 case OMP_CLAUSE_PRIVATE
:
3803 case OMP_CLAUSE_FIRSTPRIVATE
:
3804 case OMP_CLAUSE_LASTPRIVATE
:
3805 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
3808 case OMP_CLAUSE_REDUCTION
:
3809 case OMP_CLAUSE_IN_REDUCTION
:
3810 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
3811 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
3818 /* Add a placeholder for simduid. */
3819 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
3820 sctx
.simt_eargs
.safe_push (NULL_TREE
);
3822 unsigned task_reduction_cnt
= 0;
3823 unsigned task_reduction_cntorig
= 0;
3824 unsigned task_reduction_cnt_full
= 0;
3825 unsigned task_reduction_cntorig_full
= 0;
3826 unsigned task_reduction_other_cnt
= 0;
3827 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
3828 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
3829 /* Do all the fixed sized types in the first pass, and the variable sized
3830 types in the second pass. This makes sure that the scalar arguments to
3831 the variable sized types are processed before we use them in the
3832 variable sized operations. For task reductions we use 4 passes, in the
3833 first two we ignore them, in the third one gather arguments for
3834 GOMP_task_reduction_remap call and in the last pass actually handle
3835 the task reductions. */
3836 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
3839 if (pass
== 2 && task_reduction_cnt
)
3842 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
3843 + task_reduction_cntorig
);
3844 tskred_avar
= create_tmp_var_raw (tskred_atype
);
3845 gimple_add_tmp_var (tskred_avar
);
3846 TREE_ADDRESSABLE (tskred_avar
) = 1;
3847 task_reduction_cnt_full
= task_reduction_cnt
;
3848 task_reduction_cntorig_full
= task_reduction_cntorig
;
3850 else if (pass
== 3 && task_reduction_cnt
)
3852 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
3854 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
3855 size_int (task_reduction_cntorig
),
3856 build_fold_addr_expr (tskred_avar
));
3857 gimple_seq_add_stmt (ilist
, g
);
3859 if (pass
== 3 && task_reduction_other_cnt
)
3861 /* For reduction clauses, build
3862 tskred_base = (void *) tskred_temp[2]
3863 + omp_get_thread_num () * tskred_temp[1]
3864 or if tskred_temp[1] is known to be constant, that constant
3865 directly. This is the start of the private reduction copy block
3866 for the current thread. */
3867 tree v
= create_tmp_var (integer_type_node
);
3868 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
3869 gimple
*g
= gimple_build_call (x
, 0);
3870 gimple_call_set_lhs (g
, v
);
3871 gimple_seq_add_stmt (ilist
, g
);
3872 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
3873 tskred_temp
= OMP_CLAUSE_DECL (c
);
3874 if (is_taskreg_ctx (ctx
))
3875 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
3876 tree v2
= create_tmp_var (sizetype
);
3877 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
3878 gimple_seq_add_stmt (ilist
, g
);
3879 if (ctx
->task_reductions
[0])
3880 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
3882 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
3883 tree v3
= create_tmp_var (sizetype
);
3884 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
3885 gimple_seq_add_stmt (ilist
, g
);
3886 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
3887 tskred_base
= create_tmp_var (ptr_type_node
);
3888 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
3889 gimple_seq_add_stmt (ilist
, g
);
3891 task_reduction_cnt
= 0;
3892 task_reduction_cntorig
= 0;
3893 task_reduction_other_cnt
= 0;
3894 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3896 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
3899 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
3900 bool task_reduction_p
= false;
3901 bool task_reduction_needs_orig_p
= false;
3902 tree cond
= NULL_TREE
;
3906 case OMP_CLAUSE_PRIVATE
:
3907 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
3910 case OMP_CLAUSE_SHARED
:
3911 /* Ignore shared directives in teams construct inside
3912 of target construct. */
3913 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
3914 && !is_host_teams_ctx (ctx
))
3916 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
3918 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
3919 || is_global_var (OMP_CLAUSE_DECL (c
)));
3922 case OMP_CLAUSE_FIRSTPRIVATE
:
3923 case OMP_CLAUSE_COPYIN
:
3925 case OMP_CLAUSE_LINEAR
:
3926 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
3927 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
3928 lastprivate_firstprivate
= true;
3930 case OMP_CLAUSE_REDUCTION
:
3931 case OMP_CLAUSE_IN_REDUCTION
:
3932 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
3934 task_reduction_p
= true;
3935 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
3937 task_reduction_other_cnt
++;
3942 task_reduction_cnt
++;
3943 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
3945 var
= OMP_CLAUSE_DECL (c
);
3946 /* If var is a global variable that isn't privatized
3947 in outer contexts, we don't need to look up the
3948 original address, it is always the address of the
3949 global variable itself. */
3951 || omp_is_reference (var
)
3953 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
3955 task_reduction_needs_orig_p
= true;
3956 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
3957 task_reduction_cntorig
++;
3961 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
3962 reduction_omp_orig_ref
= true;
3964 case OMP_CLAUSE__REDUCTEMP_
:
3965 if (!is_taskreg_ctx (ctx
))
3968 case OMP_CLAUSE__LOOPTEMP_
:
3969 /* Handle _looptemp_/_reductemp_ clauses only on
3974 case OMP_CLAUSE_LASTPRIVATE
:
3975 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
3977 lastprivate_firstprivate
= true;
3978 if (pass
!= 0 || is_taskloop_ctx (ctx
))
3981 /* Even without corresponding firstprivate, if
3982 decl is Fortran allocatable, it needs outer var
3985 && lang_hooks
.decls
.omp_private_outer_ref
3986 (OMP_CLAUSE_DECL (c
)))
3987 lastprivate_firstprivate
= true;
3989 case OMP_CLAUSE_ALIGNED
:
3992 var
= OMP_CLAUSE_DECL (c
);
3993 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
3994 && !is_global_var (var
))
3996 new_var
= maybe_lookup_decl (var
, ctx
);
3997 if (new_var
== NULL_TREE
)
3998 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
3999 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4000 tree alarg
= omp_clause_aligned_alignment (c
);
4001 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4002 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
4003 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4004 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4005 gimplify_and_add (x
, ilist
);
4007 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
4008 && is_global_var (var
))
4010 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
4011 new_var
= lookup_decl (var
, ctx
);
4012 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4013 t
= build_fold_addr_expr_loc (clause_loc
, t
);
4014 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4015 tree alarg
= omp_clause_aligned_alignment (c
);
4016 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4017 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
4018 t
= fold_convert_loc (clause_loc
, ptype
, t
);
4019 x
= create_tmp_var (ptype
);
4020 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
4021 gimplify_and_add (t
, ilist
);
4022 t
= build_simple_mem_ref_loc (clause_loc
, x
);
4023 SET_DECL_VALUE_EXPR (new_var
, t
);
4024 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4031 if (task_reduction_p
!= (pass
>= 2))
4034 new_var
= var
= OMP_CLAUSE_DECL (c
);
4035 if ((c_kind
== OMP_CLAUSE_REDUCTION
4036 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4037 && TREE_CODE (var
) == MEM_REF
)
4039 var
= TREE_OPERAND (var
, 0);
4040 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
4041 var
= TREE_OPERAND (var
, 0);
4042 if (TREE_CODE (var
) == INDIRECT_REF
4043 || TREE_CODE (var
) == ADDR_EXPR
)
4044 var
= TREE_OPERAND (var
, 0);
4045 if (is_variable_sized (var
))
4047 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
4048 var
= DECL_VALUE_EXPR (var
);
4049 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
4050 var
= TREE_OPERAND (var
, 0);
4051 gcc_assert (DECL_P (var
));
4055 if (c_kind
!= OMP_CLAUSE_COPYIN
)
4056 new_var
= lookup_decl (var
, ctx
);
4058 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
4063 /* C/C++ array section reductions. */
4064 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4065 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4066 && var
!= OMP_CLAUSE_DECL (c
))
4071 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
4072 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
4074 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
4076 tree b
= TREE_OPERAND (orig_var
, 1);
4077 b
= maybe_lookup_decl (b
, ctx
);
4080 b
= TREE_OPERAND (orig_var
, 1);
4081 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
4083 if (integer_zerop (bias
))
4087 bias
= fold_convert_loc (clause_loc
,
4088 TREE_TYPE (b
), bias
);
4089 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4090 TREE_TYPE (b
), b
, bias
);
4092 orig_var
= TREE_OPERAND (orig_var
, 0);
4096 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4097 if (is_global_var (out
)
4098 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
4099 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
4100 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
4105 bool by_ref
= use_pointer_for_field (var
, NULL
);
4106 x
= build_receiver_ref (var
, by_ref
, ctx
);
4107 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
4108 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
4110 x
= build_fold_addr_expr (x
);
4112 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
4113 x
= build_simple_mem_ref (x
);
4114 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
4116 if (var
== TREE_OPERAND (orig_var
, 0))
4117 x
= build_fold_addr_expr (x
);
4119 bias
= fold_convert (sizetype
, bias
);
4120 x
= fold_convert (ptr_type_node
, x
);
4121 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
4122 TREE_TYPE (x
), x
, bias
);
4123 unsigned cnt
= task_reduction_cnt
- 1;
4124 if (!task_reduction_needs_orig_p
)
4125 cnt
+= (task_reduction_cntorig_full
4126 - task_reduction_cntorig
);
4128 cnt
= task_reduction_cntorig
- 1;
4129 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4130 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4131 gimplify_assign (r
, x
, ilist
);
4135 if (TREE_CODE (orig_var
) == INDIRECT_REF
4136 || TREE_CODE (orig_var
) == ADDR_EXPR
)
4137 orig_var
= TREE_OPERAND (orig_var
, 0);
4138 tree d
= OMP_CLAUSE_DECL (c
);
4139 tree type
= TREE_TYPE (d
);
4140 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
4141 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
4142 const char *name
= get_name (orig_var
);
4145 tree xv
= create_tmp_var (ptr_type_node
);
4146 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4148 unsigned cnt
= task_reduction_cnt
- 1;
4149 if (!task_reduction_needs_orig_p
)
4150 cnt
+= (task_reduction_cntorig_full
4151 - task_reduction_cntorig
);
4153 cnt
= task_reduction_cntorig
- 1;
4154 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4155 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4157 gimple
*g
= gimple_build_assign (xv
, x
);
4158 gimple_seq_add_stmt (ilist
, g
);
4162 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4164 if (ctx
->task_reductions
[1 + idx
])
4165 off
= fold_convert (sizetype
,
4166 ctx
->task_reductions
[1 + idx
]);
4168 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4170 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
4172 gimple_seq_add_stmt (ilist
, g
);
4174 x
= fold_convert (build_pointer_type (boolean_type_node
),
4176 if (TREE_CONSTANT (v
))
4177 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
4178 TYPE_SIZE_UNIT (type
));
4181 tree t
= maybe_lookup_decl (v
, ctx
);
4185 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4186 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
4188 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4190 build_int_cst (TREE_TYPE (v
), 1));
4191 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4193 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4194 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4196 cond
= create_tmp_var (TREE_TYPE (x
));
4197 gimplify_assign (cond
, x
, ilist
);
4200 else if (TREE_CONSTANT (v
))
4202 x
= create_tmp_var_raw (type
, name
);
4203 gimple_add_tmp_var (x
);
4204 TREE_ADDRESSABLE (x
) = 1;
4205 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4210 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4211 tree t
= maybe_lookup_decl (v
, ctx
);
4215 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4216 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
4217 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4219 build_int_cst (TREE_TYPE (v
), 1));
4220 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4222 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4223 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
4224 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
4227 tree ptype
= build_pointer_type (TREE_TYPE (type
));
4228 x
= fold_convert_loc (clause_loc
, ptype
, x
);
4229 tree y
= create_tmp_var (ptype
, name
);
4230 gimplify_assign (y
, x
, ilist
);
4234 if (!integer_zerop (bias
))
4236 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4238 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4240 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
4241 pointer_sized_int_node
, yb
, bias
);
4242 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
4243 yb
= create_tmp_var (ptype
, name
);
4244 gimplify_assign (yb
, x
, ilist
);
4248 d
= TREE_OPERAND (d
, 0);
4249 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
4250 d
= TREE_OPERAND (d
, 0);
4251 if (TREE_CODE (d
) == ADDR_EXPR
)
4253 if (orig_var
!= var
)
4255 gcc_assert (is_variable_sized (orig_var
));
4256 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
4258 gimplify_assign (new_var
, x
, ilist
);
4259 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
4260 tree t
= build_fold_indirect_ref (new_var
);
4261 DECL_IGNORED_P (new_var
) = 0;
4262 TREE_THIS_NOTRAP (t
) = 1;
4263 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
4264 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
4268 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
4269 build_int_cst (ptype
, 0));
4270 SET_DECL_VALUE_EXPR (new_var
, x
);
4271 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4276 gcc_assert (orig_var
== var
);
4277 if (TREE_CODE (d
) == INDIRECT_REF
)
4279 x
= create_tmp_var (ptype
, name
);
4280 TREE_ADDRESSABLE (x
) = 1;
4281 gimplify_assign (x
, yb
, ilist
);
4282 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4284 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4285 gimplify_assign (new_var
, x
, ilist
);
4287 /* GOMP_taskgroup_reduction_register memsets the whole
4288 array to zero. If the initializer is zero, we don't
4289 need to initialize it again, just mark it as ever
4290 used unconditionally, i.e. cond = true. */
4292 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
4293 && initializer_zerop (omp_reduction_init (c
,
4296 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
4298 gimple_seq_add_stmt (ilist
, g
);
4301 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4305 if (!is_parallel_ctx (ctx
))
4307 tree condv
= create_tmp_var (boolean_type_node
);
4308 g
= gimple_build_assign (condv
,
4309 build_simple_mem_ref (cond
));
4310 gimple_seq_add_stmt (ilist
, g
);
4311 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
4312 g
= gimple_build_cond (NE_EXPR
, condv
,
4313 boolean_false_node
, end
, lab1
);
4314 gimple_seq_add_stmt (ilist
, g
);
4315 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
4317 g
= gimple_build_assign (build_simple_mem_ref (cond
),
4319 gimple_seq_add_stmt (ilist
, g
);
4322 tree y1
= create_tmp_var (ptype
);
4323 gimplify_assign (y1
, y
, ilist
);
4324 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
4325 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
4326 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
4327 if (task_reduction_needs_orig_p
)
4329 y3
= create_tmp_var (ptype
);
4331 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4332 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4333 size_int (task_reduction_cnt_full
4334 + task_reduction_cntorig
- 1),
4335 NULL_TREE
, NULL_TREE
);
4338 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4339 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
4342 gimplify_assign (y3
, ref
, ilist
);
4344 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
4348 y2
= create_tmp_var (ptype
);
4349 gimplify_assign (y2
, y
, ilist
);
4351 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4353 tree ref
= build_outer_var_ref (var
, ctx
);
4354 /* For ref build_outer_var_ref already performs this. */
4355 if (TREE_CODE (d
) == INDIRECT_REF
)
4356 gcc_assert (omp_is_reference (var
));
4357 else if (TREE_CODE (d
) == ADDR_EXPR
)
4358 ref
= build_fold_addr_expr (ref
);
4359 else if (omp_is_reference (var
))
4360 ref
= build_fold_addr_expr (ref
);
4361 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
4362 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
4363 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4365 y3
= create_tmp_var (ptype
);
4366 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
4370 y4
= create_tmp_var (ptype
);
4371 gimplify_assign (y4
, ref
, dlist
);
4375 tree i
= create_tmp_var (TREE_TYPE (v
));
4376 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
4377 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4378 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
4381 i2
= create_tmp_var (TREE_TYPE (v
));
4382 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
4383 body2
= create_artificial_label (UNKNOWN_LOCATION
);
4384 end2
= create_artificial_label (UNKNOWN_LOCATION
);
4385 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
4387 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4389 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
4390 tree decl_placeholder
4391 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
4392 SET_DECL_VALUE_EXPR (decl_placeholder
,
4393 build_simple_mem_ref (y1
));
4394 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
4395 SET_DECL_VALUE_EXPR (placeholder
,
4396 y3
? build_simple_mem_ref (y3
)
4398 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
4399 x
= lang_hooks
.decls
.omp_clause_default_ctor
4400 (c
, build_simple_mem_ref (y1
),
4401 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
4403 gimplify_and_add (x
, ilist
);
4404 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4406 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4407 lower_omp (&tseq
, ctx
);
4408 gimple_seq_add_seq (ilist
, tseq
);
4410 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4413 SET_DECL_VALUE_EXPR (decl_placeholder
,
4414 build_simple_mem_ref (y2
));
4415 SET_DECL_VALUE_EXPR (placeholder
,
4416 build_simple_mem_ref (y4
));
4417 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4418 lower_omp (&tseq
, ctx
);
4419 gimple_seq_add_seq (dlist
, tseq
);
4420 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4422 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4423 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
4426 x
= lang_hooks
.decls
.omp_clause_dtor
4427 (c
, build_simple_mem_ref (y2
));
4430 gimple_seq tseq
= NULL
;
4432 gimplify_stmt (&dtor
, &tseq
);
4433 gimple_seq_add_seq (dlist
, tseq
);
4439 x
= omp_reduction_init (c
, TREE_TYPE (type
));
4440 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
4442 /* reduction(-:var) sums up the partial results, so it
4443 acts identically to reduction(+:var). */
4444 if (code
== MINUS_EXPR
)
4447 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
4450 x
= build2 (code
, TREE_TYPE (type
),
4451 build_simple_mem_ref (y4
),
4452 build_simple_mem_ref (y2
));
4453 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
4457 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
4458 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4459 gimple_seq_add_stmt (ilist
, g
);
4462 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
4463 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4464 gimple_seq_add_stmt (ilist
, g
);
4466 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
4467 build_int_cst (TREE_TYPE (i
), 1));
4468 gimple_seq_add_stmt (ilist
, g
);
4469 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
4470 gimple_seq_add_stmt (ilist
, g
);
4471 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
4474 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
4475 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4476 gimple_seq_add_stmt (dlist
, g
);
4479 g
= gimple_build_assign
4480 (y4
, POINTER_PLUS_EXPR
, y4
,
4481 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4482 gimple_seq_add_stmt (dlist
, g
);
4484 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
4485 build_int_cst (TREE_TYPE (i2
), 1));
4486 gimple_seq_add_stmt (dlist
, g
);
4487 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
4488 gimple_seq_add_stmt (dlist
, g
);
4489 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
4495 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4499 bool by_ref
= use_pointer_for_field (var
, ctx
);
4500 x
= build_receiver_ref (var
, by_ref
, ctx
);
4502 if (!omp_is_reference (var
))
4503 x
= build_fold_addr_expr (x
);
4504 x
= fold_convert (ptr_type_node
, x
);
4505 unsigned cnt
= task_reduction_cnt
- 1;
4506 if (!task_reduction_needs_orig_p
)
4507 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
4509 cnt
= task_reduction_cntorig
- 1;
4510 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4511 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4512 gimplify_assign (r
, x
, ilist
);
4517 tree type
= TREE_TYPE (new_var
);
4518 if (!omp_is_reference (var
))
4519 type
= build_pointer_type (type
);
4520 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4522 unsigned cnt
= task_reduction_cnt
- 1;
4523 if (!task_reduction_needs_orig_p
)
4524 cnt
+= (task_reduction_cntorig_full
4525 - task_reduction_cntorig
);
4527 cnt
= task_reduction_cntorig
- 1;
4528 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4529 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4533 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4535 if (ctx
->task_reductions
[1 + idx
])
4536 off
= fold_convert (sizetype
,
4537 ctx
->task_reductions
[1 + idx
]);
4539 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4541 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
4544 x
= fold_convert (type
, x
);
4546 if (omp_is_reference (var
))
4548 gimplify_assign (new_var
, x
, ilist
);
4550 new_var
= build_simple_mem_ref (new_var
);
4554 t
= create_tmp_var (type
);
4555 gimplify_assign (t
, x
, ilist
);
4556 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
4557 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4559 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
4560 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
4561 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4562 cond
= create_tmp_var (TREE_TYPE (t
));
4563 gimplify_assign (cond
, t
, ilist
);
4565 else if (is_variable_sized (var
))
4567 /* For variable sized types, we need to allocate the
4568 actual storage here. Call alloca and store the
4569 result in the pointer decl that we created elsewhere. */
4573 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
4578 ptr
= DECL_VALUE_EXPR (new_var
);
4579 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
4580 ptr
= TREE_OPERAND (ptr
, 0);
4581 gcc_assert (DECL_P (ptr
));
4582 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
4584 /* void *tmp = __builtin_alloca */
4585 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4586 stmt
= gimple_build_call (atmp
, 2, x
,
4587 size_int (DECL_ALIGN (var
)));
4588 tmp
= create_tmp_var_raw (ptr_type_node
);
4589 gimple_add_tmp_var (tmp
);
4590 gimple_call_set_lhs (stmt
, tmp
);
4592 gimple_seq_add_stmt (ilist
, stmt
);
4594 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
4595 gimplify_assign (ptr
, x
, ilist
);
4598 else if (omp_is_reference (var
)
4599 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
4600 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
4602 /* For references that are being privatized for Fortran,
4603 allocate new backing storage for the new pointer
4604 variable. This allows us to avoid changing all the
4605 code that expects a pointer to something that expects
4606 a direct variable. */
4610 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
4611 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
4613 x
= build_receiver_ref (var
, false, ctx
);
4614 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4616 else if (TREE_CONSTANT (x
))
4618 /* For reduction in SIMD loop, defer adding the
4619 initialization of the reference, because if we decide
4620 to use SIMD array for it, the initilization could cause
4622 if (c_kind
== OMP_CLAUSE_REDUCTION
&& is_simd
)
4626 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
4628 gimple_add_tmp_var (x
);
4629 TREE_ADDRESSABLE (x
) = 1;
4630 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4636 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4637 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
4638 tree al
= size_int (TYPE_ALIGN (rtype
));
4639 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
4644 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4645 gimplify_assign (new_var
, x
, ilist
);
4648 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
4650 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4651 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4652 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4660 switch (OMP_CLAUSE_CODE (c
))
4662 case OMP_CLAUSE_SHARED
:
4663 /* Ignore shared directives in teams construct inside
4664 target construct. */
4665 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4666 && !is_host_teams_ctx (ctx
))
4668 /* Shared global vars are just accessed directly. */
4669 if (is_global_var (new_var
))
4671 /* For taskloop firstprivate/lastprivate, represented
4672 as firstprivate and shared clause on the task, new_var
4673 is the firstprivate var. */
4674 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
4676 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4677 needs to be delayed until after fixup_child_record_type so
4678 that we get the correct type during the dereference. */
4679 by_ref
= use_pointer_for_field (var
, ctx
);
4680 x
= build_receiver_ref (var
, by_ref
, ctx
);
4681 SET_DECL_VALUE_EXPR (new_var
, x
);
4682 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4684 /* ??? If VAR is not passed by reference, and the variable
4685 hasn't been initialized yet, then we'll get a warning for
4686 the store into the omp_data_s structure. Ideally, we'd be
4687 able to notice this and not store anything at all, but
4688 we're generating code too early. Suppress the warning. */
4690 TREE_NO_WARNING (var
) = 1;
4693 case OMP_CLAUSE_LASTPRIVATE
:
4694 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4698 case OMP_CLAUSE_PRIVATE
:
4699 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
4700 x
= build_outer_var_ref (var
, ctx
);
4701 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
4703 if (is_task_ctx (ctx
))
4704 x
= build_receiver_ref (var
, false, ctx
);
4706 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
4712 nx
= lang_hooks
.decls
.omp_clause_default_ctor
4713 (c
, unshare_expr (new_var
), x
);
4716 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
4717 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
4718 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
4719 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4723 x
= lang_hooks
.decls
.omp_clause_default_ctor
4724 (c
, unshare_expr (ivar
), x
);
4726 gimplify_and_add (x
, &llist
[0]);
4729 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4732 gimple_seq tseq
= NULL
;
4735 gimplify_stmt (&dtor
, &tseq
);
4736 gimple_seq_add_seq (&llist
[1], tseq
);
4743 gimplify_and_add (nx
, ilist
);
4747 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
4750 gimple_seq tseq
= NULL
;
4753 gimplify_stmt (&dtor
, &tseq
);
4754 gimple_seq_add_seq (dlist
, tseq
);
4758 case OMP_CLAUSE_LINEAR
:
4759 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
4760 goto do_firstprivate
;
4761 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4764 x
= build_outer_var_ref (var
, ctx
);
4767 case OMP_CLAUSE_FIRSTPRIVATE
:
4768 if (is_task_ctx (ctx
))
4770 if ((omp_is_reference (var
)
4771 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
4772 || is_variable_sized (var
))
4774 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
4776 || use_pointer_for_field (var
, NULL
))
4778 x
= build_receiver_ref (var
, false, ctx
);
4779 SET_DECL_VALUE_EXPR (new_var
, x
);
4780 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4784 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
4785 && omp_is_reference (var
))
4787 x
= build_outer_var_ref (var
, ctx
);
4788 gcc_assert (TREE_CODE (x
) == MEM_REF
4789 && integer_zerop (TREE_OPERAND (x
, 1)));
4790 x
= TREE_OPERAND (x
, 0);
4791 x
= lang_hooks
.decls
.omp_clause_copy_ctor
4792 (c
, unshare_expr (new_var
), x
);
4793 gimplify_and_add (x
, ilist
);
4797 x
= build_outer_var_ref (var
, ctx
);
4800 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
4801 && gimple_omp_for_combined_into_p (ctx
->stmt
))
4803 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
4804 tree stept
= TREE_TYPE (t
);
4805 tree ct
= omp_find_clause (clauses
,
4806 OMP_CLAUSE__LOOPTEMP_
);
4808 tree l
= OMP_CLAUSE_DECL (ct
);
4809 tree n1
= fd
->loop
.n1
;
4810 tree step
= fd
->loop
.step
;
4811 tree itype
= TREE_TYPE (l
);
4812 if (POINTER_TYPE_P (itype
))
4813 itype
= signed_type_for (itype
);
4814 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
4815 if (TYPE_UNSIGNED (itype
)
4816 && fd
->loop
.cond_code
== GT_EXPR
)
4817 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
4818 fold_build1 (NEGATE_EXPR
, itype
, l
),
4819 fold_build1 (NEGATE_EXPR
,
4822 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
4823 t
= fold_build2 (MULT_EXPR
, stept
,
4824 fold_convert (stept
, l
), t
);
4826 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4828 x
= lang_hooks
.decls
.omp_clause_linear_ctor
4830 gimplify_and_add (x
, ilist
);
4834 if (POINTER_TYPE_P (TREE_TYPE (x
)))
4835 x
= fold_build2 (POINTER_PLUS_EXPR
,
4836 TREE_TYPE (x
), x
, t
);
4838 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4841 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
4842 || TREE_ADDRESSABLE (new_var
))
4843 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4846 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
4848 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
4849 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
4850 gimplify_and_add (x
, ilist
);
4851 gimple_stmt_iterator gsi
4852 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
4854 = gimple_build_assign (unshare_expr (lvar
), iv
);
4855 gsi_insert_before_without_update (&gsi
, g
,
4857 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
4858 enum tree_code code
= PLUS_EXPR
;
4859 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
4860 code
= POINTER_PLUS_EXPR
;
4861 g
= gimple_build_assign (iv
, code
, iv
, t
);
4862 gsi_insert_before_without_update (&gsi
, g
,
4866 x
= lang_hooks
.decls
.omp_clause_copy_ctor
4867 (c
, unshare_expr (ivar
), x
);
4868 gimplify_and_add (x
, &llist
[0]);
4869 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4872 gimple_seq tseq
= NULL
;
4875 gimplify_stmt (&dtor
, &tseq
);
4876 gimple_seq_add_seq (&llist
[1], tseq
);
4881 x
= lang_hooks
.decls
.omp_clause_copy_ctor
4882 (c
, unshare_expr (new_var
), x
);
4883 gimplify_and_add (x
, ilist
);
4886 case OMP_CLAUSE__LOOPTEMP_
:
4887 case OMP_CLAUSE__REDUCTEMP_
:
4888 gcc_assert (is_taskreg_ctx (ctx
));
4889 x
= build_outer_var_ref (var
, ctx
);
4890 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4891 gimplify_and_add (x
, ilist
);
4894 case OMP_CLAUSE_COPYIN
:
4895 by_ref
= use_pointer_for_field (var
, NULL
);
4896 x
= build_receiver_ref (var
, by_ref
, ctx
);
4897 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
4898 append_to_statement_list (x
, ©in_seq
);
4899 copyin_by_ref
|= by_ref
;
4902 case OMP_CLAUSE_REDUCTION
:
4903 case OMP_CLAUSE_IN_REDUCTION
:
4904 /* OpenACC reductions are initialized using the
4905 GOACC_REDUCTION internal function. */
4906 if (is_gimple_omp_oacc (ctx
->stmt
))
4908 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4910 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
4912 tree ptype
= TREE_TYPE (placeholder
);
4915 x
= error_mark_node
;
4916 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
4917 && !task_reduction_needs_orig_p
)
4919 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4921 tree pptype
= build_pointer_type (ptype
);
4922 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4923 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4924 size_int (task_reduction_cnt_full
4925 + task_reduction_cntorig
- 1),
4926 NULL_TREE
, NULL_TREE
);
4930 = *ctx
->task_reduction_map
->get (c
);
4931 x
= task_reduction_read (ilist
, tskred_temp
,
4932 pptype
, 7 + 3 * idx
);
4934 x
= fold_convert (pptype
, x
);
4935 x
= build_simple_mem_ref (x
);
4940 x
= build_outer_var_ref (var
, ctx
);
4942 if (omp_is_reference (var
)
4943 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
4944 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4946 SET_DECL_VALUE_EXPR (placeholder
, x
);
4947 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
4948 tree new_vard
= new_var
;
4949 if (omp_is_reference (var
))
4951 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
4952 new_vard
= TREE_OPERAND (new_var
, 0);
4953 gcc_assert (DECL_P (new_vard
));
4956 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4959 if (new_vard
== new_var
)
4961 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
4962 SET_DECL_VALUE_EXPR (new_var
, ivar
);
4966 SET_DECL_VALUE_EXPR (new_vard
,
4967 build_fold_addr_expr (ivar
));
4968 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
4970 x
= lang_hooks
.decls
.omp_clause_default_ctor
4971 (c
, unshare_expr (ivar
),
4972 build_outer_var_ref (var
, ctx
));
4974 gimplify_and_add (x
, &llist
[0]);
4975 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4977 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4978 lower_omp (&tseq
, ctx
);
4979 gimple_seq_add_seq (&llist
[0], tseq
);
4981 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4982 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4983 lower_omp (&tseq
, ctx
);
4984 gimple_seq_add_seq (&llist
[1], tseq
);
4985 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4986 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4987 if (new_vard
== new_var
)
4988 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4990 SET_DECL_VALUE_EXPR (new_vard
,
4991 build_fold_addr_expr (lvar
));
4992 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4997 gimplify_stmt (&dtor
, &tseq
);
4998 gimple_seq_add_seq (&llist
[1], tseq
);
5002 /* If this is a reference to constant size reduction var
5003 with placeholder, we haven't emitted the initializer
5004 for it because it is undesirable if SIMD arrays are used.
5005 But if they aren't used, we need to emit the deferred
5006 initialization now. */
5007 else if (omp_is_reference (var
) && is_simd
)
5008 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5010 tree lab2
= NULL_TREE
;
5014 if (!is_parallel_ctx (ctx
))
5016 tree condv
= create_tmp_var (boolean_type_node
);
5017 tree m
= build_simple_mem_ref (cond
);
5018 g
= gimple_build_assign (condv
, m
);
5019 gimple_seq_add_stmt (ilist
, g
);
5021 = create_artificial_label (UNKNOWN_LOCATION
);
5022 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5023 g
= gimple_build_cond (NE_EXPR
, condv
,
5026 gimple_seq_add_stmt (ilist
, g
);
5027 gimple_seq_add_stmt (ilist
,
5028 gimple_build_label (lab1
));
5030 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5032 gimple_seq_add_stmt (ilist
, g
);
5034 x
= lang_hooks
.decls
.omp_clause_default_ctor
5035 (c
, unshare_expr (new_var
),
5037 : build_outer_var_ref (var
, ctx
));
5039 gimplify_and_add (x
, ilist
);
5040 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5042 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5043 lower_omp (&tseq
, ctx
);
5044 gimple_seq_add_seq (ilist
, tseq
);
5046 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5049 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5050 lower_omp (&tseq
, ctx
);
5051 gimple_seq_add_seq (dlist
, tseq
);
5052 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5054 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5058 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5065 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
5066 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
5067 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5072 tree lab2
= NULL_TREE
;
5073 /* GOMP_taskgroup_reduction_register memsets the whole
5074 array to zero. If the initializer is zero, we don't
5075 need to initialize it again, just mark it as ever
5076 used unconditionally, i.e. cond = true. */
5077 if (initializer_zerop (x
))
5079 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5081 gimple_seq_add_stmt (ilist
, g
);
5086 if (!cond) { cond = true; new_var = x; } */
5087 if (!is_parallel_ctx (ctx
))
5089 tree condv
= create_tmp_var (boolean_type_node
);
5090 tree m
= build_simple_mem_ref (cond
);
5091 g
= gimple_build_assign (condv
, m
);
5092 gimple_seq_add_stmt (ilist
, g
);
5094 = create_artificial_label (UNKNOWN_LOCATION
);
5095 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5096 g
= gimple_build_cond (NE_EXPR
, condv
,
5099 gimple_seq_add_stmt (ilist
, g
);
5100 gimple_seq_add_stmt (ilist
,
5101 gimple_build_label (lab1
));
5103 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5105 gimple_seq_add_stmt (ilist
, g
);
5106 gimplify_assign (new_var
, x
, ilist
);
5108 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5112 /* reduction(-:var) sums up the partial results, so it
5113 acts identically to reduction(+:var). */
5114 if (code
== MINUS_EXPR
)
5117 tree new_vard
= new_var
;
5118 if (is_simd
&& omp_is_reference (var
))
5120 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5121 new_vard
= TREE_OPERAND (new_var
, 0);
5122 gcc_assert (DECL_P (new_vard
));
5125 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5128 tree ref
= build_outer_var_ref (var
, ctx
);
5130 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
5135 simt_lane
= create_tmp_var (unsigned_type_node
);
5136 x
= build_call_expr_internal_loc
5137 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
5138 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
5139 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
5140 gimplify_assign (ivar
, x
, &llist
[2]);
5142 x
= build2 (code
, TREE_TYPE (ref
), ref
, ivar
);
5143 ref
= build_outer_var_ref (var
, ctx
);
5144 gimplify_assign (ref
, x
, &llist
[1]);
5146 if (new_vard
!= new_var
)
5148 SET_DECL_VALUE_EXPR (new_vard
,
5149 build_fold_addr_expr (lvar
));
5150 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5155 if (omp_is_reference (var
) && is_simd
)
5156 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5157 gimplify_assign (new_var
, x
, ilist
);
5160 tree ref
= build_outer_var_ref (var
, ctx
);
5162 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
5163 ref
= build_outer_var_ref (var
, ctx
);
5164 gimplify_assign (ref
, x
, dlist
);
5177 tree clobber
= build_constructor (TREE_TYPE (tskred_avar
), NULL
);
5178 TREE_THIS_VOLATILE (clobber
) = 1;
5179 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
5182 if (known_eq (sctx
.max_vf
, 1U))
5183 sctx
.is_simt
= false;
5185 if (sctx
.lane
|| sctx
.is_simt
)
5187 uid
= create_tmp_var (ptr_type_node
, "simduid");
5188 /* Don't want uninit warnings on simduid, it is always uninitialized,
5189 but we use it not for the value, but for the DECL_UID only. */
5190 TREE_NO_WARNING (uid
) = 1;
5191 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
5192 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
5193 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
5194 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
5196 /* Emit calls denoting privatized variables and initializing a pointer to
5197 structure that holds private variables as fields after ompdevlow pass. */
5200 sctx
.simt_eargs
[0] = uid
;
5202 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
5203 gimple_call_set_lhs (g
, uid
);
5204 gimple_seq_add_stmt (ilist
, g
);
5205 sctx
.simt_eargs
.release ();
5207 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
5208 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
5209 gimple_call_set_lhs (g
, simtrec
);
5210 gimple_seq_add_stmt (ilist
, g
);
5215 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 1, uid
);
5216 gimple_call_set_lhs (g
, sctx
.lane
);
5217 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
5218 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
5219 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
5220 build_int_cst (unsigned_type_node
, 0));
5221 gimple_seq_add_stmt (ilist
, g
);
5222 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
5225 tree simt_vf
= create_tmp_var (unsigned_type_node
);
5226 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
5227 gimple_call_set_lhs (g
, simt_vf
);
5228 gimple_seq_add_stmt (dlist
, g
);
5230 tree t
= build_int_cst (unsigned_type_node
, 1);
5231 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
5232 gimple_seq_add_stmt (dlist
, g
);
5234 t
= build_int_cst (unsigned_type_node
, 0);
5235 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
5236 gimple_seq_add_stmt (dlist
, g
);
5238 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5239 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
5240 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5241 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
5242 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
5244 gimple_seq_add_seq (dlist
, llist
[2]);
5246 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
5247 gimple_seq_add_stmt (dlist
, g
);
5249 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
5250 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
5251 gimple_seq_add_stmt (dlist
, g
);
5253 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
5255 for (int i
= 0; i
< 2; i
++)
5258 tree vf
= create_tmp_var (unsigned_type_node
);
5259 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
5260 gimple_call_set_lhs (g
, vf
);
5261 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
5262 gimple_seq_add_stmt (seq
, g
);
5263 tree t
= build_int_cst (unsigned_type_node
, 0);
5264 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
5265 gimple_seq_add_stmt (seq
, g
);
5266 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5267 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
5268 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5269 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
5270 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
5271 gimple_seq_add_seq (seq
, llist
[i
]);
5272 t
= build_int_cst (unsigned_type_node
, 1);
5273 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
5274 gimple_seq_add_stmt (seq
, g
);
5275 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
5276 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
5277 gimple_seq_add_stmt (seq
, g
);
5278 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
5283 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
5285 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
5286 gimple_seq_add_stmt (dlist
, g
);
5289 /* The copyin sequence is not to be executed by the main thread, since
5290 that would result in self-copies. Perhaps not visible to scalars,
5291 but it certainly is to C++ operator=. */
5294 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
5296 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
5297 build_int_cst (TREE_TYPE (x
), 0));
5298 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
5299 gimplify_and_add (x
, ilist
);
5302 /* If any copyin variable is passed by reference, we must ensure the
5303 master thread doesn't modify it before it is copied over in all
5304 threads. Similarly for variables in both firstprivate and
5305 lastprivate clauses we need to ensure the lastprivate copying
5306 happens after firstprivate copying in all threads. And similarly
5307 for UDRs if initializer expression refers to omp_orig. */
5308 if (copyin_by_ref
|| lastprivate_firstprivate
|| reduction_omp_orig_ref
)
5310 /* Don't add any barrier for #pragma omp simd or
5311 #pragma omp distribute. */
5312 if (!is_task_ctx (ctx
)
5313 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
5314 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
5315 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
5318 /* If max_vf is non-zero, then we can use only a vectorization factor
5319 up to the max_vf we chose. So stick it into the safelen clause. */
5320 if (maybe_ne (sctx
.max_vf
, 0U))
5322 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
5323 OMP_CLAUSE_SAFELEN
);
5324 poly_uint64 safe_len
;
5326 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
5327 && maybe_gt (safe_len
, sctx
.max_vf
)))
5329 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
5330 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
5332 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
5333 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
5339 /* Generate code to implement the LASTPRIVATE clauses. This is used for
5340 both parallel and workshare constructs. PREDICATE may be NULL if it's
/* Generate code to implement the LASTPRIVATE (and LINEAR copy-out)
   clauses, used for both parallel and workshare constructs.  PREDICATE
   may be NULL if the lastprivate copy-back should happen unconditionally.
   NOTE(review): this extraction is line-shattered and several original
   source lines are missing (gaps in the embedded line numbers); the
   comments below annotate only the logic that is visible here.  */
5344 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*stmt_list
,
/* LABEL is the branch target used to skip the copy-back when PREDICATE
   is false; ORIG_CLAUSES keeps the incoming clause chain so the _simt_
   and _simduid_ clauses can be looked up later even after CLAUSES is
   re-pointed at the combined parallel's clause list.  */
5347 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
5348 bool par_clauses
= false;
/* SIMD/SIMT bookkeeping: SIMDUID identifies the simd loop's "omp simd
   array" temporaries; LASTLANE and SIMTLAST cache the last-iteration
   lane index so each is computed at most once per invocation.  */
5349 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
5351 /* Early exit if there are no lastprivate or linear clauses. */
5352 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
5353 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
5354 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
5355 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
5357 if (clauses
== NULL
)
5359 /* If this was a workshare clause, see if it had been combined
5360 with its parallel. In that case, look for the clauses on the
5361 parallel statement itself. */
5362 if (is_parallel_ctx (ctx
))
5366 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
5369 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
5370 OMP_CLAUSE_LASTPRIVATE
);
5371 if (clauses
== NULL
)
/* Detect a simd loop that may run in SIMT mode: presumably when the
   context statement is a GIMPLE_OMP_FOR with a simd kind, the _simt_
   and _simduid_ clauses recorded on ORIG_CLAUSES are looked up here
   — TODO confirm against the dropped lines in this range.  */
5376 bool maybe_simt
= false;
5377 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
5378 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
5380 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
5381 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
5383 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
/* Lower PREDICATE into an explicit conditional branch: split a
   comparison into its two arms, otherwise compare the whole predicate
   against false; LABEL_TRUE receives the copy-back code and LABEL is
   the fall-through skip target emitted at the end.  */
5389 tree label_true
, arm1
, arm2
;
5390 enum tree_code pred_code
= TREE_CODE (predicate
);
5392 label
= create_artificial_label (UNKNOWN_LOCATION
);
5393 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
5394 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
5396 arm1
= TREE_OPERAND (predicate
, 0);
5397 arm2
= TREE_OPERAND (predicate
, 1);
5398 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
5399 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
5404 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
5405 arm2
= boolean_false_node
;
5406 pred_code
= NE_EXPR
;
/* In SIMT mode the predicate must agree across lanes: materialize it
   into SIMTCOND and vote across the warp with IFN_GOMP_SIMT_VOTE_ANY
   before branching.  */
5410 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
5411 c
= fold_convert (integer_type_node
, c
);
5412 simtcond
= create_tmp_var (integer_type_node
);
5413 gimplify_assign (simtcond
, c
, stmt_list
);
5414 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
5416 c
= create_tmp_var (integer_type_node
);
5417 gimple_call_set_lhs (g
, c
);
5418 gimple_seq_add_stmt (stmt_list
, g
);
5419 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
5423 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
5424 gimple_seq_add_stmt (stmt_list
, stmt
);
5425 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
/* Main loop: walk the clause chain, emitting the copy-back for every
   lastprivate clause and every linear clause without NO_COPYOUT.  The
   chain is advanced at the bottom (embedded line 5535), where it may
   also continue onto a combined parallel's clauses.  */
5428 for (c
= clauses
; c
;)
5431 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5433 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5434 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5435 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
/* For taskloop firstprivate+lastprivate, the private copy lives in
   the enclosing task context, so look NEW_VAR up in CTX->outer.  */
5437 var
= OMP_CLAUSE_DECL (c
);
5438 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5439 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
5440 && is_taskloop_ctx (ctx
))
5442 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
5443 new_var
= lookup_decl (var
, ctx
->outer
);
5447 new_var
= lookup_decl (var
, ctx
);
5448 /* Avoid uninitialized warnings for lastprivate and
5449 for linear iterators. */
5451 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5452 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
5453 TREE_NO_WARNING (new_var
) = 1;
/* Non-SIMT simd loop whose private copy was placed in an "omp simd
   array": ask IFN_GOMP_SIMD_LAST_LANE (cached in LASTLANE) which lane
   ran the final iteration and index the array with it.  */
5456 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
5458 tree val
= DECL_VALUE_EXPR (new_var
);
5459 if (TREE_CODE (val
) == ARRAY_REF
5460 && VAR_P (TREE_OPERAND (val
, 0))
5461 && lookup_attribute ("omp simd array",
5462 DECL_ATTRIBUTES (TREE_OPERAND (val
,
5465 if (lastlane
== NULL
)
5467 lastlane
= create_tmp_var (unsigned_type_node
);
5469 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
5471 TREE_OPERAND (val
, 1));
5472 gimple_call_set_lhs (g
, lastlane
);
5473 gimple_seq_add_stmt (stmt_list
, g
);
5475 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
5476 TREE_OPERAND (val
, 0), lastlane
,
5477 NULL_TREE
, NULL_TREE
);
/* SIMT path: find the last active lane (IFN_GOMP_SIMT_LAST_LANE on
   SIMTCOND, cached in SIMTLAST) and shuffle the value from it with
   IFN_GOMP_SIMT_XCHG_IDX, storing the result back into NEW_VAR.  */
5480 else if (maybe_simt
)
5482 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
5483 ? DECL_VALUE_EXPR (new_var
)
5485 if (simtlast
== NULL
)
5487 simtlast
= create_tmp_var (unsigned_type_node
);
5488 gcall
*g
= gimple_build_call_internal
5489 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
5490 gimple_call_set_lhs (g
, simtlast
);
5491 gimple_seq_add_stmt (stmt_list
, g
);
5493 x
= build_call_expr_internal_loc
5494 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
5495 TREE_TYPE (val
), 2, val
, simtlast
);
5496 new_var
= unshare_expr (new_var
);
5497 gimplify_assign (new_var
, x
, stmt_list
);
5498 new_var
= unshare_expr (new_var
);
/* Splice in any frontend-prepared GIMPLE sequence attached to the
   clause (lastprivate copy statements or linear finalization), then
   clear it so it is not lowered twice.  */
5501 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5502 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
5504 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
5505 gimple_seq_add_seq (stmt_list
,
5506 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
5507 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
5509 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5510 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
5512 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
5513 gimple_seq_add_seq (stmt_list
,
5514 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
5515 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
/* Taskloop iteration variables: presumably the copy-back is skipped
   unless the outer variable is a non-global the task can see — the
   deciding lines (5521, 5524, 5526-5528) are missing here; verify
   against the full source.  */
5519 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5520 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c
))
5522 gcc_checking_assert (is_taskloop_ctx (ctx
));
5523 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
5525 if (is_global_var (ovar
))
/* Emit the actual copy-back: reference the outer (original) variable,
   dereference NEW_VAR if the privatized entity is a reference, and
   assign via the frontend's omp_clause_assign_op hook.  */
5529 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
)
;
5530 if (omp_is_reference (var
))
5531 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5532 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
5533 gimplify_and_add (x
, stmt_list
);
/* Advance to the next clause; when the workshare chain is exhausted,
   continue with the lastprivate clauses of the combined parallel
   statement (mirrors the lookup at embedded lines 5362-5370).  */
5535 c
= OMP_CLAUSE_CHAIN (c
);
5536 if (c
== NULL
&& !par_clauses
)
5538 /* If this was a workshare clause, see if it had been combined
5539 with its parallel. In that case, continue looking for the
5540 clauses also on the parallel statement itself. */
5541 if (is_parallel_ctx (ctx
))
5545 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
5548 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
5549 OMP_CLAUSE_LASTPRIVATE
);
/* Finally place the skip label targeted by the predicate branch.  */
5555 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
5558 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
5559 (which might be a placeholder). INNER is true if this is an inner
5560 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
5561 join markers. Generate the before-loop forking sequence in
5562 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
5563 general form of these sequences is
5565 GOACC_REDUCTION_SETUP
5567 GOACC_REDUCTION_INIT
5569 GOACC_REDUCTION_FINI
5571 GOACC_REDUCTION_TEARDOWN. */
5574 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
5575 gcall
*fork
, gcall
*join
, gimple_seq
*fork_seq
,
5576 gimple_seq
*join_seq
, omp_context
*ctx
)
5578 gimple_seq before_fork
= NULL
;
5579 gimple_seq after_fork
= NULL
;
5580 gimple_seq before_join
= NULL
;
5581 gimple_seq after_join
= NULL
;
5582 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
5583 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
5584 unsigned offset
= 0;
5586 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5587 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
5589 tree orig
= OMP_CLAUSE_DECL (c
);
5590 tree var
= maybe_lookup_decl (orig
, ctx
);
5591 tree ref_to_res
= NULL_TREE
;
5592 tree incoming
, outgoing
, v1
, v2
, v3
;
5593 bool is_private
= false;
5595 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
5596 if (rcode
== MINUS_EXPR
)
5598 else if (rcode
== TRUTH_ANDIF_EXPR
)
5599 rcode
= BIT_AND_EXPR
;
5600 else if (rcode
== TRUTH_ORIF_EXPR
)
5601 rcode
= BIT_IOR_EXPR
;
5602 tree op
= build_int_cst (unsigned_type_node
, rcode
);
5607 incoming
= outgoing
= var
;
5611 /* See if an outer construct also reduces this variable. */
5612 omp_context
*outer
= ctx
;
5614 while (omp_context
*probe
= outer
->outer
)
5616 enum gimple_code type
= gimple_code (probe
->stmt
);
5621 case GIMPLE_OMP_FOR
:
5622 cls
= gimple_omp_for_clauses (probe
->stmt
);
5625 case GIMPLE_OMP_TARGET
:
5626 if (gimple_omp_target_kind (probe
->stmt
)
5627 != GF_OMP_TARGET_KIND_OACC_PARALLEL
)
5630 cls
= gimple_omp_target_clauses (probe
->stmt
);
5638 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
5639 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
5640 && orig
== OMP_CLAUSE_DECL (cls
))
5642 incoming
= outgoing
= lookup_decl (orig
, probe
);
5643 goto has_outer_reduction
;
5645 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
5646 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
5647 && orig
== OMP_CLAUSE_DECL (cls
))
5655 /* This is the outermost construct with this reduction,
5656 see if there's a mapping for it. */
5657 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
5658 && maybe_lookup_field (orig
, outer
) && !is_private
)
5660 ref_to_res
= build_receiver_ref (orig
, false, outer
);
5661 if (omp_is_reference (orig
))
5662 ref_to_res
= build_simple_mem_ref (ref_to_res
);
5664 tree type
= TREE_TYPE (var
);
5665 if (POINTER_TYPE_P (type
))
5666 type
= TREE_TYPE (type
);
5669 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
5673 /* Try to look at enclosing contexts for reduction var,
5674 use original if no mapping found. */
5676 omp_context
*c
= ctx
->outer
;
5679 t
= maybe_lookup_decl (orig
, c
);
5682 incoming
= outgoing
= (t
? t
: orig
);
5685 has_outer_reduction
:;
5689 ref_to_res
= integer_zero_node
;
5691 if (omp_is_reference (orig
))
5693 tree type
= TREE_TYPE (var
);
5694 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
5698 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
5699 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
5702 v1
= create_tmp_var (type
, id
);
5703 v2
= create_tmp_var (type
, id
);
5704 v3
= create_tmp_var (type
, id
);
5706 gimplify_assign (v1
, var
, fork_seq
);
5707 gimplify_assign (v2
, var
, fork_seq
);
5708 gimplify_assign (v3
, var
, fork_seq
);
5710 var
= build_simple_mem_ref (var
);
5711 v1
= build_simple_mem_ref (v1
);
5712 v2
= build_simple_mem_ref (v2
);
5713 v3
= build_simple_mem_ref (v3
);
5714 outgoing
= build_simple_mem_ref (outgoing
);
5716 if (!TREE_CONSTANT (incoming
))
5717 incoming
= build_simple_mem_ref (incoming
);
5722 /* Determine position in reduction buffer, which may be used
5723 by target. The parser has ensured that this is not a
5724 variable-sized type. */
5725 fixed_size_mode mode
5726 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
5727 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
5728 offset
= (offset
+ align
- 1) & ~(align
- 1);
5729 tree off
= build_int_cst (sizetype
, offset
);
5730 offset
+= GET_MODE_SIZE (mode
);
5734 init_code
= build_int_cst (integer_type_node
,
5735 IFN_GOACC_REDUCTION_INIT
);
5736 fini_code
= build_int_cst (integer_type_node
,
5737 IFN_GOACC_REDUCTION_FINI
);
5738 setup_code
= build_int_cst (integer_type_node
,
5739 IFN_GOACC_REDUCTION_SETUP
);
5740 teardown_code
= build_int_cst (integer_type_node
,
5741 IFN_GOACC_REDUCTION_TEARDOWN
);
5745 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5746 TREE_TYPE (var
), 6, setup_code
,
5747 unshare_expr (ref_to_res
),
5748 incoming
, level
, op
, off
);
5750 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5751 TREE_TYPE (var
), 6, init_code
,
5752 unshare_expr (ref_to_res
),
5753 v1
, level
, op
, off
);
5755 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5756 TREE_TYPE (var
), 6, fini_code
,
5757 unshare_expr (ref_to_res
),
5758 v2
, level
, op
, off
);
5760 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
5761 TREE_TYPE (var
), 6, teardown_code
,
5762 ref_to_res
, v3
, level
, op
, off
);
5764 gimplify_assign (v1
, setup_call
, &before_fork
);
5765 gimplify_assign (v2
, init_call
, &after_fork
);
5766 gimplify_assign (v3
, fini_call
, &before_join
);
5767 gimplify_assign (outgoing
, teardown_call
, &after_join
);
5770 /* Now stitch things together. */
5771 gimple_seq_add_seq (fork_seq
, before_fork
);
5773 gimple_seq_add_stmt (fork_seq
, fork
);
5774 gimple_seq_add_seq (fork_seq
, after_fork
);
5776 gimple_seq_add_seq (join_seq
, before_join
);
5778 gimple_seq_add_stmt (join_seq
, join
);
5779 gimple_seq_add_seq (join_seq
, after_join
);
/* Generate code to implement the REDUCTION clauses of CTX's statement,
   appending the generated statements to *STMT_SEQP.  With exactly one
   scalar reduction an OMP_ATOMIC update is emitted; otherwise the
   merge code is bracketed by GOMP_atomic_start/GOMP_atomic_end calls.  */

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  /* Strip the address computation wrapped around an array-section
	     reduction to reach the underlying decl.  */
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      /* VLA reductions refer to the underlying pointer through
		 the decl's value expression.  */
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      if (count == 1)
	{
	  /* Single scalar reduction: emit one relaxed OMP_ATOMIC update
	     instead of taking the lock.  */
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  /* Array-section reduction: emit an element-by-element merge
	     loop over the section into SUB_SEQ.  */
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_is_reference (var) && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_is_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* User-defined reduction: splice in the merge sequence with
		 the placeholders bound to the outer/private elements.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      x = build2 (code, TREE_TYPE (out), out, priv);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  /* Advance both pointers and the index, loop while i <= v.  */
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  /* Scalar user-defined reduction.  */
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_is_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  /* Plain scalar reduction: outer = outer OP private.  */
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  /* Multiple reductions: protect the whole merge sequence with the
     global GOMP atomic lock.  */
  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
/* Generate code to implement the COPYPRIVATE clauses.  For each clause,
   the send side (*SLIST, run by the single thread) stores the value or
   its address into the sender record, and the receive side (*RLIST, run
   by the other threads) copies it back out via the language hook.  */

static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      /* Sender side: record either the value or its address.  */
      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
	{
	  x = build_fold_addr_expr_loc (clause_loc, new_var);
	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
	}
      gimplify_assign (ref, x, slist);

      /* Receiver side: fetch from the record, dereferencing as needed.  */
      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
	{
	  ref = fold_convert_loc (clause_loc,
				  build_pointer_type (TREE_TYPE (new_var)),
				  ref);
	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
	}
      if (omp_is_reference (var))
	{
	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
	  ref = build_simple_mem_ref_loc (clause_loc, ref);
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	}
      /* Let the frontend emit the copy (handles C++ operator= etc.).  */
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  Incoming copies go to
   *ILIST (before the region), outgoing copies to *OLIST (after it).  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* Filter: fall through only for clause kinds that need sender-side
	 marshalling; `continue' skips the clause entirely.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  /* Peel the array-section wrapper to get the base decl.  */
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      /* Globals generally need no marshalling; the exceptions are COPYIN
	 and certain task-context pointer cases.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* Decide copy direction(s) per clause kind.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    TREE_NO_WARNING (var) = 1;
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_is_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared; instead we walk the
   fields of the sender/receiver record type.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f; f = DECL_CHAIN (f))
    {
      /* Each interesting field carries its original decl as the
	 abstract origin.  */
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  /* Pass by address: only an incoming pointer store is needed.  */
	  x = build_sender_ref (ovar, ctx);
	  var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Pass by value: copy in before, and copy back out after.  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
		 or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
/* Emit an OpenACC head marker call, encapulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  /* Collect partitioning flags and the deepest requested level from
     the loop clauses.  */
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels = MAX (levels, 1);
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels = MAX (levels, 2);
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels = MAX (levels, 3);
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel (tgt))
    tag |= OLF_INDEPENDENT;

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
/* Emit an OpenACC lopp head or tail marker to SEQ.  LEVEL is the
   partitioning level of the enclosed region.  HEAD selects between a
   HEAD_MARK and a TAIL_MARK IFN_UNIQUE call; TOFOLLOW, if non-NULL,
   is passed as an extra argument.  */

static void
lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
			tree tofollow, gimple_seq *seq)
{
  int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
		     : IFN_UNIQUE_OACC_TAIL_MARK);
  tree marker = build_int_cst (integer_type_node, marker_kind);
  /* The trailing argument is optional.  */
  int nargs = 2 + (tofollow != NULL_TREE);
  gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
					    marker, ddvar, tofollow);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);
}
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  One fork/join pair is emitted per partitioning
   level, with the tail built in reverse (innermost join first).  */

static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      /* Wrap the level's reduction setup/teardown around fork/join.  */
      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
   catch handler and return it.  This prevents programs from violating the
   structured block semantics with throws.  Returns BODY unchanged when
   exceptions are disabled.  */

static gimple_seq
maybe_catch_exception (gimple_seq body)
{
  gimple *g;
  tree decl;

  if (!flag_exceptions)
    return body;

  /* Prefer the frontend's cleanup action (e.g. std::terminate for C++),
     falling back to a plain trap.  */
  if (lang_hooks.eh_protect_cleanup_actions != NULL)
    decl = lang_hooks.eh_protect_cleanup_actions ();
  else
    decl = builtin_decl_explicit (BUILT_IN_TRAP);

  g = gimple_build_eh_must_not_throw (decl);
  g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
			GIMPLE_TRY_CATCH);

  return gimple_seq_alloc_with_stmt (g);
}
/* Routines to lower OMP directives into OMP-GIMPLE.  */

/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  /* nowait means no implicit barrier, hence nothing to cancel at.  */
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	/* Branch to the parallel's cancel label when the barrier reports
	   a pending cancellation.  */
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
	break;
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
      return;
}
/* Find the first task_reduction or reduction clause or return NULL
   if there are none.  CCODE selects which clause kind to scan for;
   for plain OMP_CLAUSE_REDUCTION outside a taskloop, only clauses
   marked with the `task' modifier qualify.  */

static inline tree
omp_task_reductions_find_first (tree clauses, enum tree_code code,
				enum omp_clause_code ccode)
{
  while (1)
    {
      clauses = omp_find_clause (clauses, ccode);
      if (clauses == NULL_TREE)
	return NULL_TREE;
      if (ccode != OMP_CLAUSE_REDUCTION
	  || code == OMP_TASKLOOP
	  || OMP_CLAUSE_REDUCTION_TASK (clauses))
	return clauses;
      clauses = OMP_CLAUSE_CHAIN (clauses);
    }
}
6597 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
6598 gimple_seq
*, gimple_seq
*);
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  /* Handle task reductions: thread a _reductemp_ clause through and
     let lower_omp_task_reductions emit the bookkeeping.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  /* Lower each GIMPLE_OMP_SECTION in the body in place.  */
  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  /* Lastprivate handling goes into the last section only.  */
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &l, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Assemble the final body: input clauses, the sections statement
     itself, the switch, the lowered sections, continue, reductions,
     destructors, and the closing return.  */
  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  control = create_tmp_var (unsigned_type_node, ".section");
  t = gimple_build_omp_continue (control, control);
  gimple_omp_sections_set_control (stmt, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, without a copyprivate clause:

	if (GOMP_single_start ())
	  BODY;
	[ GOMP_barrier (); ]	-> unless 'nowait' is present.

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
{
  location_t loc = gimple_location (single_stmt);
  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *call, *cond;
  tree lhs, decl;

  /* lhs = GOMP_single_start ();  */
  decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
  lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
  call = gimple_build_call (decl, 0);
  gimple_call_set_lhs (call, lhs);
  gimple_seq_add_stmt (pre_p, call);

  /* if (lhs == true) goto tlabel; else goto flabel;  */
  cond = gimple_build_cond (EQ_EXPR, lhs,
			    fold_convert_loc (loc, TREE_TYPE (lhs),
					      boolean_true_node),
			    tlabel, flabel);
  gimple_seq_add_stmt (pre_p, cond);
  gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
  gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
}
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

	if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	  {
	    BODY;
	    copyout.a = a; copyout.b = b; copyout.c = c;
	    GOMP_single_copy_end (&copyout);
	  }
	else
	  {
	    a = copyout_p->a; b = copyout_p->b; c = copyout_p->c;
	  }
	GOMP_barrier ();

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  /* The sender record holds the executing thread's copies; the
     receiver pointer is what the other threads read them from.  */
  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  /* receiver = GOMP_single_copy_start ();  */
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  /* NULL means this thread executes the single body (l0); otherwise
     it goes straight to the copy-in code (l1).  */
  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			     &copyin_seq, ctx);

  /* GOMP_single_copy_end (&copyout);  */
  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
6839 /* Expand code for an OpenMP single directive. */
6842 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6845 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
6847 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
6849 push_gimplify_context ();
6851 block
= make_node (BLOCK
);
6852 bind
= gimple_build_bind (NULL
, NULL
, block
);
6853 gsi_replace (gsi_p
, bind
, true);
6856 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
6857 &bind_body
, &dlist
, ctx
, NULL
);
6858 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
6860 gimple_seq_add_stmt (&bind_body
, single_stmt
);
6862 if (ctx
->record_type
)
6863 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
6865 lower_omp_single_simple (single_stmt
, &bind_body
);
6867 gimple_omp_set_body (single_stmt
, NULL
);
6869 gimple_seq_add_seq (&bind_body
, dlist
);
6871 bind_body
= maybe_catch_exception (bind_body
);
6873 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
6874 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
6875 gimple
*g
= gimple_build_omp_return (nowait
);
6876 gimple_seq_add_stmt (&bind_body_tail
, g
);
6877 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
6878 if (ctx
->record_type
)
6880 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
6881 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
6882 TREE_THIS_VOLATILE (clobber
) = 1;
6883 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
6884 clobber
), GSI_SAME_STMT
);
6886 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
6887 gimple_bind_set_body (bind
, bind_body
);
6889 pop_gimplify_context (bind
);
6891 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6892 BLOCK_VARS (block
) = ctx
->block_vars
;
6893 if (BLOCK_VARS (block
))
6894 TREE_USED (block
) = 1;
6898 /* Expand code for an OpenMP master directive. */
6901 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
6903 tree block
, lab
= NULL
, x
, bfn_decl
;
6904 gimple
*stmt
= gsi_stmt (*gsi_p
);
6906 location_t loc
= gimple_location (stmt
);
6909 push_gimplify_context ();
6911 block
= make_node (BLOCK
);
6912 bind
= gimple_build_bind (NULL
, NULL
, block
);
6913 gsi_replace (gsi_p
, bind
, true);
6914 gimple_bind_add_stmt (bind
, stmt
);
6916 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
6917 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
6918 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
6919 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
6921 gimplify_and_add (x
, &tseq
);
6922 gimple_bind_add_seq (bind
, tseq
);
6924 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
6925 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
6926 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
6927 gimple_omp_set_body (stmt
, NULL
);
6929 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
6931 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
6933 pop_gimplify_context (bind
);
6935 gimple_bind_append_vars (bind
, ctx
->block_vars
);
6936 BLOCK_VARS (block
) = ctx
->block_vars
;
/* Helper function for lower_omp_task_reductions.  For a given PASS, find
   the next clause that should be processed; return false if all clauses
   have already been processed.  */
6944 omp_task_reduction_iterate (int pass
, enum tree_code code
,
6945 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
6946 tree
*type
, tree
*next
)
6948 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
6950 if (ccode
== OMP_CLAUSE_REDUCTION
6951 && code
!= OMP_TASKLOOP
6952 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
6954 *decl
= OMP_CLAUSE_DECL (*c
);
6955 *type
= TREE_TYPE (*decl
);
6956 if (TREE_CODE (*decl
) == MEM_REF
)
6963 if (omp_is_reference (*decl
))
6964 *type
= TREE_TYPE (*type
);
6965 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
6968 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
6977 /* Lower task_reduction and reduction clauses (the latter unless CODE is
6978 OMP_TASKGROUP only with task modifier). Register mapping of those in
6979 START sequence and reducing them and unregister them in the END sequence. */
6982 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
6983 gimple_seq
*start
, gimple_seq
*end
)
6985 enum omp_clause_code ccode
6986 = (code
== OMP_TASKGROUP
6987 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
6988 tree cancellable
= NULL_TREE
;
6989 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
6990 if (clauses
== NULL_TREE
)
6992 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
6994 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
6995 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
6996 && outer
->cancellable
)
6998 cancellable
= error_mark_node
;
7001 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
7004 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
7005 tree
*last
= &TYPE_FIELDS (record_type
);
7009 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
7011 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
7014 DECL_CHAIN (field
) = ifield
;
7015 last
= &DECL_CHAIN (ifield
);
7016 DECL_CONTEXT (field
) = record_type
;
7017 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
7018 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
7019 DECL_CONTEXT (ifield
) = record_type
;
7020 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
7021 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
7023 for (int pass
= 0; pass
< 2; pass
++)
7025 tree decl
, type
, next
;
7026 for (tree c
= clauses
;
7027 omp_task_reduction_iterate (pass
, code
, ccode
,
7028 &c
, &decl
, &type
, &next
); c
= next
)
7031 tree new_type
= type
;
7033 new_type
= remap_type (type
, &ctx
->outer
->cb
);
7035 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
7036 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
7038 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
7040 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
7041 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
7042 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
7045 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
7046 DECL_CONTEXT (field
) = record_type
;
7047 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
7048 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
7050 last
= &DECL_CHAIN (field
);
7052 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
7054 DECL_CONTEXT (bfield
) = record_type
;
7055 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
7056 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
7058 last
= &DECL_CHAIN (bfield
);
7062 layout_type (record_type
);
7064 /* Build up an array which registers with the runtime all the reductions
7065 and deregisters them at the end. Format documented in libgomp/task.c. */
7066 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
7067 tree avar
= create_tmp_var_raw (atype
);
7068 gimple_add_tmp_var (avar
);
7069 TREE_ADDRESSABLE (avar
) = 1;
7070 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
7071 NULL_TREE
, NULL_TREE
);
7072 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
7073 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7074 gimple_seq seq
= NULL
;
7075 tree sz
= fold_convert (pointer_sized_int_node
,
7076 TYPE_SIZE_UNIT (record_type
));
7078 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
7079 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
7080 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
7081 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
7082 ctx
->task_reductions
.create (1 + cnt
);
7083 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
7084 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
7086 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
7087 gimple_seq_add_seq (start
, seq
);
7088 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
7089 NULL_TREE
, NULL_TREE
);
7090 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
7091 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
7092 NULL_TREE
, NULL_TREE
);
7093 t
= build_int_cst (pointer_sized_int_node
,
7094 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
7095 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7096 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
7097 NULL_TREE
, NULL_TREE
);
7098 t
= build_int_cst (pointer_sized_int_node
, -1);
7099 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7100 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
7101 NULL_TREE
, NULL_TREE
);
7102 t
= build_int_cst (pointer_sized_int_node
, 0);
7103 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7105 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
7106 and for each task reduction checks a bool right after the private variable
7107 within that thread's chunk; if the bool is clear, it hasn't been
7108 initialized and thus isn't going to be reduced nor destructed, otherwise
7109 reduce and destruct it. */
7110 tree idx
= create_tmp_var (size_type_node
);
7111 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
7112 tree num_thr_sz
= create_tmp_var (size_type_node
);
7113 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
7114 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
7115 tree lab3
= NULL_TREE
;
7117 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
7119 /* For worksharing constructs, only perform it in the master thread,
7120 with the exception of cancelled implicit barriers - then only handle
7121 the current thread. */
7122 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
7123 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
7124 tree thr_num
= create_tmp_var (integer_type_node
);
7125 g
= gimple_build_call (t
, 0);
7126 gimple_call_set_lhs (g
, thr_num
);
7127 gimple_seq_add_stmt (end
, g
);
7131 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
7132 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
7133 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
7134 if (code
== OMP_FOR
)
7135 c
= gimple_omp_for_clauses (ctx
->stmt
);
7136 else /* if (code == OMP_SECTIONS) */
7137 c
= gimple_omp_sections_clauses (ctx
->stmt
);
7138 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
7140 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
7142 gimple_seq_add_stmt (end
, g
);
7143 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
7144 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
7145 gimple_seq_add_stmt (end
, g
);
7146 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
7147 build_one_cst (TREE_TYPE (idx
)));
7148 gimple_seq_add_stmt (end
, g
);
7149 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
7150 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
7152 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
7153 gimple_seq_add_stmt (end
, g
);
7154 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
7156 if (code
!= OMP_PARALLEL
)
7158 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
7159 tree num_thr
= create_tmp_var (integer_type_node
);
7160 g
= gimple_build_call (t
, 0);
7161 gimple_call_set_lhs (g
, num_thr
);
7162 gimple_seq_add_stmt (end
, g
);
7163 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
7164 gimple_seq_add_stmt (end
, g
);
7166 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
7170 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7171 OMP_CLAUSE__REDUCTEMP_
);
7172 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
7173 t
= fold_convert (size_type_node
, t
);
7174 gimplify_assign (num_thr_sz
, t
, end
);
7176 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
7177 NULL_TREE
, NULL_TREE
);
7178 tree data
= create_tmp_var (pointer_sized_int_node
);
7179 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
7180 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
7182 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
7183 ptr
= create_tmp_var (build_pointer_type (record_type
));
7185 ptr
= create_tmp_var (ptr_type_node
);
7186 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
7188 tree field
= TYPE_FIELDS (record_type
);
7191 field
= DECL_CHAIN (DECL_CHAIN (field
));
7192 for (int pass
= 0; pass
< 2; pass
++)
7194 tree decl
, type
, next
;
7195 for (tree c
= clauses
;
7196 omp_task_reduction_iterate (pass
, code
, ccode
,
7197 &c
, &decl
, &type
, &next
); c
= next
)
7199 tree var
= decl
, ref
;
7200 if (TREE_CODE (decl
) == MEM_REF
)
7202 var
= TREE_OPERAND (var
, 0);
7203 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7204 var
= TREE_OPERAND (var
, 0);
7206 if (TREE_CODE (var
) == ADDR_EXPR
)
7207 var
= TREE_OPERAND (var
, 0);
7208 else if (TREE_CODE (var
) == INDIRECT_REF
)
7209 var
= TREE_OPERAND (var
, 0);
7210 tree orig_var
= var
;
7211 if (is_variable_sized (var
))
7213 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
7214 var
= DECL_VALUE_EXPR (var
);
7215 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
7216 var
= TREE_OPERAND (var
, 0);
7217 gcc_assert (DECL_P (var
));
7219 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
7220 if (orig_var
!= var
)
7221 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
7222 else if (TREE_CODE (v
) == ADDR_EXPR
)
7223 t
= build_fold_addr_expr (t
);
7224 else if (TREE_CODE (v
) == INDIRECT_REF
)
7225 t
= build_fold_indirect_ref (t
);
7226 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
7228 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
7229 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
7230 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
7232 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
7233 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
7234 fold_convert (size_type_node
,
7235 TREE_OPERAND (decl
, 1)));
7239 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
7240 if (!omp_is_reference (decl
))
7241 t
= build_fold_addr_expr (t
);
7243 t
= fold_convert (pointer_sized_int_node
, t
);
7245 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
7246 gimple_seq_add_seq (start
, seq
);
7247 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
7248 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
7249 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7250 t
= unshare_expr (byte_position (field
));
7251 t
= fold_convert (pointer_sized_int_node
, t
);
7252 ctx
->task_reduction_map
->put (c
, cnt
);
7253 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
7256 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
7257 gimple_seq_add_seq (start
, seq
);
7258 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
7259 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
7260 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7262 tree bfield
= DECL_CHAIN (field
);
7264 if (code
== OMP_PARALLEL
|| code
== OMP_FOR
|| code
== OMP_SECTIONS
)
7265 /* In parallel or worksharing all threads unconditionally
7266 initialize all their task reduction private variables. */
7267 cond
= boolean_true_node
;
7268 else if (TREE_TYPE (ptr
) == ptr_type_node
)
7270 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
7271 unshare_expr (byte_position (bfield
)));
7273 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
7274 gimple_seq_add_seq (end
, seq
);
7275 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
7276 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
7277 build_int_cst (pbool
, 0));
7280 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
7281 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
7282 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
7283 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
7284 tree condv
= create_tmp_var (boolean_type_node
);
7285 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
7286 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
7288 gimple_seq_add_stmt (end
, g
);
7289 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
7290 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
7292 /* If this reduction doesn't need destruction and parallel
7293 has been cancelled, there is nothing to do for this
7294 reduction, so jump around the merge operation. */
7295 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
7296 g
= gimple_build_cond (NE_EXPR
, cancellable
,
7297 build_zero_cst (TREE_TYPE (cancellable
)),
7299 gimple_seq_add_stmt (end
, g
);
7300 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
7304 if (TREE_TYPE (ptr
) == ptr_type_node
)
7306 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
7307 unshare_expr (byte_position (field
)));
7309 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
7310 gimple_seq_add_seq (end
, seq
);
7311 tree pbool
= build_pointer_type (TREE_TYPE (field
));
7312 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
7313 build_int_cst (pbool
, 0));
7316 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
7317 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
7319 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
7320 if (TREE_CODE (decl
) != MEM_REF
&& omp_is_reference (decl
))
7321 ref
= build_simple_mem_ref (ref
);
7322 /* reduction(-:var) sums up the partial results, so it acts
7323 identically to reduction(+:var). */
7324 if (rcode
== MINUS_EXPR
)
7326 if (TREE_CODE (decl
) == MEM_REF
)
7328 tree type
= TREE_TYPE (new_var
);
7329 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7330 tree i
= create_tmp_var (TREE_TYPE (v
));
7331 tree ptype
= build_pointer_type (TREE_TYPE (type
));
7334 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7335 tree vv
= create_tmp_var (TREE_TYPE (v
));
7336 gimplify_assign (vv
, v
, start
);
7339 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
7340 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
7341 new_var
= build_fold_addr_expr (new_var
);
7342 new_var
= fold_convert (ptype
, new_var
);
7343 ref
= fold_convert (ptype
, ref
);
7344 tree m
= create_tmp_var (ptype
);
7345 gimplify_assign (m
, new_var
, end
);
7347 m
= create_tmp_var (ptype
);
7348 gimplify_assign (m
, ref
, end
);
7350 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
7351 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7352 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
7353 gimple_seq_add_stmt (end
, gimple_build_label (body
));
7354 tree priv
= build_simple_mem_ref (new_var
);
7355 tree out
= build_simple_mem_ref (ref
);
7356 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7358 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7359 tree decl_placeholder
7360 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
7361 tree lab6
= NULL_TREE
;
7364 /* If this reduction needs destruction and parallel
7365 has been cancelled, jump around the merge operation
7366 to the destruction. */
7367 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
7368 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
7369 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
7370 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
7372 gimple_seq_add_stmt (end
, g
);
7373 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
7375 SET_DECL_VALUE_EXPR (placeholder
, out
);
7376 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7377 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
7378 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
7379 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7380 gimple_seq_add_seq (end
,
7381 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7382 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7383 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
7385 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7386 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
7389 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
7390 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
7393 gimple_seq tseq
= NULL
;
7394 gimplify_stmt (&x
, &tseq
);
7395 gimple_seq_add_seq (end
, tseq
);
7400 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
7401 out
= unshare_expr (out
);
7402 gimplify_assign (out
, x
, end
);
7405 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
7406 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7407 gimple_seq_add_stmt (end
, g
);
7408 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
7409 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7410 gimple_seq_add_stmt (end
, g
);
7411 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
7412 build_int_cst (TREE_TYPE (i
), 1));
7413 gimple_seq_add_stmt (end
, g
);
7414 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
7415 gimple_seq_add_stmt (end
, g
);
7416 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
7418 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7420 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7421 tree oldv
= NULL_TREE
;
7422 tree lab6
= NULL_TREE
;
7425 /* If this reduction needs destruction and parallel
7426 has been cancelled, jump around the merge operation
7427 to the destruction. */
7428 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
7429 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
7430 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
7431 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
7433 gimple_seq_add_stmt (end
, g
);
7434 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
7436 if (omp_is_reference (decl
)
7437 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7439 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
7440 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
7441 tree refv
= create_tmp_var (TREE_TYPE (ref
));
7442 gimplify_assign (refv
, ref
, end
);
7443 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
7444 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7445 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7446 tree d
= maybe_lookup_decl (decl
, ctx
);
7448 if (DECL_HAS_VALUE_EXPR_P (d
))
7449 oldv
= DECL_VALUE_EXPR (d
);
7450 if (omp_is_reference (var
))
7452 tree v
= fold_convert (TREE_TYPE (d
),
7453 build_fold_addr_expr (new_var
));
7454 SET_DECL_VALUE_EXPR (d
, v
);
7457 SET_DECL_VALUE_EXPR (d
, new_var
);
7458 DECL_HAS_VALUE_EXPR_P (d
) = 1;
7459 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7461 SET_DECL_VALUE_EXPR (d
, oldv
);
7464 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
7465 DECL_HAS_VALUE_EXPR_P (d
) = 0;
7467 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7468 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7469 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
7470 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7472 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
7473 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
7476 gimple_seq tseq
= NULL
;
7477 gimplify_stmt (&x
, &tseq
);
7478 gimple_seq_add_seq (end
, tseq
);
7483 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
7484 ref
= unshare_expr (ref
);
7485 gimplify_assign (ref
, x
, end
);
7487 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
7489 field
= DECL_CHAIN (bfield
);
7493 if (code
== OMP_TASKGROUP
)
7495 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
7496 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
7497 gimple_seq_add_stmt (start
, g
);
7502 if (code
== OMP_FOR
)
7503 c
= gimple_omp_for_clauses (ctx
->stmt
);
7504 else if (code
== OMP_SECTIONS
)
7505 c
= gimple_omp_sections_clauses (ctx
->stmt
);
7507 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
7508 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
7509 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
7510 build_fold_addr_expr (avar
));
7511 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
7514 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
7515 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
7517 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
7518 gimple_seq_add_stmt (end
, g
);
7519 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
7520 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
7522 enum built_in_function bfn
7523 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
7524 t
= builtin_decl_explicit (bfn
);
7525 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
7529 arg
= create_tmp_var (c_bool_type
);
7530 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
7534 arg
= build_int_cst (c_bool_type
, 0);
7535 g
= gimple_build_call (t
, 1, arg
);
7539 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
7540 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
7542 gimple_seq_add_stmt (end
, g
);
7543 t
= build_constructor (atype
, NULL
);
7544 TREE_THIS_VOLATILE (t
) = 1;
7545 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
7548 /* Expand code for an OpenMP taskgroup directive. */
7551 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7553 gimple
*stmt
= gsi_stmt (*gsi_p
);
7556 gimple_seq dseq
= NULL
;
7557 tree block
= make_node (BLOCK
);
7559 bind
= gimple_build_bind (NULL
, NULL
, block
);
7560 gsi_replace (gsi_p
, bind
, true);
7561 gimple_bind_add_stmt (bind
, stmt
);
7563 push_gimplify_context ();
7565 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
7567 gimple_bind_add_stmt (bind
, x
);
7569 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
7570 gimple_omp_taskgroup_clauses (stmt
),
7571 gimple_bind_body_ptr (bind
), &dseq
);
7573 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
7574 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
7575 gimple_omp_set_body (stmt
, NULL
);
7577 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
7578 gimple_bind_add_seq (bind
, dseq
);
7580 pop_gimplify_context (bind
);
7582 gimple_bind_append_vars (bind
, ctx
->block_vars
);
7583 BLOCK_VARS (block
) = ctx
->block_vars
;
7587 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
7590 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
7593 struct omp_for_data fd
;
7594 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
7597 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
7598 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
7599 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
7603 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
7604 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
7605 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
7606 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
7608 /* Merge depend clauses from multiple adjacent
7609 #pragma omp ordered depend(sink:...) constructs
7610 into one #pragma omp ordered depend(sink:...), so that
7611 we can optimize them together. */
7612 gimple_stmt_iterator gsi
= *gsi_p
;
7614 while (!gsi_end_p (gsi
))
7616 gimple
*stmt
= gsi_stmt (gsi
);
7617 if (is_gimple_debug (stmt
)
7618 || gimple_code (stmt
) == GIMPLE_NOP
)
7623 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
7625 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
7626 c
= gimple_omp_ordered_clauses (ord_stmt2
);
7628 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
7629 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
7632 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
7634 gsi_remove (&gsi
, true);
7638 /* Canonicalize sink dependence clauses into one folded clause if
7641 The basic algorithm is to create a sink vector whose first
7642 element is the GCD of all the first elements, and whose remaining
7643 elements are the minimum of the subsequent columns.
7645 We ignore dependence vectors whose first element is zero because
7646 such dependencies are known to be executed by the same thread.
7648 We take into account the direction of the loop, so a minimum
7649 becomes a maximum if the loop is iterating forwards. We also
7650 ignore sink clauses where the loop direction is unknown, or where
7651 the offsets are clearly invalid because they are not a multiple
7652 of the loop increment.
7656 #pragma omp for ordered(2)
7657 for (i=0; i < N; ++i)
7658 for (j=0; j < M; ++j)
7660 #pragma omp ordered \
7661 depend(sink:i-8,j-2) \
7662 depend(sink:i,j-1) \ // Completely ignored because i+0.
7663 depend(sink:i-4,j-3) \
7664 depend(sink:i-6,j-4)
7665 #pragma omp ordered depend(source)
7670 depend(sink:-gcd(8,4,6),-min(2,3,4))
7675 /* FIXME: Computing GCD's where the first element is zero is
7676 non-trivial in the presence of collapsed loops. Do this later. */
7677 if (fd
.collapse
> 1)
7680 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
7682 /* wide_int is not a POD so it must be default-constructed. */
7683 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
7684 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
7686 tree folded_dep
= NULL_TREE
;
7687 /* TRUE if the first dimension's offset is negative. */
7688 bool neg_offset_p
= false;
7690 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
7692 while ((c
= *list_p
) != NULL
)
7694 bool remove
= false;
7696 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
7697 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
7698 goto next_ordered_clause
;
7701 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
7702 vec
&& TREE_CODE (vec
) == TREE_LIST
;
7703 vec
= TREE_CHAIN (vec
), ++i
)
7705 gcc_assert (i
< len
);
7707 /* omp_extract_for_data has canonicalized the condition. */
7708 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
7709 || fd
.loops
[i
].cond_code
== GT_EXPR
);
7710 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
7711 bool maybe_lexically_later
= true;
7713 /* While the committee makes up its mind, bail if we have any
7714 non-constant steps. */
7715 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
7716 goto lower_omp_ordered_ret
;
7718 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
7719 if (POINTER_TYPE_P (itype
))
7721 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
7722 TYPE_PRECISION (itype
),
7725 /* Ignore invalid offsets that are not multiples of the step. */
7726 if (!wi::multiple_of_p (wi::abs (offset
),
7727 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
7730 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
7731 "ignoring sink clause with offset that is not "
7732 "a multiple of the loop step");
7734 goto next_ordered_clause
;
7737 /* Calculate the first dimension. The first dimension of
7738 the folded dependency vector is the GCD of the first
7739 elements, while ignoring any first elements whose offset
7743 /* Ignore dependence vectors whose first dimension is 0. */
7747 goto next_ordered_clause
;
7751 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
7753 error_at (OMP_CLAUSE_LOCATION (c
),
7754 "first offset must be in opposite direction "
7755 "of loop iterations");
7756 goto lower_omp_ordered_ret
;
7760 neg_offset_p
= forward
;
7761 /* Initialize the first time around. */
7762 if (folded_dep
== NULL_TREE
)
7765 folded_deps
[0] = offset
;
7768 folded_deps
[0] = wi::gcd (folded_deps
[0],
7772 /* Calculate minimum for the remaining dimensions. */
7775 folded_deps
[len
+ i
- 1] = offset
;
7776 if (folded_dep
== c
)
7777 folded_deps
[i
] = offset
;
7778 else if (maybe_lexically_later
7779 && !wi::eq_p (folded_deps
[i
], offset
))
7781 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
7785 for (j
= 1; j
<= i
; j
++)
7786 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
7789 maybe_lexically_later
= false;
7793 gcc_assert (i
== len
);
7797 next_ordered_clause
:
7799 *list_p
= OMP_CLAUSE_CHAIN (c
);
7801 list_p
= &OMP_CLAUSE_CHAIN (c
);
7807 folded_deps
[0] = -folded_deps
[0];
7809 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
7810 if (POINTER_TYPE_P (itype
))
7813 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
7814 = wide_int_to_tree (itype
, folded_deps
[0]);
7815 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
7816 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
7819 lower_omp_ordered_ret
:
7821 /* Ordered without clauses is #pragma omp threads, while we want
7822 a nop instead if we remove all clauses. */
7823 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
7824 gsi_replace (gsi_p
, gimple_build_nop (), true);
7828 /* Expand code for an OpenMP ordered directive. */
7831 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7834 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
7835 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
7838 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
7840 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
7843 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
7844 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
7845 OMP_CLAUSE_THREADS
);
7847 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
7850 /* FIXME: This is needs to be moved to the expansion to verify various
7851 conditions only testable on cfg with dominators computed, and also
7852 all the depend clauses to be merged still might need to be available
7853 for the runtime checks. */
7855 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
7859 push_gimplify_context ();
7861 block
= make_node (BLOCK
);
7862 bind
= gimple_build_bind (NULL
, NULL
, block
);
7863 gsi_replace (gsi_p
, bind
, true);
7864 gimple_bind_add_stmt (bind
, stmt
);
7868 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
7869 build_int_cst (NULL_TREE
, threads
));
7870 cfun
->has_simduid_loops
= true;
7873 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
7875 gimple_bind_add_stmt (bind
, x
);
7877 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
7880 counter
= create_tmp_var (integer_type_node
);
7881 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
7882 gimple_call_set_lhs (g
, counter
);
7883 gimple_bind_add_stmt (bind
, g
);
7885 body
= create_artificial_label (UNKNOWN_LOCATION
);
7886 test
= create_artificial_label (UNKNOWN_LOCATION
);
7887 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
7889 tree simt_pred
= create_tmp_var (integer_type_node
);
7890 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
7891 gimple_call_set_lhs (g
, simt_pred
);
7892 gimple_bind_add_stmt (bind
, g
);
7894 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
7895 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
7896 gimple_bind_add_stmt (bind
, g
);
7898 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
7900 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
7901 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
7902 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
7903 gimple_omp_set_body (stmt
, NULL
);
7907 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
7908 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
7909 gimple_bind_add_stmt (bind
, g
);
7911 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
7912 tree nonneg
= create_tmp_var (integer_type_node
);
7913 gimple_seq tseq
= NULL
;
7914 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
7915 gimple_bind_add_seq (bind
, tseq
);
7917 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
7918 gimple_call_set_lhs (g
, nonneg
);
7919 gimple_bind_add_stmt (bind
, g
);
7921 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7922 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
7923 gimple_bind_add_stmt (bind
, g
);
7925 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
7928 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
7929 build_int_cst (NULL_TREE
, threads
));
7931 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
7933 gimple_bind_add_stmt (bind
, x
);
7935 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
7937 pop_gimplify_context (bind
);
7939 gimple_bind_append_vars (bind
, ctx
->block_vars
);
7940 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
7944 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
7945 substitution of a couple of function calls. But in the NAMED case,
7946 requires that languages coordinate a symbol name. It is therefore
7947 best put here in common code. */
7949 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
7952 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7955 tree name
, lock
, unlock
;
7956 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
7958 location_t loc
= gimple_location (stmt
);
7961 name
= gimple_omp_critical_name (stmt
);
7966 if (!critical_name_mutexes
)
7967 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
7969 tree
*n
= critical_name_mutexes
->get (name
);
7974 decl
= create_tmp_var_raw (ptr_type_node
);
7976 new_str
= ACONCAT ((".gomp_critical_user_",
7977 IDENTIFIER_POINTER (name
), NULL
));
7978 DECL_NAME (decl
) = get_identifier (new_str
);
7979 TREE_PUBLIC (decl
) = 1;
7980 TREE_STATIC (decl
) = 1;
7981 DECL_COMMON (decl
) = 1;
7982 DECL_ARTIFICIAL (decl
) = 1;
7983 DECL_IGNORED_P (decl
) = 1;
7985 varpool_node::finalize_decl (decl
);
7987 critical_name_mutexes
->put (name
, decl
);
7992 /* If '#pragma omp critical' is inside offloaded region or
7993 inside function marked as offloadable, the symbol must be
7994 marked as offloadable too. */
7996 if (cgraph_node::get (current_function_decl
)->offloadable
)
7997 varpool_node::get_create (decl
)->offloadable
= 1;
7999 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
8000 if (is_gimple_omp_offloaded (octx
->stmt
))
8002 varpool_node::get_create (decl
)->offloadable
= 1;
8006 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
8007 lock
= build_call_expr_loc (loc
, lock
, 1,
8008 build_fold_addr_expr_loc (loc
, decl
));
8010 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
8011 unlock
= build_call_expr_loc (loc
, unlock
, 1,
8012 build_fold_addr_expr_loc (loc
, decl
));
8016 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
8017 lock
= build_call_expr_loc (loc
, lock
, 0);
8019 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
8020 unlock
= build_call_expr_loc (loc
, unlock
, 0);
8023 push_gimplify_context ();
8025 block
= make_node (BLOCK
);
8026 bind
= gimple_build_bind (NULL
, NULL
, block
);
8027 gsi_replace (gsi_p
, bind
, true);
8028 gimple_bind_add_stmt (bind
, stmt
);
8030 tbody
= gimple_bind_body (bind
);
8031 gimplify_and_add (lock
, &tbody
);
8032 gimple_bind_set_body (bind
, tbody
);
8034 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8035 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8036 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8037 gimple_omp_set_body (stmt
, NULL
);
8039 tbody
= gimple_bind_body (bind
);
8040 gimplify_and_add (unlock
, &tbody
);
8041 gimple_bind_set_body (bind
, tbody
);
8043 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8045 pop_gimplify_context (bind
);
8046 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8047 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8050 /* A subroutine of lower_omp_for. Generate code to emit the predicate
8051 for a lastprivate clause. Given a loop control predicate of (V
8052 cond N2), we gate the clause on (!(V cond N2)). The lowered form
8053 is appended to *DLIST, iterator initialization is appended to
8057 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
8058 gimple_seq
*dlist
, struct omp_context
*ctx
)
8060 tree clauses
, cond
, vinit
;
8061 enum tree_code cond_code
;
8064 cond_code
= fd
->loop
.cond_code
;
8065 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
8067 /* When possible, use a strict equality expression. This can let VRP
8068 type optimizations deduce the value and remove a copy. */
8069 if (tree_fits_shwi_p (fd
->loop
.step
))
8071 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
8072 if (step
== 1 || step
== -1)
8073 cond_code
= EQ_EXPR
;
8076 if (gimple_omp_for_kind (fd
->for_stmt
) == GF_OMP_FOR_KIND_GRID_LOOP
8077 || gimple_omp_for_grid_phony (fd
->for_stmt
))
8078 cond
= omp_grid_lastprivate_predicate (fd
);
8081 tree n2
= fd
->loop
.n2
;
8082 if (fd
->collapse
> 1
8083 && TREE_CODE (n2
) != INTEGER_CST
8084 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
8086 struct omp_context
*taskreg_ctx
= NULL
;
8087 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
8089 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
8090 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
8091 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
8093 if (gimple_omp_for_combined_into_p (gfor
))
8095 gcc_assert (ctx
->outer
->outer
8096 && is_parallel_ctx (ctx
->outer
->outer
));
8097 taskreg_ctx
= ctx
->outer
->outer
;
8101 struct omp_for_data outer_fd
;
8102 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
8103 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
8106 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
8107 taskreg_ctx
= ctx
->outer
->outer
;
8109 else if (is_taskreg_ctx (ctx
->outer
))
8110 taskreg_ctx
= ctx
->outer
;
8114 tree taskreg_clauses
8115 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
8116 tree innerc
= omp_find_clause (taskreg_clauses
,
8117 OMP_CLAUSE__LOOPTEMP_
);
8118 gcc_assert (innerc
);
8119 for (i
= 0; i
< fd
->collapse
; i
++)
8121 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
8122 OMP_CLAUSE__LOOPTEMP_
);
8123 gcc_assert (innerc
);
8125 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
8126 OMP_CLAUSE__LOOPTEMP_
);
8128 n2
= fold_convert (TREE_TYPE (n2
),
8129 lookup_decl (OMP_CLAUSE_DECL (innerc
),
8133 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
8136 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
8138 lower_lastprivate_clauses (clauses
, cond
, &stmts
, ctx
);
8139 if (!gimple_seq_empty_p (stmts
))
8141 gimple_seq_add_seq (&stmts
, *dlist
);
8144 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
8145 vinit
= fd
->loop
.n1
;
8146 if (cond_code
== EQ_EXPR
8147 && tree_fits_shwi_p (fd
->loop
.n2
)
8148 && ! integer_zerop (fd
->loop
.n2
))
8149 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
8151 vinit
= unshare_expr (vinit
);
8153 /* Initialize the iterator variable, so that threads that don't execute
8154 any iterations don't execute the lastprivate clauses by accident. */
8155 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
8160 /* Lower code for an OMP loop directive. */
8163 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8166 struct omp_for_data fd
, *fdp
= NULL
;
8167 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
8169 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
8170 gimple_seq cnt_list
= NULL
;
8171 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
8174 push_gimplify_context ();
8176 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
8178 block
= make_node (BLOCK
);
8179 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
8180 /* Replace at gsi right away, so that 'stmt' is no member
8181 of a sequence anymore as we're going to add to a different
8183 gsi_replace (gsi_p
, new_stmt
, true);
8185 /* Move declaration of temporaries in the loop body before we make
8187 omp_for_body
= gimple_omp_body (stmt
);
8188 if (!gimple_seq_empty_p (omp_for_body
)
8189 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
8192 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
8193 tree vars
= gimple_bind_vars (inner_bind
);
8194 gimple_bind_append_vars (new_stmt
, vars
);
8195 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
8196 keep them on the inner_bind and it's block. */
8197 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
8198 if (gimple_bind_block (inner_bind
))
8199 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
8202 if (gimple_omp_for_combined_into_p (stmt
))
8204 omp_extract_for_data (stmt
, &fd
, NULL
);
8207 /* We need two temporaries with fd.loop.v type (istart/iend)
8208 and then (fd.collapse - 1) temporaries with the same
8209 type for count2 ... countN-1 vars if not constant. */
8211 tree type
= fd
.iter_type
;
8213 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
8214 count
+= fd
.collapse
- 1;
8216 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
8217 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
8218 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
8223 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
8224 OMP_CLAUSE__LOOPTEMP_
);
8226 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
8227 OMP_CLAUSE__LOOPTEMP_
);
8228 for (i
= 0; i
< count
; i
++)
8233 gcc_assert (outerc
);
8234 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
8235 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
8236 OMP_CLAUSE__LOOPTEMP_
);
8240 /* If there are 2 adjacent SIMD stmts, one with _simt_
8241 clause, another without, make sure they have the same
8242 decls in _looptemp_ clauses, because the outer stmt
8243 they are combined into will look up just one inner_stmt. */
8245 temp
= OMP_CLAUSE_DECL (simtc
);
8247 temp
= create_tmp_var (type
);
8248 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
8250 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
8251 OMP_CLAUSE_DECL (*pc
) = temp
;
8252 pc
= &OMP_CLAUSE_CHAIN (*pc
);
8254 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
8255 OMP_CLAUSE__LOOPTEMP_
);
8260 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
8264 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
8265 OMP_CLAUSE_REDUCTION
);
8266 tree rtmp
= NULL_TREE
;
8269 tree type
= build_pointer_type (pointer_sized_int_node
);
8270 tree temp
= create_tmp_var (type
);
8271 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8272 OMP_CLAUSE_DECL (c
) = temp
;
8273 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
8274 gimple_omp_for_set_clauses (stmt
, c
);
8275 lower_omp_task_reductions (ctx
, OMP_FOR
,
8276 gimple_omp_for_clauses (stmt
),
8277 &tred_ilist
, &tred_dlist
);
8279 rtmp
= make_ssa_name (type
);
8280 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
8283 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
8285 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
8286 gimple_omp_for_pre_body (stmt
));
8288 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8290 /* Lower the header expressions. At this point, we can assume that
8291 the header is of the form:
8293 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
8295 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
8296 using the .omp_data_s mapping, if needed. */
8297 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
8299 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
8300 if (!is_gimple_min_invariant (*rhs_p
))
8301 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
8302 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
8303 recompute_tree_invariant_for_addr_expr (*rhs_p
);
8305 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
8306 if (!is_gimple_min_invariant (*rhs_p
))
8307 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
8308 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
8309 recompute_tree_invariant_for_addr_expr (*rhs_p
);
8311 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
8312 if (!is_gimple_min_invariant (*rhs_p
))
8313 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
8316 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
8318 gimple_seq_add_seq (&body
, cnt_list
);
8320 /* Once lowered, extract the bounds and clauses. */
8321 omp_extract_for_data (stmt
, &fd
, NULL
);
8323 if (is_gimple_omp_oacc (ctx
->stmt
)
8324 && !ctx_in_oacc_kernels_region (ctx
))
8325 lower_oacc_head_tail (gimple_location (stmt
),
8326 gimple_omp_for_clauses (stmt
),
8327 &oacc_head
, &oacc_tail
, ctx
);
8329 /* Add OpenACC partitioning and reduction markers just before the loop. */
8331 gimple_seq_add_seq (&body
, oacc_head
);
8333 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, ctx
);
8335 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
8336 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
8337 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
8338 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
8340 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
8341 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
8342 OMP_CLAUSE_LINEAR_STEP (c
)
8343 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
8347 bool phony_loop
= (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
8348 && gimple_omp_for_grid_phony (stmt
));
8350 gimple_seq_add_stmt (&body
, stmt
);
8351 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
8354 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
8357 /* After the loop, add exit clauses. */
8358 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, ctx
);
8360 if (ctx
->cancellable
)
8361 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
8363 gimple_seq_add_seq (&body
, dlist
);
8367 gimple_seq_add_seq (&tred_ilist
, body
);
8371 body
= maybe_catch_exception (body
);
8375 /* Region exit marker goes at the end of the loop body. */
8376 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
8377 gimple_seq_add_stmt (&body
, g
);
8379 gimple_seq_add_seq (&body
, tred_dlist
);
8381 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
8384 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
8387 /* Add OpenACC joining and reduction markers just after the loop. */
8389 gimple_seq_add_seq (&body
, oacc_tail
);
8391 pop_gimplify_context (new_stmt
);
8393 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
8394 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
8395 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
8396 if (BLOCK_VARS (block
))
8397 TREE_USED (block
) = 1;
8399 gimple_bind_set_body (new_stmt
, body
);
8400 gimple_omp_set_body (stmt
, NULL
);
8401 gimple_omp_for_set_pre_body (stmt
, NULL
);
8404 /* Callback for walk_stmts. Check if the current statement only contains
8405 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
8408 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
8409 bool *handled_ops_p
,
8410 struct walk_stmt_info
*wi
)
8412 int *info
= (int *) wi
->info
;
8413 gimple
*stmt
= gsi_stmt (*gsi_p
);
8415 *handled_ops_p
= true;
8416 switch (gimple_code (stmt
))
8422 case GIMPLE_OMP_FOR
:
8423 case GIMPLE_OMP_SECTIONS
:
8424 *info
= *info
== 0 ? 1 : -1;
8433 struct omp_taskcopy_context
8435 /* This field must be at the beginning, as we do "inheritance": Some
8436 callback functions for tree-inline.c (e.g., omp_copy_decl)
8437 receive a copy_body_data pointer that is up-casted to an
8438 omp_context pointer. */
8444 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
8446 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
8448 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
8449 return create_tmp_var (TREE_TYPE (var
));
8455 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
8457 tree name
, new_fields
= NULL
, type
, f
;
8459 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
8460 name
= DECL_NAME (TYPE_NAME (orig_type
));
8461 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
8462 TYPE_DECL
, name
, type
);
8463 TYPE_NAME (type
) = name
;
8465 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
8467 tree new_f
= copy_node (f
);
8468 DECL_CONTEXT (new_f
) = type
;
8469 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
8470 TREE_CHAIN (new_f
) = new_fields
;
8471 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
8472 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
8473 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
8476 tcctx
->cb
.decl_map
->put (f
, new_f
);
8478 TYPE_FIELDS (type
) = nreverse (new_fields
);
8483 /* Create task copyfn. */
8486 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
8488 struct function
*child_cfun
;
8489 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
8490 tree record_type
, srecord_type
, bind
, list
;
8491 bool record_needs_remap
= false, srecord_needs_remap
= false;
8493 struct omp_taskcopy_context tcctx
;
8494 location_t loc
= gimple_location (task_stmt
);
8495 size_t looptempno
= 0;
8497 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
8498 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
8499 gcc_assert (child_cfun
->cfg
== NULL
);
8500 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
8502 /* Reset DECL_CONTEXT on function arguments. */
8503 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
8504 DECL_CONTEXT (t
) = child_fn
;
8506 /* Populate the function. */
8507 push_gimplify_context ();
8508 push_cfun (child_cfun
);
8510 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
8511 TREE_SIDE_EFFECTS (bind
) = 1;
8513 DECL_SAVED_TREE (child_fn
) = bind
;
8514 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
8516 /* Remap src and dst argument types if needed. */
8517 record_type
= ctx
->record_type
;
8518 srecord_type
= ctx
->srecord_type
;
8519 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
8520 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
8522 record_needs_remap
= true;
8525 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
8526 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
8528 srecord_needs_remap
= true;
8532 if (record_needs_remap
|| srecord_needs_remap
)
8534 memset (&tcctx
, '\0', sizeof (tcctx
));
8535 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
8536 tcctx
.cb
.dst_fn
= child_fn
;
8537 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
8538 gcc_checking_assert (tcctx
.cb
.src_node
);
8539 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
8540 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
8541 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
8542 tcctx
.cb
.eh_lp_nr
= 0;
8543 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
8544 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
8547 if (record_needs_remap
)
8548 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
8549 if (srecord_needs_remap
)
8550 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
8553 tcctx
.cb
.decl_map
= NULL
;
8555 arg
= DECL_ARGUMENTS (child_fn
);
8556 TREE_TYPE (arg
) = build_pointer_type (record_type
);
8557 sarg
= DECL_CHAIN (arg
);
8558 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
8560 /* First pass: initialize temporaries used in record_type and srecord_type
8561 sizes and field offsets. */
8562 if (tcctx
.cb
.decl_map
)
8563 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
8564 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
8568 decl
= OMP_CLAUSE_DECL (c
);
8569 p
= tcctx
.cb
.decl_map
->get (decl
);
8572 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
8573 sf
= (tree
) n
->value
;
8574 sf
= *tcctx
.cb
.decl_map
->get (sf
);
8575 src
= build_simple_mem_ref_loc (loc
, sarg
);
8576 src
= omp_build_component_ref (src
, sf
);
8577 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
8578 append_to_statement_list (t
, &list
);
8581 /* Second pass: copy shared var pointers and copy construct non-VLA
8582 firstprivate vars. */
8583 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
8584 switch (OMP_CLAUSE_CODE (c
))
8587 case OMP_CLAUSE_SHARED
:
8588 decl
= OMP_CLAUSE_DECL (c
);
8589 key
= (splay_tree_key
) decl
;
8590 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
8591 key
= (splay_tree_key
) &DECL_UID (decl
);
8592 n
= splay_tree_lookup (ctx
->field_map
, key
);
8595 f
= (tree
) n
->value
;
8596 if (tcctx
.cb
.decl_map
)
8597 f
= *tcctx
.cb
.decl_map
->get (f
);
8598 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
8599 sf
= (tree
) n
->value
;
8600 if (tcctx
.cb
.decl_map
)
8601 sf
= *tcctx
.cb
.decl_map
->get (sf
);
8602 src
= build_simple_mem_ref_loc (loc
, sarg
);
8603 src
= omp_build_component_ref (src
, sf
);
8604 dst
= build_simple_mem_ref_loc (loc
, arg
);
8605 dst
= omp_build_component_ref (dst
, f
);
8606 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
8607 append_to_statement_list (t
, &list
);
8609 case OMP_CLAUSE_REDUCTION
:
8610 case OMP_CLAUSE_IN_REDUCTION
:
8611 decl
= OMP_CLAUSE_DECL (c
);
8612 if (TREE_CODE (decl
) == MEM_REF
)
8614 decl
= TREE_OPERAND (decl
, 0);
8615 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
8616 decl
= TREE_OPERAND (decl
, 0);
8617 if (TREE_CODE (decl
) == INDIRECT_REF
8618 || TREE_CODE (decl
) == ADDR_EXPR
)
8619 decl
= TREE_OPERAND (decl
, 0);
8621 key
= (splay_tree_key
) decl
;
8622 n
= splay_tree_lookup (ctx
->field_map
, key
);
8625 f
= (tree
) n
->value
;
8626 if (tcctx
.cb
.decl_map
)
8627 f
= *tcctx
.cb
.decl_map
->get (f
);
8628 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
8629 sf
= (tree
) n
->value
;
8630 if (tcctx
.cb
.decl_map
)
8631 sf
= *tcctx
.cb
.decl_map
->get (sf
);
8632 src
= build_simple_mem_ref_loc (loc
, sarg
);
8633 src
= omp_build_component_ref (src
, sf
);
8634 if (decl
!= OMP_CLAUSE_DECL (c
)
8635 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
8636 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
8637 src
= build_simple_mem_ref_loc (loc
, src
);
8638 dst
= build_simple_mem_ref_loc (loc
, arg
);
8639 dst
= omp_build_component_ref (dst
, f
);
8640 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
8641 append_to_statement_list (t
, &list
);
8643 case OMP_CLAUSE__LOOPTEMP_
:
8644 /* Fields for first two _looptemp_ clauses are initialized by
8645 GOMP_taskloop*, the rest are handled like firstprivate. */
8652 case OMP_CLAUSE__REDUCTEMP_
:
8653 case OMP_CLAUSE_FIRSTPRIVATE
:
8654 decl
= OMP_CLAUSE_DECL (c
);
8655 if (is_variable_sized (decl
))
8657 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
8660 f
= (tree
) n
->value
;
8661 if (tcctx
.cb
.decl_map
)
8662 f
= *tcctx
.cb
.decl_map
->get (f
);
8663 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
8666 sf
= (tree
) n
->value
;
8667 if (tcctx
.cb
.decl_map
)
8668 sf
= *tcctx
.cb
.decl_map
->get (sf
);
8669 src
= build_simple_mem_ref_loc (loc
, sarg
);
8670 src
= omp_build_component_ref (src
, sf
);
8671 if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
8672 src
= build_simple_mem_ref_loc (loc
, src
);
8676 dst
= build_simple_mem_ref_loc (loc
, arg
);
8677 dst
= omp_build_component_ref (dst
, f
);
8678 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
8679 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
8681 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
8682 append_to_statement_list (t
, &list
);
8684 case OMP_CLAUSE_PRIVATE
:
8685 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
8687 decl
= OMP_CLAUSE_DECL (c
);
8688 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
8689 f
= (tree
) n
->value
;
8690 if (tcctx
.cb
.decl_map
)
8691 f
= *tcctx
.cb
.decl_map
->get (f
);
8692 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
8695 sf
= (tree
) n
->value
;
8696 if (tcctx
.cb
.decl_map
)
8697 sf
= *tcctx
.cb
.decl_map
->get (sf
);
8698 src
= build_simple_mem_ref_loc (loc
, sarg
);
8699 src
= omp_build_component_ref (src
, sf
);
8700 if (use_pointer_for_field (decl
, NULL
))
8701 src
= build_simple_mem_ref_loc (loc
, src
);
8705 dst
= build_simple_mem_ref_loc (loc
, arg
);
8706 dst
= omp_build_component_ref (dst
, f
);
8707 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
8708 append_to_statement_list (t
, &list
);
8714 /* Last pass: handle VLA firstprivates. */
8715 if (tcctx
.cb
.decl_map
)
8716 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
8717 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
8721 decl
= OMP_CLAUSE_DECL (c
);
8722 if (!is_variable_sized (decl
))
8724 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
8727 f
= (tree
) n
->value
;
8728 f
= *tcctx
.cb
.decl_map
->get (f
);
8729 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
8730 ind
= DECL_VALUE_EXPR (decl
);
8731 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
8732 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
8733 n
= splay_tree_lookup (ctx
->sfield_map
,
8734 (splay_tree_key
) TREE_OPERAND (ind
, 0));
8735 sf
= (tree
) n
->value
;
8736 sf
= *tcctx
.cb
.decl_map
->get (sf
);
8737 src
= build_simple_mem_ref_loc (loc
, sarg
);
8738 src
= omp_build_component_ref (src
, sf
);
8739 src
= build_simple_mem_ref_loc (loc
, src
);
8740 dst
= build_simple_mem_ref_loc (loc
, arg
);
8741 dst
= omp_build_component_ref (dst
, f
);
8742 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
8743 append_to_statement_list (t
, &list
);
8744 n
= splay_tree_lookup (ctx
->field_map
,
8745 (splay_tree_key
) TREE_OPERAND (ind
, 0));
8746 df
= (tree
) n
->value
;
8747 df
= *tcctx
.cb
.decl_map
->get (df
);
8748 ptr
= build_simple_mem_ref_loc (loc
, arg
);
8749 ptr
= omp_build_component_ref (ptr
, df
);
8750 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
8751 build_fold_addr_expr_loc (loc
, dst
));
8752 append_to_statement_list (t
, &list
);
8755 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
8756 append_to_statement_list (t
, &list
);
8758 if (tcctx
.cb
.decl_map
)
8759 delete tcctx
.cb
.decl_map
;
8760 pop_gimplify_context (NULL
);
8761 BIND_EXPR_BODY (bind
) = list
;
/* Lower the OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into an
   addressable array of pointers handed to the runtime: setup stores are
   appended to *ISEQ and teardown (a clobber of the array) to *OSEQ.
   A new OMP_CLAUSE_DEPEND_LAST clause whose decl is &array is chained
   in front of the original clause list.
   NOTE(review): this extract is missing several original lines (the
   cnt[] increments, braces, the *pclauses store) -- confirm details
   against the full source.  */
8766 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
/* cnt[] tallies depend clauses per kind bucket; idx starts at 2 for the
   two header slots (slot 0 and slot 1) written below.  It presumably
   grows to 5 when the extra per-bucket count slots are needed -- the
   increments are not visible in this extract.  */
8770 size_t cnt
[4] = { 0, 0, 0, 0 }, idx
= 2, i
;
8772 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
)
;
8773 gcc_assert (clauses
);
/* First walk: classify every depend clause by its kind so the array
   can be sized and the per-kind counts emitted.  */
8774 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8775 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
8776 switch (OMP_CLAUSE_DEPEND_KIND (c
))
8778 case OMP_CLAUSE_DEPEND_LAST
:
8779 /* Lowering already done at gimplification. */
8781 case OMP_CLAUSE_DEPEND_IN
:
8784 case OMP_CLAUSE_DEPEND_OUT
:
8785 case OMP_CLAUSE_DEPEND_INOUT
:
8788 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
8791 case OMP_CLAUSE_DEPEND_DEPOBJ
:
8794 case OMP_CLAUSE_DEPEND_SOURCE
:
8795 case OMP_CLAUSE_DEPEND_SINK
:
8800 if (cnt
[1] || cnt
[3])
/* Array layout: slot 0 is written as 0, slot 1 holds the total number
   of depend addresses, then one count slot per bucket (1 or 3 slots
   depending on idx == 5), followed by the addresses themselves.  */
8802 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3];
8803 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
);
8804 tree array
= create_tmp_var (type
);
/* The runtime receives &array, so it must live in memory.  */
8805 TREE_ADDRESSABLE (array
) = 1;
8806 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
8810 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
8811 gimple_seq_add_stmt (iseq
, g
);
8812 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
8815 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
8816 gimple_seq_add_stmt (iseq
, g
);
/* Emit the per-bucket counts: three count slots in the extended
   (idx == 5) layout, otherwise a single one.  */
8817 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
8819 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
8820 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
8821 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
8822 gimple_seq_add_stmt (iseq
, g
);
/* Second walk, once per bucket: store the address of each depend
   clause's decl into the array in bucket order.  */
8824 for (i
= 0; i
< 4; i
++)
8828 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8829 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
8833 switch (OMP_CLAUSE_DEPEND_KIND (c
))
8835 case OMP_CLAUSE_DEPEND_IN
:
8839 case OMP_CLAUSE_DEPEND_OUT
:
8840 case OMP_CLAUSE_DEPEND_INOUT
:
8844 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
8848 case OMP_CLAUSE_DEPEND_DEPOBJ
:
/* Convert the dependence address to a generic pointer and gimplify it
   into *ISEQ before storing it at the next free array slot.  */
8855 tree t
= OMP_CLAUSE_DECL (c
);
8856 t
= fold_convert (ptr_type_node
, t
);
8857 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
8858 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
8859 NULL_TREE
, NULL_TREE
);
8860 g
= gimple_build_assign (r
, t
);
8861 gimple_seq_add_stmt (iseq
, g
);
/* Chain a DEPEND_LAST clause carrying &array in front of the original
   clause list so later expansion finds the lowered form.  */
8864 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
8865 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
8866 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
8867 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
/* Mark the array dead after the construct: an empty constructor with
   TREE_THIS_VOLATILE set is GCC's clobber idiom.  */
8869 tree clobber
= build_constructor (type
, NULL
);
8870 TREE_THIS_VOLATILE (clobber
) = 1;
8871 g
= gimple_build_assign (array
, clobber
);
8872 gimple_seq_add_stmt (oseq
, g
);
8875 /* Lower the OpenMP parallel or task directive in the current statement
8876 in GSI_P. CTX holds context information for the directive. */
8879 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8883 gimple
*stmt
= gsi_stmt (*gsi_p
);
8884 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
8885 gimple_seq par_body
;
8886 location_t loc
= gimple_location (stmt
);
8888 clauses
= gimple_omp_taskreg_clauses (stmt
);
/* A task with taskwait_p set ("taskwait depend") has no body of its
   own; it is handled specially below.  */
8889 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
8890 && gimple_omp_task_taskwait_p (stmt
))
8898 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
8899 par_body
= gimple_bind_body (par_bind
);
8901 child_fn
= ctx
->cb
.dst_fn
;
/* If this parallel is not already marked combined, walk its body with
   check_combined_parallel to detect the combined parallel-workshare
   pattern and mark it (the wi.val_only/info checks between the walk
   and the set are not visible in this extract).  */
8902 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
8903 && !gimple_omp_parallel_combined_p (stmt
))
8905 struct walk_stmt_info wi
;
8908 memset (&wi
, 0, sizeof (wi
));
8911 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
8913 gimple_omp_parallel_set_combined_p (stmt
, true);
/* For a task with depend clauses, lower those first; the setup and
   teardown sequences end up wrapped around the construct in DEP_BIND.  */
8915 gimple_seq dep_ilist
= NULL
;
8916 gimple_seq dep_olist
= NULL
;
8917 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
8918 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
8920 push_gimplify_context ();
8921 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
8922 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
8923 &dep_ilist
, &dep_olist
);
/* "taskwait depend": nothing else to lower -- replace the statement
   with DEP_BIND containing setup, the stmt itself, and teardown, then
   finish (the early return is not visible in this extract).  */
8926 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
8927 && gimple_omp_task_taskwait_p (stmt
))
8931 gsi_replace (gsi_p
, dep_bind
, true);
8932 gimple_bind_add_seq (dep_bind
, dep_ilist
);
8933 gimple_bind_add_stmt (dep_bind
, stmt
);
8934 gimple_bind_add_seq (dep_bind
, dep_olist
);
8935 pop_gimplify_context (dep_bind
);
/* A task with an srecord_type needs a copy function to duplicate the
   firstprivate data block.  */
8940 if (ctx
->srecord_type
)
8941 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
/* Task reductions (taskloop reduction, or parallel with _REDUCTEMP_)
   also need surrounding setup/teardown sequences; reuse or create
   DEP_BIND to hold them.  */
8943 gimple_seq tskred_ilist
= NULL
;
8944 gimple_seq tskred_olist
= NULL
;
8945 if ((is_task_ctx (ctx
)
8946 && gimple_omp_task_taskloop_p (ctx
->stmt
)
8947 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
8948 OMP_CLAUSE_REDUCTION
))
8949 || (is_parallel_ctx (ctx
)
8950 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
8951 OMP_CLAUSE__REDUCTEMP_
)))
8953 if (dep_bind
== NULL
)
8955 push_gimplify_context ();
8956 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
8958 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
8960 gimple_omp_taskreg_clauses (ctx
->stmt
),
8961 &tskred_ilist
, &tskred_olist
);
8964 push_gimplify_context ();
8966 gimple_seq par_olist
= NULL
;
8967 gimple_seq par_ilist
= NULL
;
8968 gimple_seq par_rlist
= NULL
;
/* Grid-phony parallels (HSA gridification) are lowered in place rather
   than outlined; they still need a receiver decl if data is shared.  */
8969 bool phony_construct
= gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
8970 && gimple_omp_parallel_grid_phony (as_a
<gomp_parallel
*> (stmt
));
8971 if (phony_construct
&& ctx
->record_type
)
8973 gcc_checking_assert (!ctx
->receiver_decl
);
8974 ctx
->receiver_decl
= create_tmp_var
8975 (build_reference_type (ctx
->record_type
), ".omp_rec");
/* Lower data-sharing clauses and then the construct body itself;
   reductions only apply to parallel here.  */
8977 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
8978 lower_omp (&par_body
, ctx
);
8979 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
)
8980 lower_reduction_clauses (clauses
, &par_rlist
, ctx
);
8982 /* Declare all the variables created by mapping and the variables
8983 declared in the scope of the parallel body. */
8984 record_vars_into (ctx
->block_vars
, child_fn
);
8985 maybe_remove_omp_member_access_dummy_vars (par_bind
);
8986 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
/* Build the sender decl (.omp_data_o) that marshals shared data to the
   outlined child function, and attach it as the construct's data arg.  */
8988 if (ctx
->record_type
)
8991 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
8992 : ctx
->record_type
, ".omp_data_o");
8993 DECL_NAMELESS (ctx
->sender_decl
) = 1;
8994 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
8995 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
8998 gimple_seq olist
= NULL
;
8999 gimple_seq ilist
= NULL
;
9000 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
9001 lower_send_shared_vars (&ilist
, &olist
, ctx
);
/* Clobber the sender record after the construct so its storage can be
   reused (volatile empty constructor = GCC clobber idiom).  */
9003 if (ctx
->record_type
)
9005 tree clobber
= build_constructor (TREE_TYPE (ctx
->sender_decl
), NULL
);
9006 TREE_THIS_VOLATILE (clobber
) = 1;
9007 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
9011 /* Once all the expansions are done, sequence all the different
9012 fragments inside gimple_omp_body. */
9014 gimple_seq new_body
= NULL
;
/* The child starts by loading its receiver decl from the address of
   the sender record.  */
9016 if (ctx
->record_type
)
9018 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
9019 /* fixup_child_record_type might have changed receiver_decl's type. */
9020 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
9021 gimple_seq_add_stmt (&new_body
,
9022 gimple_build_assign (ctx
->receiver_decl
, t
));
/* Body order: input-clause setup, the lowered body, reductions, the
   cancellation label (if cancellable), then output-clause teardown.  */
9025 gimple_seq_add_seq (&new_body
, par_ilist
);
9026 gimple_seq_add_seq (&new_body
, par_body
);
9027 gimple_seq_add_seq (&new_body
, par_rlist
);
9028 if (ctx
->cancellable
)
9029 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
9030 gimple_seq_add_seq (&new_body
, par_olist
);
9031 new_body
= maybe_catch_exception (new_body
);
9032 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
9033 gimple_seq_add_stmt (&new_body
,
9034 gimple_build_omp_continue (integer_zero_node
,
9035 integer_zero_node
));
9036 if (!phony_construct
)
9038 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
9039 gimple_omp_set_body (stmt
, new_body
);
/* Rebuild the enclosing bind (reusing PAR_BIND's block when possible)
   and splice everything back in place of the original statement.  */
9042 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
9043 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
9045 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
9046 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
9047 gimple_bind_add_seq (bind
, ilist
);
9048 if (!phony_construct
)
9049 gimple_bind_add_stmt (bind
, stmt
);
9051 gimple_bind_add_seq (bind
, new_body
);
9052 gimple_bind_add_seq (bind
, olist
);
9054 pop_gimplify_context (NULL
);
/* When DEP_BIND exists, nest BIND inside it between the depend /
   task-reduction setup sequences and their teardown sequences.  */
9058 gimple_bind_add_seq (dep_bind
, dep_ilist
);
9059 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
9060 gimple_bind_add_stmt (dep_bind
, bind
);
9061 gimple_bind_add_seq (dep_bind
, tskred_olist
);
9062 gimple_bind_add_seq (dep_bind
, dep_olist
);
9063 pop_gimplify_context (dep_bind
);
9067 /* Lower the GIMPLE_OMP_TARGET in the current statement
9068 in GSI_P. CTX holds context information for the directive. */
9071 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9074 tree child_fn
, t
, c
;
9075 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
9076 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
9077 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
9078 location_t loc
= gimple_location (stmt
);
9079 bool offloaded
, data_region
;
9080 unsigned int map_cnt
= 0;
9082 offloaded
= is_gimple_omp_offloaded (stmt
);
9083 switch (gimple_omp_target_kind (stmt
))
9085 case GF_OMP_TARGET_KIND_REGION
:
9086 case GF_OMP_TARGET_KIND_UPDATE
:
9087 case GF_OMP_TARGET_KIND_ENTER_DATA
:
9088 case GF_OMP_TARGET_KIND_EXIT_DATA
:
9089 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
9090 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
9091 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
9092 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
9093 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
9094 data_region
= false;
9096 case GF_OMP_TARGET_KIND_DATA
:
9097 case GF_OMP_TARGET_KIND_OACC_DATA
:
9098 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
9105 clauses
= gimple_omp_target_clauses (stmt
);
9107 gimple_seq dep_ilist
= NULL
;
9108 gimple_seq dep_olist
= NULL
;
9109 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
9111 push_gimplify_context ();
9112 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
9113 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
9114 &dep_ilist
, &dep_olist
);
9121 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
9122 tgt_body
= gimple_bind_body (tgt_bind
);
9124 else if (data_region
)
9125 tgt_body
= gimple_omp_body (stmt
);
9126 child_fn
= ctx
->cb
.dst_fn
;
9128 push_gimplify_context ();
9131 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
9132 switch (OMP_CLAUSE_CODE (c
))
9138 case OMP_CLAUSE_MAP
:
9140 /* First check what we're prepared to handle in the following. */
9141 switch (OMP_CLAUSE_MAP_KIND (c
))
9143 case GOMP_MAP_ALLOC
:
9146 case GOMP_MAP_TOFROM
:
9147 case GOMP_MAP_POINTER
:
9148 case GOMP_MAP_TO_PSET
:
9149 case GOMP_MAP_DELETE
:
9150 case GOMP_MAP_RELEASE
:
9151 case GOMP_MAP_ALWAYS_TO
:
9152 case GOMP_MAP_ALWAYS_FROM
:
9153 case GOMP_MAP_ALWAYS_TOFROM
:
9154 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
9155 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
9156 case GOMP_MAP_STRUCT
:
9157 case GOMP_MAP_ALWAYS_POINTER
:
9159 case GOMP_MAP_FORCE_ALLOC
:
9160 case GOMP_MAP_FORCE_TO
:
9161 case GOMP_MAP_FORCE_FROM
:
9162 case GOMP_MAP_FORCE_TOFROM
:
9163 case GOMP_MAP_FORCE_PRESENT
:
9164 case GOMP_MAP_FORCE_DEVICEPTR
:
9165 case GOMP_MAP_DEVICE_RESIDENT
:
9167 gcc_assert (is_gimple_omp_oacc (stmt
));
9175 case OMP_CLAUSE_FROM
:
9177 var
= OMP_CLAUSE_DECL (c
);
9180 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
9181 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
9182 && (OMP_CLAUSE_MAP_KIND (c
)
9183 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
9189 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
9191 tree var2
= DECL_VALUE_EXPR (var
);
9192 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
9193 var2
= TREE_OPERAND (var2
, 0);
9194 gcc_assert (DECL_P (var2
));
9199 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9200 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9201 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9203 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
9205 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
9206 && varpool_node::get_create (var
)->offloadable
)
9209 tree type
= build_pointer_type (TREE_TYPE (var
));
9210 tree new_var
= lookup_decl (var
, ctx
);
9211 x
= create_tmp_var_raw (type
, get_name (new_var
));
9212 gimple_add_tmp_var (x
);
9213 x
= build_simple_mem_ref (x
);
9214 SET_DECL_VALUE_EXPR (new_var
, x
);
9215 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9220 if (!maybe_lookup_field (var
, ctx
))
9223 /* Don't remap oacc parallel reduction variables, because the
9224 intermediate result must be local to each gang. */
9225 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9226 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
9228 x
= build_receiver_ref (var
, true, ctx
);
9229 tree new_var
= lookup_decl (var
, ctx
);
9231 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9232 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
9233 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
9234 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
9235 x
= build_simple_mem_ref (x
);
9236 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
9238 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
9239 if (omp_is_reference (new_var
)
9240 && TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
)
9242 /* Create a local object to hold the instance
9244 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
9245 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
9246 tree inst
= create_tmp_var (type
, id
);
9247 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
9248 x
= build_fold_addr_expr (inst
);
9250 gimplify_assign (new_var
, x
, &fplist
);
9252 else if (DECL_P (new_var
))
9254 SET_DECL_VALUE_EXPR (new_var
, x
);
9255 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9263 case OMP_CLAUSE_FIRSTPRIVATE
:
9264 if (is_oacc_parallel (ctx
))
9265 goto oacc_firstprivate
;
9267 var
= OMP_CLAUSE_DECL (c
);
9268 if (!omp_is_reference (var
)
9269 && !is_gimple_reg_type (TREE_TYPE (var
)))
9271 tree new_var
= lookup_decl (var
, ctx
);
9272 if (is_variable_sized (var
))
9274 tree pvar
= DECL_VALUE_EXPR (var
);
9275 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
9276 pvar
= TREE_OPERAND (pvar
, 0);
9277 gcc_assert (DECL_P (pvar
));
9278 tree new_pvar
= lookup_decl (pvar
, ctx
);
9279 x
= build_fold_indirect_ref (new_pvar
);
9280 TREE_THIS_NOTRAP (x
) = 1;
9283 x
= build_receiver_ref (var
, true, ctx
);
9284 SET_DECL_VALUE_EXPR (new_var
, x
);
9285 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9289 case OMP_CLAUSE_PRIVATE
:
9290 if (is_gimple_omp_oacc (ctx
->stmt
))
9292 var
= OMP_CLAUSE_DECL (c
);
9293 if (is_variable_sized (var
))
9295 tree new_var
= lookup_decl (var
, ctx
);
9296 tree pvar
= DECL_VALUE_EXPR (var
);
9297 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
9298 pvar
= TREE_OPERAND (pvar
, 0);
9299 gcc_assert (DECL_P (pvar
));
9300 tree new_pvar
= lookup_decl (pvar
, ctx
);
9301 x
= build_fold_indirect_ref (new_pvar
);
9302 TREE_THIS_NOTRAP (x
) = 1;
9303 SET_DECL_VALUE_EXPR (new_var
, x
);
9304 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9308 case OMP_CLAUSE_USE_DEVICE_PTR
:
9309 case OMP_CLAUSE_IS_DEVICE_PTR
:
9310 var
= OMP_CLAUSE_DECL (c
);
9312 if (is_variable_sized (var
))
9314 tree new_var
= lookup_decl (var
, ctx
);
9315 tree pvar
= DECL_VALUE_EXPR (var
);
9316 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
9317 pvar
= TREE_OPERAND (pvar
, 0);
9318 gcc_assert (DECL_P (pvar
));
9319 tree new_pvar
= lookup_decl (pvar
, ctx
);
9320 x
= build_fold_indirect_ref (new_pvar
);
9321 TREE_THIS_NOTRAP (x
) = 1;
9322 SET_DECL_VALUE_EXPR (new_var
, x
);
9323 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9325 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
9327 tree new_var
= lookup_decl (var
, ctx
);
9328 tree type
= build_pointer_type (TREE_TYPE (var
));
9329 x
= create_tmp_var_raw (type
, get_name (new_var
));
9330 gimple_add_tmp_var (x
);
9331 x
= build_simple_mem_ref (x
);
9332 SET_DECL_VALUE_EXPR (new_var
, x
);
9333 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9337 tree new_var
= lookup_decl (var
, ctx
);
9338 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
9339 gimple_add_tmp_var (x
);
9340 SET_DECL_VALUE_EXPR (new_var
, x
);
9341 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
9348 target_nesting_level
++;
9349 lower_omp (&tgt_body
, ctx
);
9350 target_nesting_level
--;
9352 else if (data_region
)
9353 lower_omp (&tgt_body
, ctx
);
9357 /* Declare all the variables created by mapping and the variables
9358 declared in the scope of the target body. */
9359 record_vars_into (ctx
->block_vars
, child_fn
);
9360 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
9361 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
9366 if (ctx
->record_type
)
9369 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
9370 DECL_NAMELESS (ctx
->sender_decl
) = 1;
9371 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
9372 t
= make_tree_vec (3);
9373 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
9375 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
9377 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
9378 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
9379 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
9380 tree tkind_type
= short_unsigned_type_node
;
9381 int talign_shift
= 8;
9383 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
9385 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
9386 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
9387 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
9388 gimple_omp_target_set_data_arg (stmt
, t
);
9390 vec
<constructor_elt
, va_gc
> *vsize
;
9391 vec
<constructor_elt
, va_gc
> *vkind
;
9392 vec_alloc (vsize
, map_cnt
);
9393 vec_alloc (vkind
, map_cnt
);
9394 unsigned int map_idx
= 0;
9396 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
9397 switch (OMP_CLAUSE_CODE (c
))
9399 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
9400 unsigned int talign
;
9405 case OMP_CLAUSE_MAP
:
9407 case OMP_CLAUSE_FROM
:
9408 oacc_firstprivate_map
:
9410 ovar
= OMP_CLAUSE_DECL (c
);
9411 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9412 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9413 || (OMP_CLAUSE_MAP_KIND (c
)
9414 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
9418 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9419 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
9421 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
9422 == get_base_address (ovar
));
9423 nc
= OMP_CLAUSE_CHAIN (c
);
9424 ovar
= OMP_CLAUSE_DECL (nc
);
9428 tree x
= build_sender_ref (ovar
, ctx
);
9430 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
9431 gimplify_assign (x
, v
, &ilist
);
9437 if (DECL_SIZE (ovar
)
9438 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
9440 tree ovar2
= DECL_VALUE_EXPR (ovar
);
9441 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
9442 ovar2
= TREE_OPERAND (ovar2
, 0);
9443 gcc_assert (DECL_P (ovar2
));
9446 if (!maybe_lookup_field (ovar
, ctx
))
9450 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
9451 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
9452 talign
= DECL_ALIGN_UNIT (ovar
);
9455 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
9456 x
= build_sender_ref (ovar
, ctx
);
9458 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9459 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
9460 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
9461 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
9463 gcc_assert (offloaded
);
9465 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
9466 mark_addressable (avar
);
9467 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
9468 talign
= DECL_ALIGN_UNIT (avar
);
9469 avar
= build_fold_addr_expr (avar
);
9470 gimplify_assign (x
, avar
, &ilist
);
9472 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
9474 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
9475 if (!omp_is_reference (var
))
9477 if (is_gimple_reg (var
)
9478 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
9479 TREE_NO_WARNING (var
) = 1;
9480 var
= build_fold_addr_expr (var
);
9483 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
9484 gimplify_assign (x
, var
, &ilist
);
9486 else if (is_gimple_reg (var
))
9488 gcc_assert (offloaded
);
9489 tree avar
= create_tmp_var (TREE_TYPE (var
));
9490 mark_addressable (avar
);
9491 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
9492 if (GOMP_MAP_COPY_TO_P (map_kind
)
9493 || map_kind
== GOMP_MAP_POINTER
9494 || map_kind
== GOMP_MAP_TO_PSET
9495 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
9497 /* If we need to initialize a temporary
9498 with VAR because it is not addressable, and
9499 the variable hasn't been initialized yet, then
9500 we'll get a warning for the store to avar.
9501 Don't warn in that case, the mapping might
9503 TREE_NO_WARNING (var
) = 1;
9504 gimplify_assign (avar
, var
, &ilist
);
9506 avar
= build_fold_addr_expr (avar
);
9507 gimplify_assign (x
, avar
, &ilist
);
9508 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
9509 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
9510 && !TYPE_READONLY (TREE_TYPE (var
)))
9512 x
= unshare_expr (x
);
9513 x
= build_simple_mem_ref (x
);
9514 gimplify_assign (var
, x
, &olist
);
9519 var
= build_fold_addr_expr (var
);
9520 gimplify_assign (x
, var
, &ilist
);
9524 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
9526 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
9527 s
= TREE_TYPE (ovar
);
9528 if (TREE_CODE (s
) == REFERENCE_TYPE
)
9530 s
= TYPE_SIZE_UNIT (s
);
9533 s
= OMP_CLAUSE_SIZE (c
);
9535 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
9536 s
= fold_convert (size_type_node
, s
);
9537 purpose
= size_int (map_idx
++);
9538 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
9539 if (TREE_CODE (s
) != INTEGER_CST
)
9540 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
9542 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
9543 switch (OMP_CLAUSE_CODE (c
))
9545 case OMP_CLAUSE_MAP
:
9546 tkind
= OMP_CLAUSE_MAP_KIND (c
);
9548 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
9551 case GOMP_MAP_ALLOC
:
9554 case GOMP_MAP_TOFROM
:
9555 case GOMP_MAP_ALWAYS_TO
:
9556 case GOMP_MAP_ALWAYS_FROM
:
9557 case GOMP_MAP_ALWAYS_TOFROM
:
9558 case GOMP_MAP_RELEASE
:
9559 case GOMP_MAP_FORCE_TO
:
9560 case GOMP_MAP_FORCE_FROM
:
9561 case GOMP_MAP_FORCE_TOFROM
:
9562 case GOMP_MAP_FORCE_PRESENT
:
9563 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
9565 case GOMP_MAP_DELETE
:
9566 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
9570 if (tkind_zero
!= tkind
)
9572 if (integer_zerop (s
))
9574 else if (integer_nonzerop (s
))
9578 case OMP_CLAUSE_FIRSTPRIVATE
:
9579 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
9580 tkind
= GOMP_MAP_TO
;
9584 tkind
= GOMP_MAP_TO
;
9587 case OMP_CLAUSE_FROM
:
9588 tkind
= GOMP_MAP_FROM
;
9594 gcc_checking_assert (tkind
9595 < (HOST_WIDE_INT_C (1U) << talign_shift
));
9596 gcc_checking_assert (tkind_zero
9597 < (HOST_WIDE_INT_C (1U) << talign_shift
));
9598 talign
= ceil_log2 (talign
);
9599 tkind
|= talign
<< talign_shift
;
9600 tkind_zero
|= talign
<< talign_shift
;
9601 gcc_checking_assert (tkind
9602 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
9603 gcc_checking_assert (tkind_zero
9604 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
9605 if (tkind
== tkind_zero
)
9606 x
= build_int_cstu (tkind_type
, tkind
);
9609 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
9610 x
= build3 (COND_EXPR
, tkind_type
,
9611 fold_build2 (EQ_EXPR
, boolean_type_node
,
9612 unshare_expr (s
), size_zero_node
),
9613 build_int_cstu (tkind_type
, tkind_zero
),
9614 build_int_cstu (tkind_type
, tkind
));
9616 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
9621 case OMP_CLAUSE_FIRSTPRIVATE
:
9622 if (is_oacc_parallel (ctx
))
9623 goto oacc_firstprivate_map
;
9624 ovar
= OMP_CLAUSE_DECL (c
);
9625 if (omp_is_reference (ovar
))
9626 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
9628 talign
= DECL_ALIGN_UNIT (ovar
);
9629 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
9630 x
= build_sender_ref (ovar
, ctx
);
9631 tkind
= GOMP_MAP_FIRSTPRIVATE
;
9632 type
= TREE_TYPE (ovar
);
9633 if (omp_is_reference (ovar
))
9634 type
= TREE_TYPE (type
);
9635 if ((INTEGRAL_TYPE_P (type
)
9636 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
9637 || TREE_CODE (type
) == POINTER_TYPE
)
9639 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
9641 if (omp_is_reference (var
))
9642 t
= build_simple_mem_ref (var
);
9643 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
9644 TREE_NO_WARNING (var
) = 1;
9645 if (TREE_CODE (type
) != POINTER_TYPE
)
9646 t
= fold_convert (pointer_sized_int_node
, t
);
9647 t
= fold_convert (TREE_TYPE (x
), t
);
9648 gimplify_assign (x
, t
, &ilist
);
9650 else if (omp_is_reference (var
))
9651 gimplify_assign (x
, var
, &ilist
);
9652 else if (is_gimple_reg (var
))
9654 tree avar
= create_tmp_var (TREE_TYPE (var
));
9655 mark_addressable (avar
);
9656 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
9657 TREE_NO_WARNING (var
) = 1;
9658 gimplify_assign (avar
, var
, &ilist
);
9659 avar
= build_fold_addr_expr (avar
);
9660 gimplify_assign (x
, avar
, &ilist
);
9664 var
= build_fold_addr_expr (var
);
9665 gimplify_assign (x
, var
, &ilist
);
9667 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
9669 else if (omp_is_reference (ovar
))
9670 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
9672 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
9673 s
= fold_convert (size_type_node
, s
);
9674 purpose
= size_int (map_idx
++);
9675 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
9676 if (TREE_CODE (s
) != INTEGER_CST
)
9677 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
9679 gcc_checking_assert (tkind
9680 < (HOST_WIDE_INT_C (1U) << talign_shift
));
9681 talign
= ceil_log2 (talign
);
9682 tkind
|= talign
<< talign_shift
;
9683 gcc_checking_assert (tkind
9684 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
9685 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
9686 build_int_cstu (tkind_type
, tkind
));
9689 case OMP_CLAUSE_USE_DEVICE_PTR
:
9690 case OMP_CLAUSE_IS_DEVICE_PTR
:
9691 ovar
= OMP_CLAUSE_DECL (c
);
9692 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
9693 x
= build_sender_ref (ovar
, ctx
);
9694 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
9695 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
9697 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
9698 type
= TREE_TYPE (ovar
);
9699 if (TREE_CODE (type
) == ARRAY_TYPE
)
9700 var
= build_fold_addr_expr (var
);
9703 if (omp_is_reference (ovar
))
9705 type
= TREE_TYPE (type
);
9706 if (TREE_CODE (type
) != ARRAY_TYPE
)
9707 var
= build_simple_mem_ref (var
);
9708 var
= fold_convert (TREE_TYPE (x
), var
);
9711 gimplify_assign (x
, var
, &ilist
);
9713 purpose
= size_int (map_idx
++);
9714 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
9715 gcc_checking_assert (tkind
9716 < (HOST_WIDE_INT_C (1U) << talign_shift
));
9717 gcc_checking_assert (tkind
9718 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
9719 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
9720 build_int_cstu (tkind_type
, tkind
));
9724 gcc_assert (map_idx
== map_cnt
);
9726 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
9727 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
9728 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
9729 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
9730 for (int i
= 1; i
<= 2; i
++)
9731 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
9733 gimple_seq initlist
= NULL
;
9734 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
9735 TREE_VEC_ELT (t
, i
)),
9736 &initlist
, true, NULL_TREE
);
9737 gimple_seq_add_seq (&ilist
, initlist
);
9739 tree clobber
= build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, i
)),
9741 TREE_THIS_VOLATILE (clobber
) = 1;
9742 gimple_seq_add_stmt (&olist
,
9743 gimple_build_assign (TREE_VEC_ELT (t
, i
),
9747 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
9748 TREE_THIS_VOLATILE (clobber
) = 1;
9749 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
9753 /* Once all the expansions are done, sequence all the different
9754 fragments inside gimple_omp_body. */
9759 && ctx
->record_type
)
9761 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
9762 /* fixup_child_record_type might have changed receiver_decl's type. */
9763 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
9764 gimple_seq_add_stmt (&new_body
,
9765 gimple_build_assign (ctx
->receiver_decl
, t
));
9767 gimple_seq_add_seq (&new_body
, fplist
);
9769 if (offloaded
|| data_region
)
9771 tree prev
= NULL_TREE
;
9772 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
9773 switch (OMP_CLAUSE_CODE (c
))
9778 case OMP_CLAUSE_FIRSTPRIVATE
:
9779 if (is_gimple_omp_oacc (ctx
->stmt
))
9781 var
= OMP_CLAUSE_DECL (c
);
9782 if (omp_is_reference (var
)
9783 || is_gimple_reg_type (TREE_TYPE (var
)))
9785 tree new_var
= lookup_decl (var
, ctx
);
9787 type
= TREE_TYPE (var
);
9788 if (omp_is_reference (var
))
9789 type
= TREE_TYPE (type
);
9790 if ((INTEGRAL_TYPE_P (type
)
9791 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
9792 || TREE_CODE (type
) == POINTER_TYPE
)
9794 x
= build_receiver_ref (var
, false, ctx
);
9795 if (TREE_CODE (type
) != POINTER_TYPE
)
9796 x
= fold_convert (pointer_sized_int_node
, x
);
9797 x
= fold_convert (type
, x
);
9798 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
9800 if (omp_is_reference (var
))
9802 tree v
= create_tmp_var_raw (type
, get_name (var
));
9803 gimple_add_tmp_var (v
);
9804 TREE_ADDRESSABLE (v
) = 1;
9805 gimple_seq_add_stmt (&new_body
,
9806 gimple_build_assign (v
, x
));
9807 x
= build_fold_addr_expr (v
);
9809 gimple_seq_add_stmt (&new_body
,
9810 gimple_build_assign (new_var
, x
));
9814 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
9815 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
9817 gimple_seq_add_stmt (&new_body
,
9818 gimple_build_assign (new_var
, x
));
9821 else if (is_variable_sized (var
))
9823 tree pvar
= DECL_VALUE_EXPR (var
);
9824 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
9825 pvar
= TREE_OPERAND (pvar
, 0);
9826 gcc_assert (DECL_P (pvar
));
9827 tree new_var
= lookup_decl (pvar
, ctx
);
9828 x
= build_receiver_ref (var
, false, ctx
);
9829 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
9830 gimple_seq_add_stmt (&new_body
,
9831 gimple_build_assign (new_var
, x
));
9834 case OMP_CLAUSE_PRIVATE
:
9835 if (is_gimple_omp_oacc (ctx
->stmt
))
9837 var
= OMP_CLAUSE_DECL (c
);
9838 if (omp_is_reference (var
))
9840 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9841 tree new_var
= lookup_decl (var
, ctx
);
9842 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
9843 if (TREE_CONSTANT (x
))
9845 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
9847 gimple_add_tmp_var (x
);
9848 TREE_ADDRESSABLE (x
) = 1;
9849 x
= build_fold_addr_expr_loc (clause_loc
, x
);
9854 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
9855 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
9856 gimple_seq_add_stmt (&new_body
,
9857 gimple_build_assign (new_var
, x
));
9860 case OMP_CLAUSE_USE_DEVICE_PTR
:
9861 case OMP_CLAUSE_IS_DEVICE_PTR
:
9862 var
= OMP_CLAUSE_DECL (c
);
9863 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
9864 x
= build_sender_ref (var
, ctx
);
9866 x
= build_receiver_ref (var
, false, ctx
);
9867 if (is_variable_sized (var
))
9869 tree pvar
= DECL_VALUE_EXPR (var
);
9870 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
9871 pvar
= TREE_OPERAND (pvar
, 0);
9872 gcc_assert (DECL_P (pvar
));
9873 tree new_var
= lookup_decl (pvar
, ctx
);
9874 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
9875 gimple_seq_add_stmt (&new_body
,
9876 gimple_build_assign (new_var
, x
));
9878 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
9880 tree new_var
= lookup_decl (var
, ctx
);
9881 new_var
= DECL_VALUE_EXPR (new_var
);
9882 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
9883 new_var
= TREE_OPERAND (new_var
, 0);
9884 gcc_assert (DECL_P (new_var
));
9885 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
9886 gimple_seq_add_stmt (&new_body
,
9887 gimple_build_assign (new_var
, x
));
9891 tree type
= TREE_TYPE (var
);
9892 tree new_var
= lookup_decl (var
, ctx
);
9893 if (omp_is_reference (var
))
9895 type
= TREE_TYPE (type
);
9896 if (TREE_CODE (type
) != ARRAY_TYPE
)
9898 tree v
= create_tmp_var_raw (type
, get_name (var
));
9899 gimple_add_tmp_var (v
);
9900 TREE_ADDRESSABLE (v
) = 1;
9901 x
= fold_convert (type
, x
);
9902 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
9904 gimple_seq_add_stmt (&new_body
,
9905 gimple_build_assign (v
, x
));
9906 x
= build_fold_addr_expr (v
);
9909 new_var
= DECL_VALUE_EXPR (new_var
);
9910 x
= fold_convert (TREE_TYPE (new_var
), x
);
9911 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
9912 gimple_seq_add_stmt (&new_body
,
9913 gimple_build_assign (new_var
, x
));
9917 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
9918 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
9919 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
9920 or references to VLAs. */
9921 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
9922 switch (OMP_CLAUSE_CODE (c
))
9927 case OMP_CLAUSE_MAP
:
9928 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9929 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9931 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9932 poly_int64 offset
= 0;
9934 var
= OMP_CLAUSE_DECL (c
);
9936 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
9937 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
9939 && varpool_node::get_create (var
)->offloadable
)
9941 if (TREE_CODE (var
) == INDIRECT_REF
9942 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
9943 var
= TREE_OPERAND (var
, 0);
9944 if (TREE_CODE (var
) == COMPONENT_REF
)
9946 var
= get_addr_base_and_unit_offset (var
, &offset
);
9947 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
9949 else if (DECL_SIZE (var
)
9950 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
9952 tree var2
= DECL_VALUE_EXPR (var
);
9953 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
9954 var2
= TREE_OPERAND (var2
, 0);
9955 gcc_assert (DECL_P (var2
));
9958 tree new_var
= lookup_decl (var
, ctx
), x
;
9959 tree type
= TREE_TYPE (new_var
);
9961 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
9962 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
9965 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
9967 new_var
= build2 (MEM_REF
, type
,
9968 build_fold_addr_expr (new_var
),
9969 build_int_cst (build_pointer_type (type
),
9972 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
9974 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
9975 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
9976 new_var
= build2 (MEM_REF
, type
,
9977 build_fold_addr_expr (new_var
),
9978 build_int_cst (build_pointer_type (type
),
9982 is_ref
= omp_is_reference (var
);
9983 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9985 bool ref_to_array
= false;
9988 type
= TREE_TYPE (type
);
9989 if (TREE_CODE (type
) == ARRAY_TYPE
)
9991 type
= build_pointer_type (type
);
9992 ref_to_array
= true;
9995 else if (TREE_CODE (type
) == ARRAY_TYPE
)
9997 tree decl2
= DECL_VALUE_EXPR (new_var
);
9998 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
9999 decl2
= TREE_OPERAND (decl2
, 0);
10000 gcc_assert (DECL_P (decl2
));
10002 type
= TREE_TYPE (new_var
);
10004 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
10005 x
= fold_convert_loc (clause_loc
, type
, x
);
10006 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
10008 tree bias
= OMP_CLAUSE_SIZE (c
);
10010 bias
= lookup_decl (bias
, ctx
);
10011 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
10012 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
10014 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
10015 TREE_TYPE (x
), x
, bias
);
10018 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
10019 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
10020 if (is_ref
&& !ref_to_array
)
10022 tree t
= create_tmp_var_raw (type
, get_name (var
));
10023 gimple_add_tmp_var (t
);
10024 TREE_ADDRESSABLE (t
) = 1;
10025 gimple_seq_add_stmt (&new_body
,
10026 gimple_build_assign (t
, x
));
10027 x
= build_fold_addr_expr_loc (clause_loc
, t
);
10029 gimple_seq_add_stmt (&new_body
,
10030 gimple_build_assign (new_var
, x
));
10033 else if (OMP_CLAUSE_CHAIN (c
)
10034 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
10036 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
10037 == GOMP_MAP_FIRSTPRIVATE_POINTER
10038 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
10039 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
10042 case OMP_CLAUSE_PRIVATE
:
10043 var
= OMP_CLAUSE_DECL (c
);
10044 if (is_variable_sized (var
))
10046 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10047 tree new_var
= lookup_decl (var
, ctx
);
10048 tree pvar
= DECL_VALUE_EXPR (var
);
10049 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
10050 pvar
= TREE_OPERAND (pvar
, 0);
10051 gcc_assert (DECL_P (pvar
));
10052 tree new_pvar
= lookup_decl (pvar
, ctx
);
10053 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
10054 tree al
= size_int (DECL_ALIGN (var
));
10055 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
10056 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
10057 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
10058 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
10059 gimple_seq_add_stmt (&new_body
,
10060 gimple_build_assign (new_pvar
, x
));
10062 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
10064 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10065 tree new_var
= lookup_decl (var
, ctx
);
10066 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
10067 if (TREE_CONSTANT (x
))
10072 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
10073 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
10074 tree al
= size_int (TYPE_ALIGN (rtype
));
10075 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
10078 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
10079 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
10080 gimple_seq_add_stmt (&new_body
,
10081 gimple_build_assign (new_var
, x
));
10086 gimple_seq fork_seq
= NULL
;
10087 gimple_seq join_seq
= NULL
;
10089 if (is_oacc_parallel (ctx
))
10091 /* If there are reductions on the offloaded region itself, treat
10092 them as a dummy GANG loop. */
10093 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
10095 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
10096 false, NULL
, NULL
, &fork_seq
, &join_seq
, ctx
);
10099 gimple_seq_add_seq (&new_body
, fork_seq
);
10100 gimple_seq_add_seq (&new_body
, tgt_body
);
10101 gimple_seq_add_seq (&new_body
, join_seq
);
10104 new_body
= maybe_catch_exception (new_body
);
10106 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
10107 gimple_omp_set_body (stmt
, new_body
);
10110 bind
= gimple_build_bind (NULL
, NULL
,
10111 tgt_bind
? gimple_bind_block (tgt_bind
)
10113 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
10114 gimple_bind_add_seq (bind
, ilist
);
10115 gimple_bind_add_stmt (bind
, stmt
);
10116 gimple_bind_add_seq (bind
, olist
);
10118 pop_gimplify_context (NULL
);
10122 gimple_bind_add_seq (dep_bind
, dep_ilist
);
10123 gimple_bind_add_stmt (dep_bind
, bind
);
10124 gimple_bind_add_seq (dep_bind
, dep_olist
);
10125 pop_gimplify_context (dep_bind
);
10129 /* Expand code for an OpenMP teams directive. */
10132 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10134 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
10135 push_gimplify_context ();
10137 tree block
= make_node (BLOCK
);
10138 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
10139 gsi_replace (gsi_p
, bind
, true);
10140 gimple_seq bind_body
= NULL
;
10141 gimple_seq dlist
= NULL
;
10142 gimple_seq olist
= NULL
;
10144 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
10145 OMP_CLAUSE_NUM_TEAMS
);
10146 if (num_teams
== NULL_TREE
)
10147 num_teams
= build_int_cst (unsigned_type_node
, 0);
10150 num_teams
= OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams
);
10151 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
10152 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
10154 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
10155 OMP_CLAUSE_THREAD_LIMIT
);
10156 if (thread_limit
== NULL_TREE
)
10157 thread_limit
= build_int_cst (unsigned_type_node
, 0);
10160 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
10161 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
10162 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
10166 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
10167 &bind_body
, &dlist
, ctx
, NULL
);
10168 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
10169 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
, ctx
);
10170 if (!gimple_omp_teams_grid_phony (teams_stmt
))
10172 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
10173 location_t loc
= gimple_location (teams_stmt
);
10174 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS
);
10175 gimple
*call
= gimple_build_call (decl
, 2, num_teams
, thread_limit
);
10176 gimple_set_location (call
, loc
);
10177 gimple_seq_add_stmt (&bind_body
, call
);
10180 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
10181 gimple_omp_set_body (teams_stmt
, NULL
);
10182 gimple_seq_add_seq (&bind_body
, olist
);
10183 gimple_seq_add_seq (&bind_body
, dlist
);
10184 if (!gimple_omp_teams_grid_phony (teams_stmt
))
10185 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
10186 gimple_bind_set_body (bind
, bind_body
);
10188 pop_gimplify_context (bind
);
10190 gimple_bind_append_vars (bind
, ctx
->block_vars
);
10191 BLOCK_VARS (block
) = ctx
->block_vars
;
10192 if (BLOCK_VARS (block
))
10193 TREE_USED (block
) = 1;
10196 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
10199 lower_omp_grid_body (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10201 gimple
*stmt
= gsi_stmt (*gsi_p
);
10202 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10203 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt
),
10204 gimple_build_omp_return (false));
10208 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
10209 regimplified. If DATA is non-NULL, lower_omp_1 is outside
10210 of OMP context, but with task_shared_vars set. */
10213 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
10218 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
10219 if (VAR_P (t
) && data
== NULL
&& DECL_HAS_VALUE_EXPR_P (t
))
10222 if (task_shared_vars
10224 && bitmap_bit_p (task_shared_vars
, DECL_UID (t
)))
10227 /* If a global variable has been privatized, TREE_CONSTANT on
10228 ADDR_EXPR might be wrong. */
10229 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
10230 recompute_tree_invariant_for_addr_expr (t
);
10232 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
10236 /* Data to be communicated between lower_omp_regimplify_operands and
10237 lower_omp_regimplify_operands_p. */
10239 struct lower_omp_regimplify_operands_data
10245 /* Helper function for lower_omp_regimplify_operands. Find
10246 omp_member_access_dummy_var vars and adjust temporarily their
10247 DECL_VALUE_EXPRs if needed. */
10250 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
10253 tree t
= omp_member_access_dummy_var (*tp
);
10256 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
10257 lower_omp_regimplify_operands_data
*ldata
10258 = (lower_omp_regimplify_operands_data
*) wi
->info
;
10259 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
10262 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
10263 ldata
->decls
->safe_push (*tp
);
10264 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
10265 SET_DECL_VALUE_EXPR (*tp
, v
);
10268 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
10272 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
10273 of omp_member_access_dummy_var vars during regimplification. */
10276 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
10277 gimple_stmt_iterator
*gsi_p
)
10279 auto_vec
<tree
, 10> decls
;
10282 struct walk_stmt_info wi
;
10283 memset (&wi
, '\0', sizeof (wi
));
10284 struct lower_omp_regimplify_operands_data data
;
10286 data
.decls
= &decls
;
10288 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
10290 gimple_regimplify_operands (stmt
, gsi_p
);
10291 while (!decls
.is_empty ())
10293 tree t
= decls
.pop ();
10294 tree v
= decls
.pop ();
10295 SET_DECL_VALUE_EXPR (t
, v
);
10300 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10302 gimple
*stmt
= gsi_stmt (*gsi_p
);
10303 struct walk_stmt_info wi
;
10306 if (gimple_has_location (stmt
))
10307 input_location
= gimple_location (stmt
);
10309 if (task_shared_vars
)
10310 memset (&wi
, '\0', sizeof (wi
));
10312 /* If we have issued syntax errors, avoid doing any heavy lifting.
10313 Just replace the OMP directives with a NOP to avoid
10314 confusing RTL expansion. */
10315 if (seen_error () && is_gimple_omp (stmt
))
10317 gsi_replace (gsi_p
, gimple_build_nop (), true);
10321 switch (gimple_code (stmt
))
10325 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
10326 if ((ctx
|| task_shared_vars
)
10327 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
10328 lower_omp_regimplify_p
,
10329 ctx
? NULL
: &wi
, NULL
)
10330 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
10331 lower_omp_regimplify_p
,
10332 ctx
? NULL
: &wi
, NULL
)))
10333 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
10337 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
10339 case GIMPLE_EH_FILTER
:
10340 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
10343 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
10344 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
10346 case GIMPLE_TRANSACTION
:
10347 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
10351 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
10352 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
10354 case GIMPLE_OMP_PARALLEL
:
10355 case GIMPLE_OMP_TASK
:
10356 ctx
= maybe_lookup_ctx (stmt
);
10358 if (ctx
->cancellable
)
10359 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
10360 lower_omp_taskreg (gsi_p
, ctx
);
10362 case GIMPLE_OMP_FOR
:
10363 ctx
= maybe_lookup_ctx (stmt
);
10365 if (ctx
->cancellable
)
10366 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
10367 lower_omp_for (gsi_p
, ctx
);
10369 case GIMPLE_OMP_SECTIONS
:
10370 ctx
= maybe_lookup_ctx (stmt
);
10372 if (ctx
->cancellable
)
10373 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
10374 lower_omp_sections (gsi_p
, ctx
);
10376 case GIMPLE_OMP_SINGLE
:
10377 ctx
= maybe_lookup_ctx (stmt
);
10379 lower_omp_single (gsi_p
, ctx
);
10381 case GIMPLE_OMP_MASTER
:
10382 ctx
= maybe_lookup_ctx (stmt
);
10384 lower_omp_master (gsi_p
, ctx
);
10386 case GIMPLE_OMP_TASKGROUP
:
10387 ctx
= maybe_lookup_ctx (stmt
);
10389 lower_omp_taskgroup (gsi_p
, ctx
);
10391 case GIMPLE_OMP_ORDERED
:
10392 ctx
= maybe_lookup_ctx (stmt
);
10394 lower_omp_ordered (gsi_p
, ctx
);
10396 case GIMPLE_OMP_CRITICAL
:
10397 ctx
= maybe_lookup_ctx (stmt
);
10399 lower_omp_critical (gsi_p
, ctx
);
10401 case GIMPLE_OMP_ATOMIC_LOAD
:
10402 if ((ctx
|| task_shared_vars
)
10403 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
10404 as_a
<gomp_atomic_load
*> (stmt
)),
10405 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
10406 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
10408 case GIMPLE_OMP_TARGET
:
10409 ctx
= maybe_lookup_ctx (stmt
);
10411 lower_omp_target (gsi_p
, ctx
);
10413 case GIMPLE_OMP_TEAMS
:
10414 ctx
= maybe_lookup_ctx (stmt
);
10416 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
10417 lower_omp_taskreg (gsi_p
, ctx
);
10419 lower_omp_teams (gsi_p
, ctx
);
10421 case GIMPLE_OMP_GRID_BODY
:
10422 ctx
= maybe_lookup_ctx (stmt
);
10424 lower_omp_grid_body (gsi_p
, ctx
);
10428 call_stmt
= as_a
<gcall
*> (stmt
);
10429 fndecl
= gimple_call_fndecl (call_stmt
);
10431 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
10432 switch (DECL_FUNCTION_CODE (fndecl
))
10434 case BUILT_IN_GOMP_BARRIER
:
10438 case BUILT_IN_GOMP_CANCEL
:
10439 case BUILT_IN_GOMP_CANCELLATION_POINT
:
10442 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
10443 cctx
= cctx
->outer
;
10444 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
10445 if (!cctx
->cancellable
)
10447 if (DECL_FUNCTION_CODE (fndecl
)
10448 == BUILT_IN_GOMP_CANCELLATION_POINT
)
10450 stmt
= gimple_build_nop ();
10451 gsi_replace (gsi_p
, stmt
, false);
10455 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
10457 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
10458 gimple_call_set_fndecl (call_stmt
, fndecl
);
10459 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
10462 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
10463 gimple_call_set_lhs (call_stmt
, lhs
);
10464 tree fallthru_label
;
10465 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
10467 g
= gimple_build_label (fallthru_label
);
10468 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
10469 g
= gimple_build_cond (NE_EXPR
, lhs
,
10470 fold_convert (TREE_TYPE (lhs
),
10471 boolean_false_node
),
10472 cctx
->cancel_label
, fallthru_label
);
10473 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
10480 if ((ctx
|| task_shared_vars
)
10481 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
10484 /* Just remove clobbers, this should happen only if we have
10485 "privatized" local addressable variables in SIMD regions,
10486 the clobber isn't needed in that case and gimplifying address
10487 of the ARRAY_REF into a pointer and creating MEM_REF based
10488 clobber would create worse code than we get with the clobber
10490 if (gimple_clobber_p (stmt
))
10492 gsi_replace (gsi_p
, gimple_build_nop (), true);
10495 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
10502 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
10504 location_t saved_location
= input_location
;
10505 gimple_stmt_iterator gsi
;
10506 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
10507 lower_omp_1 (&gsi
, ctx
);
10508 /* During gimplification, we haven't folded statments inside offloading
10509 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
10510 if (target_nesting_level
|| taskreg_nesting_level
)
10511 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
10513 input_location
= saved_location
;
10516 /* Main entry point. */
10518 static unsigned int
10519 execute_lower_omp (void)
10525 /* This pass always runs, to provide PROP_gimple_lomp.
10526 But often, there is nothing to do. */
10527 if (flag_openacc
== 0 && flag_openmp
== 0
10528 && flag_openmp_simd
== 0)
10531 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
10532 delete_omp_context
);
10534 body
= gimple_body (current_function_decl
);
10536 if (hsa_gen_requested_p ())
10537 omp_grid_gridify_all_targets (&body
);
10539 scan_omp (&body
, NULL
);
10540 gcc_assert (taskreg_nesting_level
== 0);
10541 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
10542 finish_taskreg_scan (ctx
);
10543 taskreg_contexts
.release ();
10545 if (all_contexts
->root
)
10547 if (task_shared_vars
)
10548 push_gimplify_context ();
10549 lower_omp (&body
, NULL
);
10550 if (task_shared_vars
)
10551 pop_gimplify_context (NULL
);
10556 splay_tree_delete (all_contexts
);
10557 all_contexts
= NULL
;
10559 BITMAP_FREE (task_shared_vars
);
10561 /* If current function is a method, remove artificial dummy VAR_DECL created
10562 for non-static data member privatization, they aren't needed for
10563 debuginfo nor anything else, have been already replaced everywhere in the
10564 IL and cause problems with LTO. */
10565 if (DECL_ARGUMENTS (current_function_decl
)
10566 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
10567 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
10569 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
10575 const pass_data pass_data_lower_omp
=
10577 GIMPLE_PASS
, /* type */
10578 "omplower", /* name */
10579 OPTGROUP_OMP
, /* optinfo_flags */
10580 TV_NONE
, /* tv_id */
10581 PROP_gimple_any
, /* properties_required */
10582 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
10583 0, /* properties_destroyed */
10584 0, /* todo_flags_start */
10585 0, /* todo_flags_finish */
10588 class pass_lower_omp
: public gimple_opt_pass
10591 pass_lower_omp (gcc::context
*ctxt
)
10592 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
10595 /* opt_pass methods: */
10596 virtual unsigned int execute (function
*) { return execute_lower_omp (); }
10598 }; // class pass_lower_omp
10600 } // anon namespace
10603 make_pass_lower_omp (gcc::context
*ctxt
)
10605 return new pass_lower_omp (ctxt
);
10608 /* The following is a utility to diagnose structured block violations.
10609 It is not part of the "omplower" pass, as that's invoked too late. It
10610 should be invoked by the respective front ends after gimplification. */
10612 static splay_tree all_labels
;
10614 /* Check for mismatched contexts and generate an error if needed. Return
10615 true if an error is detected. */
10618 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
10619 gimple
*branch_ctx
, gimple
*label_ctx
)
10621 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
10622 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
10624 if (label_ctx
== branch_ctx
)
10627 const char* kind
= NULL
;
10631 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
10632 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
10634 gcc_checking_assert (kind
== NULL
);
10640 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
10644 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
10645 so we could traverse it and issue a correct "exit" or "enter" error
10646 message upon a structured block violation.
10648 We built the context by building a list with tree_cons'ing, but there is
10649 no easy counterpart in gimple tuples. It seems like far too much work
10650 for issuing exit/enter error messages. If someone really misses the
10651 distinct error message... patches welcome. */
10654 /* Try to avoid confusing the user by producing and error message
10655 with correct "exit" or "enter" verbiage. We prefer "exit"
10656 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
10657 if (branch_ctx
== NULL
)
10663 if (TREE_VALUE (label_ctx
) == branch_ctx
)
10668 label_ctx
= TREE_CHAIN (label_ctx
);
10673 error ("invalid exit from %s structured block", kind
);
10675 error ("invalid entry to %s structured block", kind
);
10678 /* If it's obvious we have an invalid entry, be specific about the error. */
10679 if (branch_ctx
== NULL
)
10680 error ("invalid entry to %s structured block", kind
);
10683 /* Otherwise, be vague and lazy, but efficient. */
10684 error ("invalid branch to/from %s structured block", kind
);
10687 gsi_replace (gsi_p
, gimple_build_nop (), false);
10691 /* Pass 1: Create a minimal tree of structured blocks, and record
10692 where each label is found. */
10695 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
10696 struct walk_stmt_info
*wi
)
10698 gimple
*context
= (gimple
*) wi
->info
;
10699 gimple
*inner_context
;
10700 gimple
*stmt
= gsi_stmt (*gsi_p
);
10702 *handled_ops_p
= true;
10704 switch (gimple_code (stmt
))
10708 case GIMPLE_OMP_PARALLEL
:
10709 case GIMPLE_OMP_TASK
:
10710 case GIMPLE_OMP_SECTIONS
:
10711 case GIMPLE_OMP_SINGLE
:
10712 case GIMPLE_OMP_SECTION
:
10713 case GIMPLE_OMP_MASTER
:
10714 case GIMPLE_OMP_ORDERED
:
10715 case GIMPLE_OMP_CRITICAL
:
10716 case GIMPLE_OMP_TARGET
:
10717 case GIMPLE_OMP_TEAMS
:
10718 case GIMPLE_OMP_TASKGROUP
:
10719 /* The minimal context here is just the current OMP construct. */
10720 inner_context
= stmt
;
10721 wi
->info
= inner_context
;
10722 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
10723 wi
->info
= context
;
10726 case GIMPLE_OMP_FOR
:
10727 inner_context
= stmt
;
10728 wi
->info
= inner_context
;
10729 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10731 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
10732 diagnose_sb_1
, NULL
, wi
);
10733 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
10734 wi
->info
= context
;
10738 splay_tree_insert (all_labels
,
10739 (splay_tree_key
) gimple_label_label (
10740 as_a
<glabel
*> (stmt
)),
10741 (splay_tree_value
) context
);
10751 /* Pass 2: Check each branch and see if its context differs from that of
10752 the destination label's context. */
10755 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
10756 struct walk_stmt_info
*wi
)
10758 gimple
*context
= (gimple
*) wi
->info
;
10760 gimple
*stmt
= gsi_stmt (*gsi_p
);
10762 *handled_ops_p
= true;
10764 switch (gimple_code (stmt
))
10768 case GIMPLE_OMP_PARALLEL
:
10769 case GIMPLE_OMP_TASK
:
10770 case GIMPLE_OMP_SECTIONS
:
10771 case GIMPLE_OMP_SINGLE
:
10772 case GIMPLE_OMP_SECTION
:
10773 case GIMPLE_OMP_MASTER
:
10774 case GIMPLE_OMP_ORDERED
:
10775 case GIMPLE_OMP_CRITICAL
:
10776 case GIMPLE_OMP_TARGET
:
10777 case GIMPLE_OMP_TEAMS
:
10778 case GIMPLE_OMP_TASKGROUP
:
10780 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
10781 wi
->info
= context
;
10784 case GIMPLE_OMP_FOR
:
10786 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10788 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
10789 diagnose_sb_2
, NULL
, wi
);
10790 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
10791 wi
->info
= context
;
10796 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
10797 tree lab
= gimple_cond_true_label (cond_stmt
);
10800 n
= splay_tree_lookup (all_labels
,
10801 (splay_tree_key
) lab
);
10802 diagnose_sb_0 (gsi_p
, context
,
10803 n
? (gimple
*) n
->value
: NULL
);
10805 lab
= gimple_cond_false_label (cond_stmt
);
10808 n
= splay_tree_lookup (all_labels
,
10809 (splay_tree_key
) lab
);
10810 diagnose_sb_0 (gsi_p
, context
,
10811 n
? (gimple
*) n
->value
: NULL
);
10818 tree lab
= gimple_goto_dest (stmt
);
10819 if (TREE_CODE (lab
) != LABEL_DECL
)
10822 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
10823 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
10827 case GIMPLE_SWITCH
:
10829 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
10831 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
10833 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
10834 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
10835 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
10841 case GIMPLE_RETURN
:
10842 diagnose_sb_0 (gsi_p
, context
, NULL
);
10852 static unsigned int
10853 diagnose_omp_structured_block_errors (void)
10855 struct walk_stmt_info wi
;
10856 gimple_seq body
= gimple_body (current_function_decl
);
10858 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
10860 memset (&wi
, 0, sizeof (wi
));
10861 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
10863 memset (&wi
, 0, sizeof (wi
));
10864 wi
.want_locations
= true;
10865 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
10867 gimple_set_body (current_function_decl
, body
);
10869 splay_tree_delete (all_labels
);
10877 const pass_data pass_data_diagnose_omp_blocks
=
10879 GIMPLE_PASS
, /* type */
10880 "*diagnose_omp_blocks", /* name */
10881 OPTGROUP_OMP
, /* optinfo_flags */
10882 TV_NONE
, /* tv_id */
10883 PROP_gimple_any
, /* properties_required */
10884 0, /* properties_provided */
10885 0, /* properties_destroyed */
10886 0, /* todo_flags_start */
10887 0, /* todo_flags_finish */
10890 class pass_diagnose_omp_blocks
: public gimple_opt_pass
10893 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
10894 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
10897 /* opt_pass methods: */
10898 virtual bool gate (function
*)
10900 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
10902 virtual unsigned int execute (function
*)
10904 return diagnose_omp_structured_block_errors ();
10907 }; // class pass_diagnose_omp_blocks
10909 } // anon namespace
10912 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
10914 return new pass_diagnose_omp_blocks (ctxt
);
10918 #include "gt-omp-low.h"