1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
/* NOTE(review): this region is an extraction-damaged fragment of the
   omp_context struct definition and the pass's file-scope state; many
   field declarations (stmt, cb, record_type, field_map, ...) appear to
   have been lost in extraction — TODO restore from the upstream file.  */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec
<tree
> task_reductions
;
122 /* A hash map from the reduction clauses to the registered array
124 hash_map
<tree
, unsigned> *task_reduction_map
;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
130 /* Nesting depth of this context. Used to beautify error messages re
131 invalid gotos. The outermost ctx is depth 1, with depth 0 being
132 reserved for the main body of the function. */
135 /* True if this parallel directive is nested within another. */
138 /* True if this construct can be cancelled. */
141 /* True if lower_omp_1 should look up lastprivate conditional in parent
143 bool combined_into_simd_safelen0
;
145 /* True if there is nested scan context with inclusive clause. */
148 /* True if there is nested scan context with exclusive clause. */
/* File-scope state for the pass: map of all gimple stmts to their
   omp_context, current nesting counters, and the set of variables whose
   address had to be taken solely because of task data sharing.  */
152 static splay_tree all_contexts
;
153 static int taskreg_nesting_level
;
154 static int target_nesting_level
;
155 static bitmap task_shared_vars
;
156 static vec
<omp_context
*> taskreg_contexts
;
158 static void scan_omp (gimple_seq
*, omp_context
*);
159 static tree
scan_omp_1_op (tree
*, int *, void *);
/* NOTE(review): extraction fragment — the case labels before
   GIMPLE_EH_FILTER (GIMPLE_BIND, GIMPLE_TRY, GIMPLE_CATCH in the
   upstream file) and the trailing `break;` appear to be missing; the
   macro is meant to be pasted into a gimple-walk switch so that
   container statements are descended into rather than handled.  */
161 #define WALK_SUBSTMTS \
165 case GIMPLE_EH_FILTER: \
166 case GIMPLE_TRANSACTION: \
167 /* The sub-statements for these should be walked. */ \
168 *handled_ops_p = false; \
171 /* Return true if CTX corresponds to an oacc parallel region. */
174 is_oacc_parallel (omp_context
*ctx
)
176 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
177 return ((outer_type
== GIMPLE_OMP_TARGET
)
178 && (gimple_omp_target_kind (ctx
->stmt
)
179 == GF_OMP_TARGET_KIND_OACC_PARALLEL
));
182 /* Return true if CTX corresponds to an oacc kernels region. */
185 is_oacc_kernels (omp_context
*ctx
)
187 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
188 return ((outer_type
== GIMPLE_OMP_TARGET
)
189 && (gimple_omp_target_kind (ctx
->stmt
)
190 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
193 /* If DECL is the artificial dummy VAR_DECL created for non-static
194 data member privatization, return the underlying "this" parameter,
195 otherwise return NULL. */
198 omp_member_access_dummy_var (tree decl
)
201 || !DECL_ARTIFICIAL (decl
)
202 || !DECL_IGNORED_P (decl
)
203 || !DECL_HAS_VALUE_EXPR_P (decl
)
204 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
207 tree v
= DECL_VALUE_EXPR (decl
);
208 if (TREE_CODE (v
) != COMPONENT_REF
)
212 switch (TREE_CODE (v
))
218 case POINTER_PLUS_EXPR
:
219 v
= TREE_OPERAND (v
, 0);
222 if (DECL_CONTEXT (v
) == current_function_decl
223 && DECL_ARTIFICIAL (v
)
224 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
232 /* Helper for unshare_and_remap, called through walk_tree. */
235 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
237 tree
*pair
= (tree
*) data
;
240 *tp
= unshare_expr (pair
[1]);
243 else if (IS_TYPE_OR_DECL_P (*tp
))
248 /* Return unshare_expr (X) with all occurrences of FROM
252 unshare_and_remap (tree x
, tree from
, tree to
)
254 tree pair
[2] = { from
, to
};
255 x
= unshare_expr (x
);
256 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
260 /* Convenience function for calling scan_omp_1_op on tree operands. */
263 scan_omp_op (tree
*tp
, omp_context
*ctx
)
265 struct walk_stmt_info wi
;
267 memset (&wi
, 0, sizeof (wi
));
269 wi
.want_locations
= true;
271 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
274 static void lower_omp (gimple_seq
*, omp_context
*);
275 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
276 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
278 /* Return true if CTX is for an omp parallel. */
281 is_parallel_ctx (omp_context
*ctx
)
283 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
287 /* Return true if CTX is for an omp task. */
290 is_task_ctx (omp_context
*ctx
)
292 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
296 /* Return true if CTX is for an omp taskloop. */
299 is_taskloop_ctx (omp_context
*ctx
)
301 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
302 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
306 /* Return true if CTX is for a host omp teams. */
309 is_host_teams_ctx (omp_context
*ctx
)
311 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
312 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
315 /* Return true if CTX is for an omp parallel or omp task or host omp teams
316 (the last one is strictly not a task region in OpenMP speak, but we
317 need to treat it similarly). */
320 is_taskreg_ctx (omp_context
*ctx
)
322 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
325 /* Return true if EXPR is variable sized. */
328 is_variable_sized (const_tree expr
)
330 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
333 /* Lookup variables. The "maybe" form
334 allows for the variable form to not have been entered, otherwise we
335 assert that the variable must have been entered. */
338 lookup_decl (tree var
, omp_context
*ctx
)
340 tree
*n
= ctx
->cb
.decl_map
->get (var
);
345 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
347 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
348 return n
? *n
: NULL_TREE
;
352 lookup_field (tree var
, omp_context
*ctx
)
355 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
356 return (tree
) n
->value
;
360 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
363 n
= splay_tree_lookup (ctx
->sfield_map
364 ? ctx
->sfield_map
: ctx
->field_map
, key
);
365 return (tree
) n
->value
;
369 lookup_sfield (tree var
, omp_context
*ctx
)
371 return lookup_sfield ((splay_tree_key
) var
, ctx
);
375 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
378 n
= splay_tree_lookup (ctx
->field_map
, key
);
379 return n
? (tree
) n
->value
: NULL_TREE
;
383 maybe_lookup_field (tree var
, omp_context
*ctx
)
385 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
388 /* Return true if DECL should be copied by pointer. SHARED_CTX is
389 the parallel context if DECL is to be shared. */
392 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
394 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
395 || TYPE_ATOMIC (TREE_TYPE (decl
)))
398 /* We can only use copy-in/copy-out semantics for shared variables
399 when we know the value is not accessible from an outer scope. */
402 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
404 /* ??? Trivially accessible from anywhere. But why would we even
405 be passing an address in this case? Should we simply assert
406 this to be false, or should we have a cleanup pass that removes
407 these from the list of mappings? */
408 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
411 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
412 without analyzing the expression whether or not its location
413 is accessible to anyone else. In the case of nested parallel
414 regions it certainly may be. */
415 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
418 /* Do not use copy-in/copy-out for variables that have their
420 if (TREE_ADDRESSABLE (decl
))
423 /* lower_send_shared_vars only uses copy-in, but not copy-out
425 if (TREE_READONLY (decl
)
426 || ((TREE_CODE (decl
) == RESULT_DECL
427 || TREE_CODE (decl
) == PARM_DECL
)
428 && DECL_BY_REFERENCE (decl
)))
431 /* Disallow copy-in/out in nested parallel if
432 decl is shared in outer parallel, otherwise
433 each thread could store the shared variable
434 in its own copy-in location, making the
435 variable no longer really shared. */
436 if (shared_ctx
->is_nested
)
440 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
441 if (is_taskreg_ctx (up
) && maybe_lookup_decl (decl
, up
))
448 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
449 c
; c
= OMP_CLAUSE_CHAIN (c
))
450 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
451 && OMP_CLAUSE_DECL (c
) == decl
)
455 goto maybe_mark_addressable_and_ret
;
459 /* For tasks avoid using copy-in/out. As tasks can be
460 deferred or executed in different thread, when GOMP_task
461 returns, the task hasn't necessarily terminated. */
462 if (is_task_ctx (shared_ctx
))
465 maybe_mark_addressable_and_ret
:
466 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
467 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
469 /* Taking address of OUTER in lower_send_shared_vars
470 might need regimplification of everything that uses the
472 if (!task_shared_vars
)
473 task_shared_vars
= BITMAP_ALLOC (NULL
);
474 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
475 TREE_ADDRESSABLE (outer
) = 1;
484 /* Construct a new automatic decl similar to VAR. */
487 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
489 tree copy
= copy_var_decl (var
, name
, type
);
491 DECL_CONTEXT (copy
) = current_function_decl
;
492 DECL_CHAIN (copy
) = ctx
->block_vars
;
493 /* If VAR is listed in task_shared_vars, it means it wasn't
494 originally addressable and is just because task needs to take
495 it's address. But we don't need to take address of privatizations
497 if (TREE_ADDRESSABLE (var
)
499 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
500 TREE_ADDRESSABLE (copy
) = 0;
501 ctx
->block_vars
= copy
;
507 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
509 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
512 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
515 omp_build_component_ref (tree obj
, tree field
)
517 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
518 if (TREE_THIS_VOLATILE (field
))
519 TREE_THIS_VOLATILE (ret
) |= 1;
520 if (TREE_READONLY (field
))
521 TREE_READONLY (ret
) |= 1;
525 /* Build tree nodes to access the field for VAR on the receiver side. */
528 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
530 tree x
, field
= lookup_field (var
, ctx
);
532 /* If the receiver record type was remapped in the child function,
533 remap the field into the new record type. */
534 x
= maybe_lookup_field (field
, ctx
);
538 x
= build_simple_mem_ref (ctx
->receiver_decl
);
539 TREE_THIS_NOTRAP (x
) = 1;
540 x
= omp_build_component_ref (x
, field
);
543 x
= build_simple_mem_ref (x
);
544 TREE_THIS_NOTRAP (x
) = 1;
550 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
551 of a parallel, this is a component reference; for workshare constructs
552 this is some variable. */
555 build_outer_var_ref (tree var
, omp_context
*ctx
,
556 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
559 omp_context
*outer
= ctx
->outer
;
560 while (outer
&& gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
561 outer
= outer
->outer
;
563 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
565 else if (is_variable_sized (var
))
567 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
568 x
= build_outer_var_ref (x
, ctx
, code
);
569 x
= build_simple_mem_ref (x
);
571 else if (is_taskreg_ctx (ctx
))
573 bool by_ref
= use_pointer_for_field (var
, NULL
);
574 x
= build_receiver_ref (var
, by_ref
, ctx
);
576 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
577 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
578 || (code
== OMP_CLAUSE_PRIVATE
579 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
580 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
581 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
583 /* #pragma omp simd isn't a worksharing construct, and can reference
584 even private vars in its linear etc. clauses.
585 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
586 to private vars in all worksharing constructs. */
588 if (outer
&& is_taskreg_ctx (outer
))
589 x
= lookup_decl (var
, outer
);
591 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
595 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
599 = splay_tree_lookup (outer
->field_map
,
600 (splay_tree_key
) &DECL_UID (var
));
603 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
606 x
= lookup_decl (var
, outer
);
610 tree field
= (tree
) n
->value
;
611 /* If the receiver record type was remapped in the child function,
612 remap the field into the new record type. */
613 x
= maybe_lookup_field (field
, outer
);
617 x
= build_simple_mem_ref (outer
->receiver_decl
);
618 x
= omp_build_component_ref (x
, field
);
619 if (use_pointer_for_field (var
, outer
))
620 x
= build_simple_mem_ref (x
);
625 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_GRID_BODY
)
627 outer
= outer
->outer
;
629 && gimple_code (outer
->stmt
) != GIMPLE_OMP_GRID_BODY
);
631 x
= lookup_decl (var
, outer
);
633 else if (omp_is_reference (var
))
634 /* This can happen with orphaned constructs. If var is reference, it is
635 possible it is shared and as such valid. */
637 else if (omp_member_access_dummy_var (var
))
644 tree t
= omp_member_access_dummy_var (var
);
647 x
= DECL_VALUE_EXPR (var
);
648 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
650 x
= unshare_and_remap (x
, t
, o
);
652 x
= unshare_expr (x
);
656 if (omp_is_reference (var
))
657 x
= build_simple_mem_ref (x
);
662 /* Build tree nodes to access the field for VAR on the sender side. */
665 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
667 tree field
= lookup_sfield (key
, ctx
);
668 return omp_build_component_ref (ctx
->sender_decl
, field
);
672 build_sender_ref (tree var
, omp_context
*ctx
)
674 return build_sender_ref ((splay_tree_key
) var
, ctx
);
677 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
678 BASE_POINTERS_RESTRICT, declare the field with restrict. */
681 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
683 tree field
, type
, sfield
= NULL_TREE
;
684 splay_tree_key key
= (splay_tree_key
) var
;
688 key
= (splay_tree_key
) &DECL_UID (var
);
689 gcc_checking_assert (key
!= (splay_tree_key
) var
);
691 gcc_assert ((mask
& 1) == 0
692 || !splay_tree_lookup (ctx
->field_map
, key
));
693 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
694 || !splay_tree_lookup (ctx
->sfield_map
, key
));
695 gcc_assert ((mask
& 3) == 3
696 || !is_gimple_omp_oacc (ctx
->stmt
));
698 type
= TREE_TYPE (var
);
699 /* Prevent redeclaring the var in the split-off function with a restrict
700 pointer type. Note that we only clear type itself, restrict qualifiers in
701 the pointed-to type will be ignored by points-to analysis. */
702 if (POINTER_TYPE_P (type
)
703 && TYPE_RESTRICT (type
))
704 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
708 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
709 type
= build_pointer_type (build_pointer_type (type
));
712 type
= build_pointer_type (type
);
713 else if ((mask
& 3) == 1 && omp_is_reference (var
))
714 type
= TREE_TYPE (type
);
716 field
= build_decl (DECL_SOURCE_LOCATION (var
),
717 FIELD_DECL
, DECL_NAME (var
), type
);
719 /* Remember what variable this field was created for. This does have a
720 side effect of making dwarf2out ignore this member, so for helpful
721 debugging we clear it later in delete_omp_context. */
722 DECL_ABSTRACT_ORIGIN (field
) = var
;
723 if (type
== TREE_TYPE (var
))
725 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
726 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
727 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
730 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
734 insert_field_into_struct (ctx
->record_type
, field
);
735 if (ctx
->srecord_type
)
737 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
738 FIELD_DECL
, DECL_NAME (var
), type
);
739 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
740 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
741 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
742 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
743 insert_field_into_struct (ctx
->srecord_type
, sfield
);
748 if (ctx
->srecord_type
== NULL_TREE
)
752 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
753 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
754 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
756 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
757 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
758 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
759 insert_field_into_struct (ctx
->srecord_type
, sfield
);
760 splay_tree_insert (ctx
->sfield_map
,
761 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
762 (splay_tree_value
) sfield
);
766 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
767 : ctx
->srecord_type
, field
);
771 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
772 if ((mask
& 2) && ctx
->sfield_map
)
773 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
777 install_var_local (tree var
, omp_context
*ctx
)
779 tree new_var
= omp_copy_decl_1 (var
, ctx
);
780 insert_decl_map (&ctx
->cb
, var
, new_var
);
784 /* Adjust the replacement for DECL in CTX for the new context. This means
785 copying the DECL_VALUE_EXPR, and fixing up the type. */
788 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
792 new_decl
= lookup_decl (decl
, ctx
);
794 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
796 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
797 && DECL_HAS_VALUE_EXPR_P (decl
))
799 tree ve
= DECL_VALUE_EXPR (decl
);
800 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
801 SET_DECL_VALUE_EXPR (new_decl
, ve
);
802 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
805 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
807 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
808 if (size
== error_mark_node
)
809 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
810 DECL_SIZE (new_decl
) = size
;
812 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
813 if (size
== error_mark_node
)
814 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
815 DECL_SIZE_UNIT (new_decl
) = size
;
819 /* The callback for remap_decl. Search all containing contexts for a
820 mapping of the variable; this avoids having to duplicate the splay
821 tree ahead of time. We know a mapping doesn't already exist in the
822 given context. Create new mappings to implement default semantics. */
825 omp_copy_decl (tree var
, copy_body_data
*cb
)
827 omp_context
*ctx
= (omp_context
*) cb
;
830 if (TREE_CODE (var
) == LABEL_DECL
)
832 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
834 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
835 DECL_CONTEXT (new_var
) = current_function_decl
;
836 insert_decl_map (&ctx
->cb
, var
, new_var
);
840 while (!is_taskreg_ctx (ctx
))
845 new_var
= maybe_lookup_decl (var
, ctx
);
850 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
853 return error_mark_node
;
856 /* Create a new context, with OUTER_CTX being the surrounding context. */
859 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
861 omp_context
*ctx
= XCNEW (omp_context
);
863 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
864 (splay_tree_value
) ctx
);
869 ctx
->outer
= outer_ctx
;
870 ctx
->cb
= outer_ctx
->cb
;
871 ctx
->cb
.block
= NULL
;
872 ctx
->depth
= outer_ctx
->depth
+ 1;
876 ctx
->cb
.src_fn
= current_function_decl
;
877 ctx
->cb
.dst_fn
= current_function_decl
;
878 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
879 gcc_checking_assert (ctx
->cb
.src_node
);
880 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
881 ctx
->cb
.src_cfun
= cfun
;
882 ctx
->cb
.copy_decl
= omp_copy_decl
;
883 ctx
->cb
.eh_lp_nr
= 0;
884 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
885 ctx
->cb
.adjust_array_error_bounds
= true;
886 ctx
->cb
.dont_remap_vla_if_no_change
= true;
890 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
895 static gimple_seq
maybe_catch_exception (gimple_seq
);
897 /* Finalize task copyfn. */
900 finalize_task_copyfn (gomp_task
*task_stmt
)
902 struct function
*child_cfun
;
904 gimple_seq seq
= NULL
, new_seq
;
907 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
908 if (child_fn
== NULL_TREE
)
911 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
912 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
914 push_cfun (child_cfun
);
915 bind
= gimplify_body (child_fn
, false);
916 gimple_seq_add_stmt (&seq
, bind
);
917 new_seq
= maybe_catch_exception (seq
);
920 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
922 gimple_seq_add_stmt (&seq
, bind
);
924 gimple_set_body (child_fn
, seq
);
927 /* Inform the callgraph about the new function. */
928 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
929 node
->parallelized_function
= 1;
930 cgraph_node::add_new_function (child_fn
, false);
933 /* Destroy a omp_context data structures. Called through the splay tree
934 value delete callback. */
937 delete_omp_context (splay_tree_value value
)
939 omp_context
*ctx
= (omp_context
*) value
;
941 delete ctx
->cb
.decl_map
;
944 splay_tree_delete (ctx
->field_map
);
946 splay_tree_delete (ctx
->sfield_map
);
948 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
949 it produces corrupt debug information. */
950 if (ctx
->record_type
)
953 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
954 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
956 if (ctx
->srecord_type
)
959 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
960 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
963 if (is_task_ctx (ctx
))
964 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
966 if (ctx
->task_reduction_map
)
968 ctx
->task_reductions
.release ();
969 delete ctx
->task_reduction_map
;
972 delete ctx
->lastprivate_conditional_map
;
977 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
981 fixup_child_record_type (omp_context
*ctx
)
983 tree f
, type
= ctx
->record_type
;
985 if (!ctx
->receiver_decl
)
987 /* ??? It isn't sufficient to just call remap_type here, because
988 variably_modified_type_p doesn't work the way we expect for
989 record types. Testing each field for whether it needs remapping
990 and creating a new record by hand works, however. */
991 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
992 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
996 tree name
, new_fields
= NULL
;
998 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
999 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1000 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1001 TYPE_DECL
, name
, type
);
1002 TYPE_NAME (type
) = name
;
1004 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1006 tree new_f
= copy_node (f
);
1007 DECL_CONTEXT (new_f
) = type
;
1008 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1009 DECL_CHAIN (new_f
) = new_fields
;
1010 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1011 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1013 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1017 /* Arrange to be able to look up the receiver field
1018 given the sender field. */
1019 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1020 (splay_tree_value
) new_f
);
1022 TYPE_FIELDS (type
) = nreverse (new_fields
);
1026 /* In a target region we never modify any of the pointers in *.omp_data_i,
1027 so attempt to help the optimizers. */
1028 if (is_gimple_omp_offloaded (ctx
->stmt
))
1029 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1031 TREE_TYPE (ctx
->receiver_decl
)
1032 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1035 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1036 specified by CLAUSES. */
1039 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1042 bool scan_array_reductions
= false;
1044 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1048 switch (OMP_CLAUSE_CODE (c
))
1050 case OMP_CLAUSE_PRIVATE
:
1051 decl
= OMP_CLAUSE_DECL (c
);
1052 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1054 else if (!is_variable_sized (decl
))
1055 install_var_local (decl
, ctx
);
1058 case OMP_CLAUSE_SHARED
:
1059 decl
= OMP_CLAUSE_DECL (c
);
1060 /* Ignore shared directives in teams construct inside of
1061 target construct. */
1062 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1063 && !is_host_teams_ctx (ctx
))
1065 /* Global variables don't need to be copied,
1066 the receiver side will use them directly. */
1067 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1068 if (is_global_var (odecl
))
1070 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1073 gcc_assert (is_taskreg_ctx (ctx
));
1074 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1075 || !is_variable_sized (decl
));
1076 /* Global variables don't need to be copied,
1077 the receiver side will use them directly. */
1078 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1080 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1082 use_pointer_for_field (decl
, ctx
);
1085 by_ref
= use_pointer_for_field (decl
, NULL
);
1086 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1087 || TREE_ADDRESSABLE (decl
)
1089 || omp_is_reference (decl
))
1091 by_ref
= use_pointer_for_field (decl
, ctx
);
1092 install_var_field (decl
, by_ref
, 3, ctx
);
1093 install_var_local (decl
, ctx
);
1096 /* We don't need to copy const scalar vars back. */
1097 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1100 case OMP_CLAUSE_REDUCTION
:
1101 case OMP_CLAUSE_IN_REDUCTION
:
1102 decl
= OMP_CLAUSE_DECL (c
);
1103 if (TREE_CODE (decl
) == MEM_REF
)
1105 tree t
= TREE_OPERAND (decl
, 0);
1106 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1107 t
= TREE_OPERAND (t
, 0);
1108 if (TREE_CODE (t
) == INDIRECT_REF
1109 || TREE_CODE (t
) == ADDR_EXPR
)
1110 t
= TREE_OPERAND (t
, 0);
1111 install_var_local (t
, ctx
);
1112 if (is_taskreg_ctx (ctx
)
1113 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1114 || (is_task_ctx (ctx
)
1115 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1116 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1117 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1118 == POINTER_TYPE
)))))
1119 && !is_variable_sized (t
)
1120 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1121 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1122 && !is_task_ctx (ctx
))))
1124 by_ref
= use_pointer_for_field (t
, NULL
);
1125 if (is_task_ctx (ctx
)
1126 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1127 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1129 install_var_field (t
, false, 1, ctx
);
1130 install_var_field (t
, by_ref
, 2, ctx
);
1133 install_var_field (t
, by_ref
, 3, ctx
);
1137 if (is_task_ctx (ctx
)
1138 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1139 && OMP_CLAUSE_REDUCTION_TASK (c
)
1140 && is_parallel_ctx (ctx
)))
1142 /* Global variables don't need to be copied,
1143 the receiver side will use them directly. */
1144 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1146 by_ref
= use_pointer_for_field (decl
, ctx
);
1147 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1148 install_var_field (decl
, by_ref
, 3, ctx
);
1150 install_var_local (decl
, ctx
);
1153 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1154 && OMP_CLAUSE_REDUCTION_TASK (c
))
1156 install_var_local (decl
, ctx
);
1161 case OMP_CLAUSE_LASTPRIVATE
:
1162 /* Let the corresponding firstprivate clause create
1164 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1168 case OMP_CLAUSE_FIRSTPRIVATE
:
1169 case OMP_CLAUSE_LINEAR
:
1170 decl
= OMP_CLAUSE_DECL (c
);
1172 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1173 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1174 && is_gimple_omp_offloaded (ctx
->stmt
))
1176 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1177 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1178 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1179 install_var_field (decl
, true, 3, ctx
);
1181 install_var_field (decl
, false, 3, ctx
);
1183 if (is_variable_sized (decl
))
1185 if (is_task_ctx (ctx
))
1186 install_var_field (decl
, false, 1, ctx
);
1189 else if (is_taskreg_ctx (ctx
))
1192 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1193 by_ref
= use_pointer_for_field (decl
, NULL
);
1195 if (is_task_ctx (ctx
)
1196 && (global
|| by_ref
|| omp_is_reference (decl
)))
1198 install_var_field (decl
, false, 1, ctx
);
1200 install_var_field (decl
, by_ref
, 2, ctx
);
1203 install_var_field (decl
, by_ref
, 3, ctx
);
1205 install_var_local (decl
, ctx
);
1208 case OMP_CLAUSE_USE_DEVICE_PTR
:
1209 decl
= OMP_CLAUSE_DECL (c
);
1210 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1211 install_var_field (decl
, true, 3, ctx
);
1213 install_var_field (decl
, false, 3, ctx
);
1214 if (DECL_SIZE (decl
)
1215 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1217 tree decl2
= DECL_VALUE_EXPR (decl
);
1218 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1219 decl2
= TREE_OPERAND (decl2
, 0);
1220 gcc_assert (DECL_P (decl2
));
1221 install_var_local (decl2
, ctx
);
1223 install_var_local (decl
, ctx
);
1226 case OMP_CLAUSE_IS_DEVICE_PTR
:
1227 decl
= OMP_CLAUSE_DECL (c
);
1230 case OMP_CLAUSE__LOOPTEMP_
:
1231 case OMP_CLAUSE__REDUCTEMP_
:
1232 gcc_assert (is_taskreg_ctx (ctx
));
1233 decl
= OMP_CLAUSE_DECL (c
);
1234 install_var_field (decl
, false, 3, ctx
);
1235 install_var_local (decl
, ctx
);
1238 case OMP_CLAUSE_COPYPRIVATE
:
1239 case OMP_CLAUSE_COPYIN
:
1240 decl
= OMP_CLAUSE_DECL (c
);
1241 by_ref
= use_pointer_for_field (decl
, NULL
);
1242 install_var_field (decl
, by_ref
, 3, ctx
);
1245 case OMP_CLAUSE_FINAL
:
1247 case OMP_CLAUSE_NUM_THREADS
:
1248 case OMP_CLAUSE_NUM_TEAMS
:
1249 case OMP_CLAUSE_THREAD_LIMIT
:
1250 case OMP_CLAUSE_DEVICE
:
1251 case OMP_CLAUSE_SCHEDULE
:
1252 case OMP_CLAUSE_DIST_SCHEDULE
:
1253 case OMP_CLAUSE_DEPEND
:
1254 case OMP_CLAUSE_PRIORITY
:
1255 case OMP_CLAUSE_GRAINSIZE
:
1256 case OMP_CLAUSE_NUM_TASKS
:
1257 case OMP_CLAUSE_NUM_GANGS
:
1258 case OMP_CLAUSE_NUM_WORKERS
:
1259 case OMP_CLAUSE_VECTOR_LENGTH
:
1261 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1265 case OMP_CLAUSE_FROM
:
1266 case OMP_CLAUSE_MAP
:
1268 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1269 decl
= OMP_CLAUSE_DECL (c
);
1270 /* Global variables with "omp declare target" attribute
1271 don't need to be copied, the receiver side will use them
1272 directly. However, global variables with "omp declare target link"
1273 attribute need to be copied. Or when ALWAYS modifier is used. */
1274 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1276 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1277 && (OMP_CLAUSE_MAP_KIND (c
)
1278 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1279 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1280 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1281 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1282 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1283 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1284 && varpool_node::get_create (decl
)->offloadable
1285 && !lookup_attribute ("omp declare target link",
1286 DECL_ATTRIBUTES (decl
)))
1288 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1289 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1291 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1292 not offloaded; there is nothing to map for those. */
1293 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1294 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1295 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1298 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1299 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1300 || (OMP_CLAUSE_MAP_KIND (c
)
1301 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1303 if (TREE_CODE (decl
) == COMPONENT_REF
1304 || (TREE_CODE (decl
) == INDIRECT_REF
1305 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1306 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1307 == REFERENCE_TYPE
)))
1309 if (DECL_SIZE (decl
)
1310 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1312 tree decl2
= DECL_VALUE_EXPR (decl
);
1313 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1314 decl2
= TREE_OPERAND (decl2
, 0);
1315 gcc_assert (DECL_P (decl2
));
1316 install_var_local (decl2
, ctx
);
1318 install_var_local (decl
, ctx
);
1323 if (DECL_SIZE (decl
)
1324 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1326 tree decl2
= DECL_VALUE_EXPR (decl
);
1327 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1328 decl2
= TREE_OPERAND (decl2
, 0);
1329 gcc_assert (DECL_P (decl2
));
1330 install_var_field (decl2
, true, 3, ctx
);
1331 install_var_local (decl2
, ctx
);
1332 install_var_local (decl
, ctx
);
1336 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1337 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1338 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1339 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1340 install_var_field (decl
, true, 7, ctx
);
1342 install_var_field (decl
, true, 3, ctx
);
1343 if (is_gimple_omp_offloaded (ctx
->stmt
)
1344 && !OMP_CLAUSE_MAP_IN_REDUCTION (c
))
1345 install_var_local (decl
, ctx
);
1350 tree base
= get_base_address (decl
);
1351 tree nc
= OMP_CLAUSE_CHAIN (c
);
1354 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1355 && OMP_CLAUSE_DECL (nc
) == base
1356 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1357 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1359 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1360 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1366 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1367 decl
= OMP_CLAUSE_DECL (c
);
1369 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1370 (splay_tree_key
) decl
));
1372 = build_decl (OMP_CLAUSE_LOCATION (c
),
1373 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1374 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1375 insert_field_into_struct (ctx
->record_type
, field
);
1376 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1377 (splay_tree_value
) field
);
1382 case OMP_CLAUSE__GRIDDIM_
:
1385 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c
), ctx
->outer
);
1386 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c
), ctx
->outer
);
1390 case OMP_CLAUSE_NOWAIT
:
1391 case OMP_CLAUSE_ORDERED
:
1392 case OMP_CLAUSE_COLLAPSE
:
1393 case OMP_CLAUSE_UNTIED
:
1394 case OMP_CLAUSE_MERGEABLE
:
1395 case OMP_CLAUSE_PROC_BIND
:
1396 case OMP_CLAUSE_SAFELEN
:
1397 case OMP_CLAUSE_SIMDLEN
:
1398 case OMP_CLAUSE_THREADS
:
1399 case OMP_CLAUSE_SIMD
:
1400 case OMP_CLAUSE_NOGROUP
:
1401 case OMP_CLAUSE_DEFAULTMAP
:
1402 case OMP_CLAUSE_ASYNC
:
1403 case OMP_CLAUSE_WAIT
:
1404 case OMP_CLAUSE_GANG
:
1405 case OMP_CLAUSE_WORKER
:
1406 case OMP_CLAUSE_VECTOR
:
1407 case OMP_CLAUSE_INDEPENDENT
:
1408 case OMP_CLAUSE_AUTO
:
1409 case OMP_CLAUSE_SEQ
:
1410 case OMP_CLAUSE_TILE
:
1411 case OMP_CLAUSE__SIMT_
:
1412 case OMP_CLAUSE_DEFAULT
:
1413 case OMP_CLAUSE_NONTEMPORAL
:
1414 case OMP_CLAUSE_IF_PRESENT
:
1415 case OMP_CLAUSE_FINALIZE
:
1416 case OMP_CLAUSE_TASK_REDUCTION
:
1419 case OMP_CLAUSE_ALIGNED
:
1420 decl
= OMP_CLAUSE_DECL (c
);
1421 if (is_global_var (decl
)
1422 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1423 install_var_local (decl
, ctx
);
1426 case OMP_CLAUSE__CONDTEMP_
:
1427 decl
= OMP_CLAUSE_DECL (c
);
1428 if (is_parallel_ctx (ctx
))
1430 install_var_field (decl
, false, 3, ctx
);
1431 install_var_local (decl
, ctx
);
1433 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1434 && (gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
1435 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1436 install_var_local (decl
, ctx
);
1439 case OMP_CLAUSE__CACHE_
:
1445 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1447 switch (OMP_CLAUSE_CODE (c
))
1449 case OMP_CLAUSE_LASTPRIVATE
:
1450 /* Let the corresponding firstprivate clause create
1452 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1453 scan_array_reductions
= true;
1454 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1458 case OMP_CLAUSE_FIRSTPRIVATE
:
1459 case OMP_CLAUSE_PRIVATE
:
1460 case OMP_CLAUSE_LINEAR
:
1461 case OMP_CLAUSE_IS_DEVICE_PTR
:
1462 decl
= OMP_CLAUSE_DECL (c
);
1463 if (is_variable_sized (decl
))
1465 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1466 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1467 && is_gimple_omp_offloaded (ctx
->stmt
))
1469 tree decl2
= DECL_VALUE_EXPR (decl
);
1470 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1471 decl2
= TREE_OPERAND (decl2
, 0);
1472 gcc_assert (DECL_P (decl2
));
1473 install_var_local (decl2
, ctx
);
1474 fixup_remapped_decl (decl2
, ctx
, false);
1476 install_var_local (decl
, ctx
);
1478 fixup_remapped_decl (decl
, ctx
,
1479 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1480 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1481 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1482 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1483 scan_array_reductions
= true;
1486 case OMP_CLAUSE_REDUCTION
:
1487 case OMP_CLAUSE_IN_REDUCTION
:
1488 decl
= OMP_CLAUSE_DECL (c
);
1489 if (TREE_CODE (decl
) != MEM_REF
)
1491 if (is_variable_sized (decl
))
1492 install_var_local (decl
, ctx
);
1493 fixup_remapped_decl (decl
, ctx
, false);
1495 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1496 scan_array_reductions
= true;
1499 case OMP_CLAUSE_TASK_REDUCTION
:
1500 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1501 scan_array_reductions
= true;
1504 case OMP_CLAUSE_SHARED
:
1505 /* Ignore shared directives in teams construct inside of
1506 target construct. */
1507 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1508 && !is_host_teams_ctx (ctx
))
1510 decl
= OMP_CLAUSE_DECL (c
);
1511 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1513 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1515 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1518 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1519 install_var_field (decl
, by_ref
, 11, ctx
);
1522 fixup_remapped_decl (decl
, ctx
, false);
1525 case OMP_CLAUSE_MAP
:
1526 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1528 decl
= OMP_CLAUSE_DECL (c
);
1530 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1531 && (OMP_CLAUSE_MAP_KIND (c
)
1532 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1533 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1534 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1535 && varpool_node::get_create (decl
)->offloadable
)
1539 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1540 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1541 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1542 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1544 tree new_decl
= lookup_decl (decl
, ctx
);
1545 TREE_TYPE (new_decl
)
1546 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1548 else if (DECL_SIZE (decl
)
1549 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1551 tree decl2
= DECL_VALUE_EXPR (decl
);
1552 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1553 decl2
= TREE_OPERAND (decl2
, 0);
1554 gcc_assert (DECL_P (decl2
));
1555 fixup_remapped_decl (decl2
, ctx
, false);
1556 fixup_remapped_decl (decl
, ctx
, true);
1559 fixup_remapped_decl (decl
, ctx
, false);
1563 case OMP_CLAUSE_COPYPRIVATE
:
1564 case OMP_CLAUSE_COPYIN
:
1565 case OMP_CLAUSE_DEFAULT
:
1567 case OMP_CLAUSE_NUM_THREADS
:
1568 case OMP_CLAUSE_NUM_TEAMS
:
1569 case OMP_CLAUSE_THREAD_LIMIT
:
1570 case OMP_CLAUSE_DEVICE
:
1571 case OMP_CLAUSE_SCHEDULE
:
1572 case OMP_CLAUSE_DIST_SCHEDULE
:
1573 case OMP_CLAUSE_NOWAIT
:
1574 case OMP_CLAUSE_ORDERED
:
1575 case OMP_CLAUSE_COLLAPSE
:
1576 case OMP_CLAUSE_UNTIED
:
1577 case OMP_CLAUSE_FINAL
:
1578 case OMP_CLAUSE_MERGEABLE
:
1579 case OMP_CLAUSE_PROC_BIND
:
1580 case OMP_CLAUSE_SAFELEN
:
1581 case OMP_CLAUSE_SIMDLEN
:
1582 case OMP_CLAUSE_ALIGNED
:
1583 case OMP_CLAUSE_DEPEND
:
1584 case OMP_CLAUSE__LOOPTEMP_
:
1585 case OMP_CLAUSE__REDUCTEMP_
:
1587 case OMP_CLAUSE_FROM
:
1588 case OMP_CLAUSE_PRIORITY
:
1589 case OMP_CLAUSE_GRAINSIZE
:
1590 case OMP_CLAUSE_NUM_TASKS
:
1591 case OMP_CLAUSE_THREADS
:
1592 case OMP_CLAUSE_SIMD
:
1593 case OMP_CLAUSE_NOGROUP
:
1594 case OMP_CLAUSE_DEFAULTMAP
:
1595 case OMP_CLAUSE_USE_DEVICE_PTR
:
1596 case OMP_CLAUSE_NONTEMPORAL
:
1597 case OMP_CLAUSE_ASYNC
:
1598 case OMP_CLAUSE_WAIT
:
1599 case OMP_CLAUSE_NUM_GANGS
:
1600 case OMP_CLAUSE_NUM_WORKERS
:
1601 case OMP_CLAUSE_VECTOR_LENGTH
:
1602 case OMP_CLAUSE_GANG
:
1603 case OMP_CLAUSE_WORKER
:
1604 case OMP_CLAUSE_VECTOR
:
1605 case OMP_CLAUSE_INDEPENDENT
:
1606 case OMP_CLAUSE_AUTO
:
1607 case OMP_CLAUSE_SEQ
:
1608 case OMP_CLAUSE_TILE
:
1609 case OMP_CLAUSE__GRIDDIM_
:
1610 case OMP_CLAUSE__SIMT_
:
1611 case OMP_CLAUSE_IF_PRESENT
:
1612 case OMP_CLAUSE_FINALIZE
:
1613 case OMP_CLAUSE__CONDTEMP_
:
1616 case OMP_CLAUSE__CACHE_
:
1622 gcc_checking_assert (!scan_array_reductions
1623 || !is_gimple_omp_oacc (ctx
->stmt
));
1624 if (scan_array_reductions
)
1626 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1627 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1628 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1629 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1630 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1632 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), ctx
);
1633 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
1635 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1636 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1637 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1638 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1639 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1640 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1644 /* Create a new name for omp child function. Returns an identifier. */
1647 create_omp_child_function_name (bool task_copy
)
1649 return clone_function_name_numbered (current_function_decl
,
1650 task_copy
? "_omp_cpyfn" : "_omp_fn");
1653 /* Return true if CTX may belong to offloaded code: either if current function
1654 is offloaded, or any enclosing context corresponds to a target region. */
1657 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1659 if (cgraph_node::get (current_function_decl
)->offloadable
)
1661 for (; ctx
; ctx
= ctx
->outer
)
1662 if (is_gimple_omp_offloaded (ctx
->stmt
))
1667 /* Build a decl for the omp child function. It'll not contain a body
1668 yet, just the bare decl. */
1671 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1673 tree decl
, type
, name
, t
;
1675 name
= create_omp_child_function_name (task_copy
);
1677 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1678 ptr_type_node
, NULL_TREE
);
1680 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1682 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1684 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1687 ctx
->cb
.dst_fn
= decl
;
1689 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1691 TREE_STATIC (decl
) = 1;
1692 TREE_USED (decl
) = 1;
1693 DECL_ARTIFICIAL (decl
) = 1;
1694 DECL_IGNORED_P (decl
) = 0;
1695 TREE_PUBLIC (decl
) = 0;
1696 DECL_UNINLINABLE (decl
) = 1;
1697 DECL_EXTERNAL (decl
) = 0;
1698 DECL_CONTEXT (decl
) = NULL_TREE
;
1699 DECL_INITIAL (decl
) = make_node (BLOCK
);
1700 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1701 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1702 /* Remove omp declare simd attribute from the new attributes. */
1703 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1705 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1708 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1709 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1710 *p
= TREE_CHAIN (*p
);
1713 tree chain
= TREE_CHAIN (*p
);
1714 *p
= copy_node (*p
);
1715 p
= &TREE_CHAIN (*p
);
1719 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1720 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1721 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
1722 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
1723 DECL_FUNCTION_VERSIONED (decl
)
1724 = DECL_FUNCTION_VERSIONED (current_function_decl
);
1726 if (omp_maybe_offloaded_ctx (ctx
))
1728 cgraph_node::get_create (decl
)->offloadable
= 1;
1729 if (ENABLE_OFFLOADING
)
1730 g
->have_offload
= true;
1733 if (cgraph_node::get_create (decl
)->offloadable
1734 && !lookup_attribute ("omp declare target",
1735 DECL_ATTRIBUTES (current_function_decl
)))
1737 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1738 ? "omp target entrypoint"
1739 : "omp declare target");
1740 DECL_ATTRIBUTES (decl
)
1741 = tree_cons (get_identifier (target_attr
),
1742 NULL_TREE
, DECL_ATTRIBUTES (decl
));
1745 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1746 RESULT_DECL
, NULL_TREE
, void_type_node
);
1747 DECL_ARTIFICIAL (t
) = 1;
1748 DECL_IGNORED_P (t
) = 1;
1749 DECL_CONTEXT (t
) = decl
;
1750 DECL_RESULT (decl
) = t
;
1752 tree data_name
= get_identifier (".omp_data_i");
1753 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
1755 DECL_ARTIFICIAL (t
) = 1;
1756 DECL_NAMELESS (t
) = 1;
1757 DECL_ARG_TYPE (t
) = ptr_type_node
;
1758 DECL_CONTEXT (t
) = current_function_decl
;
1760 TREE_READONLY (t
) = 1;
1761 DECL_ARGUMENTS (decl
) = t
;
1763 ctx
->receiver_decl
= t
;
1766 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1767 PARM_DECL
, get_identifier (".omp_data_o"),
1769 DECL_ARTIFICIAL (t
) = 1;
1770 DECL_NAMELESS (t
) = 1;
1771 DECL_ARG_TYPE (t
) = ptr_type_node
;
1772 DECL_CONTEXT (t
) = current_function_decl
;
1774 TREE_ADDRESSABLE (t
) = 1;
1775 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1776 DECL_ARGUMENTS (decl
) = t
;
1779 /* Allocate memory for the function structure. The call to
1780 allocate_struct_function clobbers CFUN, so we need to restore
1782 push_struct_function (decl
);
1783 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
1784 init_tree_ssa (cfun
);
1788 /* Callback for walk_gimple_seq. Check if combined parallel
1789 contains gimple_omp_for_combined_into_p OMP_FOR. */
1792 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
1793 bool *handled_ops_p
,
1794 struct walk_stmt_info
*wi
)
1796 gimple
*stmt
= gsi_stmt (*gsi_p
);
1798 *handled_ops_p
= true;
1799 switch (gimple_code (stmt
))
1803 case GIMPLE_OMP_FOR
:
1804 if (gimple_omp_for_combined_into_p (stmt
)
1805 && gimple_omp_for_kind (stmt
)
1806 == *(const enum gf_mask
*) (wi
->info
))
1809 return integer_zero_node
;
1818 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1821 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
1822 omp_context
*outer_ctx
)
1824 struct walk_stmt_info wi
;
1826 memset (&wi
, 0, sizeof (wi
));
1828 wi
.info
= (void *) &msk
;
1829 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
1830 if (wi
.info
!= (void *) &msk
)
1832 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
1833 struct omp_for_data fd
;
1834 omp_extract_for_data (for_stmt
, &fd
, NULL
);
1835 /* We need two temporaries with fd.loop.v type (istart/iend)
1836 and then (fd.collapse - 1) temporaries with the same
1837 type for count2 ... countN-1 vars if not constant. */
1838 size_t count
= 2, i
;
1839 tree type
= fd
.iter_type
;
1841 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
1843 count
+= fd
.collapse
- 1;
1844 /* If there are lastprivate clauses on the inner
1845 GIMPLE_OMP_FOR, add one more temporaries for the total number
1846 of iterations (product of count1 ... countN-1). */
1847 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
1848 OMP_CLAUSE_LASTPRIVATE
))
1850 else if (msk
== GF_OMP_FOR_KIND_FOR
1851 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1852 OMP_CLAUSE_LASTPRIVATE
))
1855 for (i
= 0; i
< count
; i
++)
1857 tree temp
= create_tmp_var (type
);
1858 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
1859 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1860 OMP_CLAUSE_DECL (c
) = temp
;
1861 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1862 gimple_omp_taskreg_set_clauses (stmt
, c
);
1865 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
1866 && omp_find_clause (gimple_omp_task_clauses (stmt
),
1867 OMP_CLAUSE_REDUCTION
))
1869 tree type
= build_pointer_type (pointer_sized_int_node
);
1870 tree temp
= create_tmp_var (type
);
1871 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1872 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1873 OMP_CLAUSE_DECL (c
) = temp
;
1874 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
1875 gimple_omp_task_set_clauses (stmt
, c
);
1879 /* Scan an OpenMP parallel directive. */
1882 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1886 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
1888 /* Ignore parallel directives with empty bodies, unless there
1889 are copyin clauses. */
1891 && empty_body_p (gimple_omp_body (stmt
))
1892 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1893 OMP_CLAUSE_COPYIN
) == NULL
)
1895 gsi_replace (gsi
, gimple_build_nop (), false);
1899 if (gimple_omp_parallel_combined_p (stmt
))
1900 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
1901 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1902 OMP_CLAUSE_REDUCTION
);
1903 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
1904 if (OMP_CLAUSE_REDUCTION_TASK (c
))
1906 tree type
= build_pointer_type (pointer_sized_int_node
);
1907 tree temp
= create_tmp_var (type
);
1908 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1910 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1911 OMP_CLAUSE_DECL (c
) = temp
;
1912 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
1913 gimple_omp_parallel_set_clauses (stmt
, c
);
1916 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
1919 ctx
= new_omp_context (stmt
, outer_ctx
);
1920 taskreg_contexts
.safe_push (ctx
);
1921 if (taskreg_nesting_level
> 1)
1922 ctx
->is_nested
= true;
1923 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1924 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1925 name
= create_tmp_var_name (".omp_data_s");
1926 name
= build_decl (gimple_location (stmt
),
1927 TYPE_DECL
, name
, ctx
->record_type
);
1928 DECL_ARTIFICIAL (name
) = 1;
1929 DECL_NAMELESS (name
) = 1;
1930 TYPE_NAME (ctx
->record_type
) = name
;
1931 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1932 if (!gimple_omp_parallel_grid_phony (stmt
))
1934 create_omp_child_function (ctx
, false);
1935 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1938 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
1939 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
1941 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
1942 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
1945 /* Scan an OpenMP task directive. */
1948 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1952 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
1954 /* Ignore task directives with empty bodies, unless they have depend
1957 && gimple_omp_body (stmt
)
1958 && empty_body_p (gimple_omp_body (stmt
))
1959 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
1961 gsi_replace (gsi
, gimple_build_nop (), false);
1965 if (gimple_omp_task_taskloop_p (stmt
))
1966 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
1968 ctx
= new_omp_context (stmt
, outer_ctx
);
1970 if (gimple_omp_task_taskwait_p (stmt
))
1972 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
1976 taskreg_contexts
.safe_push (ctx
);
1977 if (taskreg_nesting_level
> 1)
1978 ctx
->is_nested
= true;
1979 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1980 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1981 name
= create_tmp_var_name (".omp_data_s");
1982 name
= build_decl (gimple_location (stmt
),
1983 TYPE_DECL
, name
, ctx
->record_type
);
1984 DECL_ARTIFICIAL (name
) = 1;
1985 DECL_NAMELESS (name
) = 1;
1986 TYPE_NAME (ctx
->record_type
) = name
;
1987 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1988 create_omp_child_function (ctx
, false);
1989 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1991 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
1993 if (ctx
->srecord_type
)
1995 name
= create_tmp_var_name (".omp_data_a");
1996 name
= build_decl (gimple_location (stmt
),
1997 TYPE_DECL
, name
, ctx
->srecord_type
);
1998 DECL_ARTIFICIAL (name
) = 1;
1999 DECL_NAMELESS (name
) = 1;
2000 TYPE_NAME (ctx
->srecord_type
) = name
;
2001 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2002 create_omp_child_function (ctx
, true);
2005 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2007 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2009 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2010 t
= build_int_cst (long_integer_type_node
, 0);
2011 gimple_omp_task_set_arg_size (stmt
, t
);
2012 t
= build_int_cst (long_integer_type_node
, 1);
2013 gimple_omp_task_set_arg_align (stmt
, t
);
2017 /* Helper function for finish_taskreg_scan, called through walk_tree.
2018 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2019 tree, replace it in the expression. */
2022 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2026 omp_context
*ctx
= (omp_context
*) data
;
2027 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
2030 if (DECL_HAS_VALUE_EXPR_P (t
))
2031 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2036 else if (IS_TYPE_OR_DECL_P (*tp
))
2041 /* If any decls have been made addressable during scan_omp,
2042 adjust their fields if needed, and layout record types
2043 of parallel/task constructs. */
2046 finish_taskreg_scan (omp_context
*ctx
)
2048 if (ctx
->record_type
== NULL_TREE
)
2051 /* If any task_shared_vars were needed, verify all
2052 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2053 statements if use_pointer_for_field hasn't changed
2054 because of that. If it did, update field types now. */
2055 if (task_shared_vars
)
2059 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2060 c
; c
= OMP_CLAUSE_CHAIN (c
))
2061 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2062 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2064 tree decl
= OMP_CLAUSE_DECL (c
);
2066 /* Global variables don't need to be copied,
2067 the receiver side will use them directly. */
2068 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2070 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
2071 || !use_pointer_for_field (decl
, ctx
))
2073 tree field
= lookup_field (decl
, ctx
);
2074 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2075 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2077 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2078 TREE_THIS_VOLATILE (field
) = 0;
2079 DECL_USER_ALIGN (field
) = 0;
2080 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2081 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2082 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
2083 if (ctx
->srecord_type
)
2085 tree sfield
= lookup_sfield (decl
, ctx
);
2086 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2087 TREE_THIS_VOLATILE (sfield
) = 0;
2088 DECL_USER_ALIGN (sfield
) = 0;
2089 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2090 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2091 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
2096 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2098 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2099 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2102 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2103 expects to find it at the start of data. */
2104 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2105 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2109 *p
= DECL_CHAIN (*p
);
2113 p
= &DECL_CHAIN (*p
);
2114 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2115 TYPE_FIELDS (ctx
->record_type
) = f
;
2117 layout_type (ctx
->record_type
);
2118 fixup_child_record_type (ctx
);
2120 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2122 layout_type (ctx
->record_type
);
2123 fixup_child_record_type (ctx
);
2127 location_t loc
= gimple_location (ctx
->stmt
);
2128 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2129 /* Move VLA fields to the end. */
2130 p
= &TYPE_FIELDS (ctx
->record_type
);
2132 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2133 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2136 *p
= TREE_CHAIN (*p
);
2137 TREE_CHAIN (*q
) = NULL_TREE
;
2138 q
= &TREE_CHAIN (*q
);
2141 p
= &DECL_CHAIN (*p
);
2143 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2145 /* Move fields corresponding to first and second _looptemp_
2146 clause first. There are filled by GOMP_taskloop
2147 and thus need to be in specific positions. */
2148 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2149 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2150 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2151 OMP_CLAUSE__LOOPTEMP_
);
2152 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2153 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2154 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2155 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
2156 p
= &TYPE_FIELDS (ctx
->record_type
);
2158 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2159 *p
= DECL_CHAIN (*p
);
2161 p
= &DECL_CHAIN (*p
);
2162 DECL_CHAIN (f1
) = f2
;
2165 DECL_CHAIN (f2
) = f3
;
2166 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2169 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2170 TYPE_FIELDS (ctx
->record_type
) = f1
;
2171 if (ctx
->srecord_type
)
2173 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2174 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2176 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2177 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2179 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2180 *p
= DECL_CHAIN (*p
);
2182 p
= &DECL_CHAIN (*p
);
2183 DECL_CHAIN (f1
) = f2
;
2184 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2187 DECL_CHAIN (f2
) = f3
;
2188 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2191 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2192 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2195 layout_type (ctx
->record_type
);
2196 fixup_child_record_type (ctx
);
2197 if (ctx
->srecord_type
)
2198 layout_type (ctx
->srecord_type
);
2199 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2200 TYPE_SIZE_UNIT (ctx
->record_type
));
2201 if (TREE_CODE (t
) != INTEGER_CST
)
2203 t
= unshare_expr (t
);
2204 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2206 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2207 t
= build_int_cst (long_integer_type_node
,
2208 TYPE_ALIGN_UNIT (ctx
->record_type
));
2209 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2213 /* Find the enclosing offload context. */
2215 static omp_context
*
2216 enclosing_target_ctx (omp_context
*ctx
)
2218 for (; ctx
; ctx
= ctx
->outer
)
2219 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2225 /* Return true if ctx is part of an oacc kernels region. */
2228 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2230 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2232 gimple
*stmt
= ctx
->stmt
;
2233 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2234 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2241 /* Check the parallelism clauses inside a kernels regions.
2242 Until kernels handling moves to use the same loop indirection
2243 scheme as parallel, we need to do this checking early. */
2246 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2248 bool checking
= true;
2249 unsigned outer_mask
= 0;
2250 unsigned this_mask
= 0;
2251 bool has_seq
= false, has_auto
= false;
2254 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2258 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2260 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2263 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2265 switch (OMP_CLAUSE_CODE (c
))
2267 case OMP_CLAUSE_GANG
:
2268 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2270 case OMP_CLAUSE_WORKER
:
2271 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2273 case OMP_CLAUSE_VECTOR
:
2274 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2276 case OMP_CLAUSE_SEQ
:
2279 case OMP_CLAUSE_AUTO
:
2289 if (has_seq
&& (this_mask
|| has_auto
))
2290 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2291 " OpenACC loop specifiers");
2292 else if (has_auto
&& this_mask
)
2293 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2294 " OpenACC loop specifiers");
2296 if (this_mask
& outer_mask
)
2297 error_at (gimple_location (stmt
), "inner loop uses same"
2298 " OpenACC parallelism as containing loop");
2301 return outer_mask
| this_mask
;
2304 /* Scan a GIMPLE_OMP_FOR. */
2306 static omp_context
*
2307 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2311 tree clauses
= gimple_omp_for_clauses (stmt
);
2313 ctx
= new_omp_context (stmt
, outer_ctx
);
2315 if (is_gimple_omp_oacc (stmt
))
2317 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2319 if (!tgt
|| is_oacc_parallel (tgt
))
2320 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2322 char const *check
= NULL
;
2324 switch (OMP_CLAUSE_CODE (c
))
2326 case OMP_CLAUSE_GANG
:
2330 case OMP_CLAUSE_WORKER
:
2334 case OMP_CLAUSE_VECTOR
:
2342 if (check
&& OMP_CLAUSE_OPERAND (c
, 0))
2343 error_at (gimple_location (stmt
),
2344 "argument not permitted on %qs clause in"
2345 " OpenACC %<parallel%>", check
);
2348 if (tgt
&& is_oacc_kernels (tgt
))
2350 /* Strip out reductions, as they are not handled yet. */
2351 tree
*prev_ptr
= &clauses
;
2353 while (tree probe
= *prev_ptr
)
2355 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2357 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2358 *prev_ptr
= *next_ptr
;
2360 prev_ptr
= next_ptr
;
2363 gimple_omp_for_set_clauses (stmt
, clauses
);
2364 check_oacc_kernel_gwv (stmt
, ctx
);
2368 scan_sharing_clauses (clauses
, ctx
);
2370 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2371 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2373 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2374 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2375 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2376 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2378 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2382 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2385 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2386 omp_context
*outer_ctx
)
2388 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2389 gsi_replace (gsi
, bind
, false);
2390 gimple_seq seq
= NULL
;
2391 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2392 tree cond
= create_tmp_var_raw (integer_type_node
);
2393 DECL_CONTEXT (cond
) = current_function_decl
;
2394 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2395 gimple_bind_set_vars (bind
, cond
);
2396 gimple_call_set_lhs (g
, cond
);
2397 gimple_seq_add_stmt (&seq
, g
);
2398 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2399 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2400 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2401 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2402 gimple_seq_add_stmt (&seq
, g
);
2403 g
= gimple_build_label (lab1
);
2404 gimple_seq_add_stmt (&seq
, g
);
2405 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2406 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2407 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2408 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2409 gimple_omp_for_set_clauses (new_stmt
, clause
);
2410 gimple_seq_add_stmt (&seq
, new_stmt
);
2411 g
= gimple_build_goto (lab3
);
2412 gimple_seq_add_stmt (&seq
, g
);
2413 g
= gimple_build_label (lab2
);
2414 gimple_seq_add_stmt (&seq
, g
);
2415 gimple_seq_add_stmt (&seq
, stmt
);
2416 g
= gimple_build_label (lab3
);
2417 gimple_seq_add_stmt (&seq
, g
);
2418 gimple_bind_set_body (bind
, seq
);
2420 scan_omp_for (new_stmt
, outer_ctx
);
2421 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2424 /* Scan an OpenMP sections directive. */
2427 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2431 ctx
= new_omp_context (stmt
, outer_ctx
);
2432 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2433 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2436 /* Scan an OpenMP single directive. */
2439 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2444 ctx
= new_omp_context (stmt
, outer_ctx
);
2445 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2446 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2447 name
= create_tmp_var_name (".omp_copy_s");
2448 name
= build_decl (gimple_location (stmt
),
2449 TYPE_DECL
, name
, ctx
->record_type
);
2450 TYPE_NAME (ctx
->record_type
) = name
;
2452 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2453 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2455 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2456 ctx
->record_type
= NULL
;
2458 layout_type (ctx
->record_type
);
2461 /* Scan a GIMPLE_OMP_TARGET. */
2464 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
2468 bool offloaded
= is_gimple_omp_offloaded (stmt
);
2469 tree clauses
= gimple_omp_target_clauses (stmt
);
2471 ctx
= new_omp_context (stmt
, outer_ctx
);
2472 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2473 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2474 name
= create_tmp_var_name (".omp_data_t");
2475 name
= build_decl (gimple_location (stmt
),
2476 TYPE_DECL
, name
, ctx
->record_type
);
2477 DECL_ARTIFICIAL (name
) = 1;
2478 DECL_NAMELESS (name
) = 1;
2479 TYPE_NAME (ctx
->record_type
) = name
;
2480 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2484 create_omp_child_function (ctx
, false);
2485 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2488 scan_sharing_clauses (clauses
, ctx
);
2489 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2491 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2492 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2495 TYPE_FIELDS (ctx
->record_type
)
2496 = nreverse (TYPE_FIELDS (ctx
->record_type
));
2499 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
2500 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
2502 field
= DECL_CHAIN (field
))
2503 gcc_assert (DECL_ALIGN (field
) == align
);
2505 layout_type (ctx
->record_type
);
2507 fixup_child_record_type (ctx
);
2511 /* Scan an OpenMP teams directive. */
2514 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
2516 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
2518 if (!gimple_omp_teams_host (stmt
))
2520 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2521 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2524 taskreg_contexts
.safe_push (ctx
);
2525 gcc_assert (taskreg_nesting_level
== 1);
2526 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2527 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2528 tree name
= create_tmp_var_name (".omp_data_s");
2529 name
= build_decl (gimple_location (stmt
),
2530 TYPE_DECL
, name
, ctx
->record_type
);
2531 DECL_ARTIFICIAL (name
) = 1;
2532 DECL_NAMELESS (name
) = 1;
2533 TYPE_NAME (ctx
->record_type
) = name
;
2534 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2535 create_omp_child_function (ctx
, false);
2536 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2538 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2539 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2541 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2542 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2545 /* Check nesting restrictions. */
2547 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
2551 if (ctx
&& gimple_code (ctx
->stmt
) == GIMPLE_OMP_GRID_BODY
)
2552 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2553 the original copy of its contents. */
2556 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2557 inside an OpenACC CTX. */
2558 if (!(is_gimple_omp (stmt
)
2559 && is_gimple_omp_oacc (stmt
))
2560 /* Except for atomic codes that we share with OpenMP. */
2561 && !(gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2562 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2564 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2566 error_at (gimple_location (stmt
),
2567 "non-OpenACC construct inside of OpenACC routine");
2571 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
2572 if (is_gimple_omp (octx
->stmt
)
2573 && is_gimple_omp_oacc (octx
->stmt
))
2575 error_at (gimple_location (stmt
),
2576 "non-OpenACC construct inside of OpenACC region");
2583 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
2585 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
2587 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
2588 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
2591 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
2593 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2594 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2596 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
2597 && (ctx
->outer
== NULL
2598 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
2599 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
2600 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
2601 != GF_OMP_FOR_KIND_FOR
)
2602 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
2604 error_at (gimple_location (stmt
),
2605 "%<ordered simd threads%> must be closely "
2606 "nested inside of %<for simd%> region");
2612 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2613 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
2614 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
2616 error_at (gimple_location (stmt
),
2617 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2618 " or %<#pragma omp atomic%> may not be nested inside"
2619 " %<simd%> region");
2622 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2624 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
2625 || ((gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
)
2626 && (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
)))
2627 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
2629 error_at (gimple_location (stmt
),
2630 "only %<distribute%> or %<parallel%> regions are "
2631 "allowed to be strictly nested inside %<teams%> "
2637 switch (gimple_code (stmt
))
2639 case GIMPLE_OMP_FOR
:
2640 if (gimple_omp_for_kind (stmt
) & GF_OMP_FOR_SIMD
)
2642 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
2644 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
2646 error_at (gimple_location (stmt
),
2647 "%<distribute%> region must be strictly nested "
2648 "inside %<teams%> construct");
2653 /* We split taskloop into task and nested taskloop in it. */
2654 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
2656 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
2661 switch (gimple_code (ctx
->stmt
))
2663 case GIMPLE_OMP_FOR
:
2664 ok
= (gimple_omp_for_kind (ctx
->stmt
)
2665 == GF_OMP_FOR_KIND_OACC_LOOP
);
2668 case GIMPLE_OMP_TARGET
:
2669 switch (gimple_omp_target_kind (ctx
->stmt
))
2671 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
2672 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
2683 else if (oacc_get_fn_attrib (current_function_decl
))
2687 error_at (gimple_location (stmt
),
2688 "OpenACC loop directive must be associated with"
2689 " an OpenACC compute region");
2695 if (is_gimple_call (stmt
)
2696 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2697 == BUILT_IN_GOMP_CANCEL
2698 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2699 == BUILT_IN_GOMP_CANCELLATION_POINT
))
2701 const char *bad
= NULL
;
2702 const char *kind
= NULL
;
2703 const char *construct
2704 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2705 == BUILT_IN_GOMP_CANCEL
)
2706 ? "#pragma omp cancel"
2707 : "#pragma omp cancellation point";
2710 error_at (gimple_location (stmt
), "orphaned %qs construct",
2714 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
2715 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
2719 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
2720 bad
= "#pragma omp parallel";
2721 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2722 == BUILT_IN_GOMP_CANCEL
2723 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2724 ctx
->cancellable
= true;
2728 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2729 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
2730 bad
= "#pragma omp for";
2731 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2732 == BUILT_IN_GOMP_CANCEL
2733 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2735 ctx
->cancellable
= true;
2736 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2738 warning_at (gimple_location (stmt
), 0,
2739 "%<#pragma omp cancel for%> inside "
2740 "%<nowait%> for construct");
2741 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2742 OMP_CLAUSE_ORDERED
))
2743 warning_at (gimple_location (stmt
), 0,
2744 "%<#pragma omp cancel for%> inside "
2745 "%<ordered%> for construct");
2750 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
2751 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
2752 bad
= "#pragma omp sections";
2753 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2754 == BUILT_IN_GOMP_CANCEL
2755 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2757 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
2759 ctx
->cancellable
= true;
2760 if (omp_find_clause (gimple_omp_sections_clauses
2763 warning_at (gimple_location (stmt
), 0,
2764 "%<#pragma omp cancel sections%> inside "
2765 "%<nowait%> sections construct");
2769 gcc_assert (ctx
->outer
2770 && gimple_code (ctx
->outer
->stmt
)
2771 == GIMPLE_OMP_SECTIONS
);
2772 ctx
->outer
->cancellable
= true;
2773 if (omp_find_clause (gimple_omp_sections_clauses
2776 warning_at (gimple_location (stmt
), 0,
2777 "%<#pragma omp cancel sections%> inside "
2778 "%<nowait%> sections construct");
2784 if (!is_task_ctx (ctx
)
2785 && (!is_taskloop_ctx (ctx
)
2786 || ctx
->outer
== NULL
2787 || !is_task_ctx (ctx
->outer
)))
2788 bad
= "#pragma omp task";
2791 for (omp_context
*octx
= ctx
->outer
;
2792 octx
; octx
= octx
->outer
)
2794 switch (gimple_code (octx
->stmt
))
2796 case GIMPLE_OMP_TASKGROUP
:
2798 case GIMPLE_OMP_TARGET
:
2799 if (gimple_omp_target_kind (octx
->stmt
)
2800 != GF_OMP_TARGET_KIND_REGION
)
2803 case GIMPLE_OMP_PARALLEL
:
2804 case GIMPLE_OMP_TEAMS
:
2805 error_at (gimple_location (stmt
),
2806 "%<%s taskgroup%> construct not closely "
2807 "nested inside of %<taskgroup%> region",
2810 case GIMPLE_OMP_TASK
:
2811 if (gimple_omp_task_taskloop_p (octx
->stmt
)
2813 && is_taskloop_ctx (octx
->outer
))
2816 = gimple_omp_for_clauses (octx
->outer
->stmt
);
2817 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
2826 ctx
->cancellable
= true;
2831 error_at (gimple_location (stmt
), "invalid arguments");
2836 error_at (gimple_location (stmt
),
2837 "%<%s %s%> construct not closely nested inside of %qs",
2838 construct
, kind
, bad
);
2843 case GIMPLE_OMP_SECTIONS
:
2844 case GIMPLE_OMP_SINGLE
:
2845 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2846 switch (gimple_code (ctx
->stmt
))
2848 case GIMPLE_OMP_FOR
:
2849 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
2850 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
2853 case GIMPLE_OMP_SECTIONS
:
2854 case GIMPLE_OMP_SINGLE
:
2855 case GIMPLE_OMP_ORDERED
:
2856 case GIMPLE_OMP_MASTER
:
2857 case GIMPLE_OMP_TASK
:
2858 case GIMPLE_OMP_CRITICAL
:
2859 if (is_gimple_call (stmt
))
2861 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2862 != BUILT_IN_GOMP_BARRIER
)
2864 error_at (gimple_location (stmt
),
2865 "barrier region may not be closely nested inside "
2866 "of work-sharing, %<critical%>, %<ordered%>, "
2867 "%<master%>, explicit %<task%> or %<taskloop%> "
2871 error_at (gimple_location (stmt
),
2872 "work-sharing region may not be closely nested inside "
2873 "of work-sharing, %<critical%>, %<ordered%>, "
2874 "%<master%>, explicit %<task%> or %<taskloop%> region");
2876 case GIMPLE_OMP_PARALLEL
:
2877 case GIMPLE_OMP_TEAMS
:
2879 case GIMPLE_OMP_TARGET
:
2880 if (gimple_omp_target_kind (ctx
->stmt
)
2881 == GF_OMP_TARGET_KIND_REGION
)
2888 case GIMPLE_OMP_MASTER
:
2889 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2890 switch (gimple_code (ctx
->stmt
))
2892 case GIMPLE_OMP_FOR
:
2893 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
2894 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
2897 case GIMPLE_OMP_SECTIONS
:
2898 case GIMPLE_OMP_SINGLE
:
2899 case GIMPLE_OMP_TASK
:
2900 error_at (gimple_location (stmt
),
2901 "%<master%> region may not be closely nested inside "
2902 "of work-sharing, explicit %<task%> or %<taskloop%> "
2905 case GIMPLE_OMP_PARALLEL
:
2906 case GIMPLE_OMP_TEAMS
:
2908 case GIMPLE_OMP_TARGET
:
2909 if (gimple_omp_target_kind (ctx
->stmt
)
2910 == GF_OMP_TARGET_KIND_REGION
)
2917 case GIMPLE_OMP_TASK
:
2918 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2919 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
2920 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
2921 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
2923 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
2924 error_at (OMP_CLAUSE_LOCATION (c
),
2925 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2926 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
2930 case GIMPLE_OMP_ORDERED
:
2931 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2932 c
; c
= OMP_CLAUSE_CHAIN (c
))
2934 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
2936 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
2937 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
2940 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
2941 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
2942 || kind
== OMP_CLAUSE_DEPEND_SINK
)
2945 /* Look for containing ordered(N) loop. */
2947 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2949 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2950 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
2952 error_at (OMP_CLAUSE_LOCATION (c
),
2953 "%<ordered%> construct with %<depend%> clause "
2954 "must be closely nested inside an %<ordered%> "
2958 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
2960 error_at (OMP_CLAUSE_LOCATION (c
),
2961 "%<ordered%> construct with %<depend%> clause "
2962 "must be closely nested inside a loop with "
2963 "%<ordered%> clause with a parameter");
2969 error_at (OMP_CLAUSE_LOCATION (c
),
2970 "invalid depend kind in omp %<ordered%> %<depend%>");
2974 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2975 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2977 /* ordered simd must be closely nested inside of simd region,
2978 and simd region must not encounter constructs other than
2979 ordered simd, therefore ordered simd may be either orphaned,
2980 or ctx->stmt must be simd. The latter case is handled already
2984 error_at (gimple_location (stmt
),
2985 "%<ordered%> %<simd%> must be closely nested inside "
2990 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2991 switch (gimple_code (ctx
->stmt
))
2993 case GIMPLE_OMP_CRITICAL
:
2994 case GIMPLE_OMP_TASK
:
2995 case GIMPLE_OMP_ORDERED
:
2996 ordered_in_taskloop
:
2997 error_at (gimple_location (stmt
),
2998 "%<ordered%> region may not be closely nested inside "
2999 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3000 "%<taskloop%> region");
3002 case GIMPLE_OMP_FOR
:
3003 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3004 goto ordered_in_taskloop
;
3006 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3007 OMP_CLAUSE_ORDERED
);
3010 error_at (gimple_location (stmt
),
3011 "%<ordered%> region must be closely nested inside "
3012 "a loop region with an %<ordered%> clause");
3015 if (OMP_CLAUSE_ORDERED_EXPR (o
) != NULL_TREE
3016 && omp_find_clause (c
, OMP_CLAUSE_DEPEND
) == NULL_TREE
)
3018 error_at (gimple_location (stmt
),
3019 "%<ordered%> region without %<depend%> clause may "
3020 "not be closely nested inside a loop region with "
3021 "an %<ordered%> clause with a parameter");
3025 case GIMPLE_OMP_TARGET
:
3026 if (gimple_omp_target_kind (ctx
->stmt
)
3027 != GF_OMP_TARGET_KIND_REGION
)
3030 case GIMPLE_OMP_PARALLEL
:
3031 case GIMPLE_OMP_TEAMS
:
3032 error_at (gimple_location (stmt
),
3033 "%<ordered%> region must be closely nested inside "
3034 "a loop region with an %<ordered%> clause");
3040 case GIMPLE_OMP_CRITICAL
:
3043 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3044 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3045 if (gomp_critical
*other_crit
3046 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3047 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3049 error_at (gimple_location (stmt
),
3050 "%<critical%> region may not be nested inside "
3051 "a %<critical%> region with the same name");
3056 case GIMPLE_OMP_TEAMS
:
3059 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3060 || (gimple_omp_target_kind (ctx
->stmt
)
3061 != GF_OMP_TARGET_KIND_REGION
))
3063 /* Teams construct can appear either strictly nested inside of
3064 target construct with no intervening stmts, or can be encountered
3065 only by initial task (so must not appear inside any OpenMP
3067 error_at (gimple_location (stmt
),
3068 "%<teams%> construct must be closely nested inside of "
3069 "%<target%> construct or not nested in any OpenMP "
3074 case GIMPLE_OMP_TARGET
:
3075 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3076 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3077 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3078 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3080 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3081 error_at (OMP_CLAUSE_LOCATION (c
),
3082 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3083 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3086 if (is_gimple_omp_offloaded (stmt
)
3087 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3089 error_at (gimple_location (stmt
),
3090 "OpenACC region inside of OpenACC routine, nested "
3091 "parallelism not supported yet");
3094 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3096 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3098 if (is_gimple_omp (stmt
)
3099 && is_gimple_omp_oacc (stmt
)
3100 && is_gimple_omp (ctx
->stmt
))
3102 error_at (gimple_location (stmt
),
3103 "OpenACC construct inside of non-OpenACC region");
3109 const char *stmt_name
, *ctx_stmt_name
;
3110 switch (gimple_omp_target_kind (stmt
))
3112 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3113 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3114 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3115 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3116 stmt_name
= "target enter data"; break;
3117 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3118 stmt_name
= "target exit data"; break;
3119 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3120 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3121 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3122 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3123 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
3124 stmt_name
= "enter/exit data"; break;
3125 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3126 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3128 default: gcc_unreachable ();
3130 switch (gimple_omp_target_kind (ctx
->stmt
))
3132 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3133 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3134 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3135 ctx_stmt_name
= "parallel"; break;
3136 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3137 ctx_stmt_name
= "kernels"; break;
3138 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3139 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3140 ctx_stmt_name
= "host_data"; break;
3141 default: gcc_unreachable ();
3144 /* OpenACC/OpenMP mismatch? */
3145 if (is_gimple_omp_oacc (stmt
)
3146 != is_gimple_omp_oacc (ctx
->stmt
))
3148 error_at (gimple_location (stmt
),
3149 "%s %qs construct inside of %s %qs region",
3150 (is_gimple_omp_oacc (stmt
)
3151 ? "OpenACC" : "OpenMP"), stmt_name
,
3152 (is_gimple_omp_oacc (ctx
->stmt
)
3153 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3156 if (is_gimple_omp_offloaded (ctx
->stmt
))
3158 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3159 if (is_gimple_omp_oacc (ctx
->stmt
))
3161 error_at (gimple_location (stmt
),
3162 "%qs construct inside of %qs region",
3163 stmt_name
, ctx_stmt_name
);
3168 warning_at (gimple_location (stmt
), 0,
3169 "%qs construct inside of %qs region",
3170 stmt_name
, ctx_stmt_name
);
3182 /* Helper function scan_omp.
3184 Callback for walk_tree or operators in walk_gimple_stmt used to
3185 scan for OMP directives in TP. */
3188 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3190 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3191 omp_context
*ctx
= (omp_context
*) wi
->info
;
3194 switch (TREE_CODE (t
))
3202 tree repl
= remap_decl (t
, &ctx
->cb
);
3203 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3209 if (ctx
&& TYPE_P (t
))
3210 *tp
= remap_type (t
, &ctx
->cb
);
3211 else if (!DECL_P (t
))
3216 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3217 if (tem
!= TREE_TYPE (t
))
3219 if (TREE_CODE (t
) == INTEGER_CST
)
3220 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3222 TREE_TYPE (t
) = tem
;
3232 /* Return true if FNDECL is a setjmp or a longjmp. */
3235 setjmp_or_longjmp_p (const_tree fndecl
)
3237 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3238 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
3241 tree declname
= DECL_NAME (fndecl
);
3244 const char *name
= IDENTIFIER_POINTER (declname
);
3245 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
3249 /* Helper function for scan_omp.
3251 Callback for walk_gimple_stmt used to scan for OMP directives in
3252 the current statement in GSI. */
3255 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3256 struct walk_stmt_info
*wi
)
3258 gimple
*stmt
= gsi_stmt (*gsi
);
3259 omp_context
*ctx
= (omp_context
*) wi
->info
;
3261 if (gimple_has_location (stmt
))
3262 input_location
= gimple_location (stmt
);
3264 /* Check the nesting restrictions. */
3265 bool remove
= false;
3266 if (is_gimple_omp (stmt
))
3267 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3268 else if (is_gimple_call (stmt
))
3270 tree fndecl
= gimple_call_fndecl (stmt
);
3273 if (setjmp_or_longjmp_p (fndecl
)
3275 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3276 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
3279 error_at (gimple_location (stmt
),
3280 "setjmp/longjmp inside simd construct");
3282 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3283 switch (DECL_FUNCTION_CODE (fndecl
))
3285 case BUILT_IN_GOMP_BARRIER
:
3286 case BUILT_IN_GOMP_CANCEL
:
3287 case BUILT_IN_GOMP_CANCELLATION_POINT
:
3288 case BUILT_IN_GOMP_TASKYIELD
:
3289 case BUILT_IN_GOMP_TASKWAIT
:
3290 case BUILT_IN_GOMP_TASKGROUP_START
:
3291 case BUILT_IN_GOMP_TASKGROUP_END
:
3292 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3301 stmt
= gimple_build_nop ();
3302 gsi_replace (gsi
, stmt
, false);
3305 *handled_ops_p
= true;
3307 switch (gimple_code (stmt
))
3309 case GIMPLE_OMP_PARALLEL
:
3310 taskreg_nesting_level
++;
3311 scan_omp_parallel (gsi
, ctx
);
3312 taskreg_nesting_level
--;
3315 case GIMPLE_OMP_TASK
:
3316 taskreg_nesting_level
++;
3317 scan_omp_task (gsi
, ctx
);
3318 taskreg_nesting_level
--;
3321 case GIMPLE_OMP_FOR
:
3322 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3323 == GF_OMP_FOR_KIND_SIMD
)
3324 && omp_maybe_offloaded_ctx (ctx
)
3325 && omp_max_simt_vf ())
3326 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3328 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
3331 case GIMPLE_OMP_SECTIONS
:
3332 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
3335 case GIMPLE_OMP_SINGLE
:
3336 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
3339 case GIMPLE_OMP_SCAN
:
3340 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
3342 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
3343 ctx
->scan_inclusive
= true;
3344 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
3345 ctx
->scan_exclusive
= true;
3348 case GIMPLE_OMP_SECTION
:
3349 case GIMPLE_OMP_MASTER
:
3350 case GIMPLE_OMP_ORDERED
:
3351 case GIMPLE_OMP_CRITICAL
:
3352 case GIMPLE_OMP_GRID_BODY
:
3353 ctx
= new_omp_context (stmt
, ctx
);
3354 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3357 case GIMPLE_OMP_TASKGROUP
:
3358 ctx
= new_omp_context (stmt
, ctx
);
3359 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
3360 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3363 case GIMPLE_OMP_TARGET
:
3364 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3367 case GIMPLE_OMP_TEAMS
:
3368 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
3370 taskreg_nesting_level
++;
3371 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3372 taskreg_nesting_level
--;
3375 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3382 *handled_ops_p
= false;
3384 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
3386 var
= DECL_CHAIN (var
))
3387 insert_decl_map (&ctx
->cb
, var
, var
);
3391 *handled_ops_p
= false;
3399 /* Scan all the statements starting at the current statement. CTX
3400 contains context information about the OMP directives and
3401 clauses found during the scan. */
3404 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
3406 location_t saved_location
;
3407 struct walk_stmt_info wi
;
3409 memset (&wi
, 0, sizeof (wi
));
3411 wi
.want_locations
= true;
3413 saved_location
= input_location
;
3414 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
3415 input_location
= saved_location
;
3418 /* Re-gimplification and code generation routines. */
3420 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3421 of BIND if in a method. */
3424 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
3426 if (DECL_ARGUMENTS (current_function_decl
)
3427 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
3428 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
3431 tree vars
= gimple_bind_vars (bind
);
3432 for (tree
*pvar
= &vars
; *pvar
; )
3433 if (omp_member_access_dummy_var (*pvar
))
3434 *pvar
= DECL_CHAIN (*pvar
);
3436 pvar
= &DECL_CHAIN (*pvar
);
3437 gimple_bind_set_vars (bind
, vars
);
3441 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3442 block and its subblocks. */
3445 remove_member_access_dummy_vars (tree block
)
3447 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
3448 if (omp_member_access_dummy_var (*pvar
))
3449 *pvar
= DECL_CHAIN (*pvar
);
3451 pvar
= &DECL_CHAIN (*pvar
);
3453 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
3454 remove_member_access_dummy_vars (block
);
3457 /* If a context was created for STMT when it was scanned, return it. */
3459 static omp_context
*
3460 maybe_lookup_ctx (gimple
*stmt
)
3463 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
3464 return n
? (omp_context
*) n
->value
: NULL
;
3468 /* Find the mapping for DECL in CTX or the immediately enclosing
3469 context that has a mapping for DECL.
3471 If CTX is a nested parallel directive, we may have to use the decl
3472 mappings created in CTX's parent context. Suppose that we have the
3473 following parallel nesting (variable UIDs showed for clarity):
3476 #omp parallel shared(iD.1562) -> outer parallel
3477 iD.1562 = iD.1562 + 1;
3479 #omp parallel shared (iD.1562) -> inner parallel
3480 iD.1562 = iD.1562 - 1;
3482 Each parallel structure will create a distinct .omp_data_s structure
3483 for copying iD.1562 in/out of the directive:
3485 outer parallel .omp_data_s.1.i -> iD.1562
3486 inner parallel .omp_data_s.2.i -> iD.1562
3488 A shared variable mapping will produce a copy-out operation before
3489 the parallel directive and a copy-in operation after it. So, in
3490 this case we would have:
3493 .omp_data_o.1.i = iD.1562;
3494 #omp parallel shared(iD.1562) -> outer parallel
3495 .omp_data_i.1 = &.omp_data_o.1
3496 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3498 .omp_data_o.2.i = iD.1562; -> **
3499 #omp parallel shared(iD.1562) -> inner parallel
3500 .omp_data_i.2 = &.omp_data_o.2
3501 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3504 ** This is a problem. The symbol iD.1562 cannot be referenced
3505 inside the body of the outer parallel region. But since we are
3506 emitting this copy operation while expanding the inner parallel
3507 directive, we need to access the CTX structure of the outer
3508 parallel directive to get the correct mapping:
3510 .omp_data_o.2.i = .omp_data_i.1->i
3512 Since there may be other workshare or parallel directives enclosing
3513 the parallel directive, it may be necessary to walk up the context
3514 parent chain. This is not a problem in general because nested
3515 parallelism happens only rarely. */
3518 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3523 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3524 t
= maybe_lookup_decl (decl
, up
);
3526 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
3528 return t
? t
: decl
;
3532 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3533 in outer contexts. */
3536 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3541 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3542 t
= maybe_lookup_decl (decl
, up
);
3544 return t
? t
: decl
;
3548 /* Construct the initialization value for reduction operation OP. */
3551 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
3560 case TRUTH_ORIF_EXPR
:
3561 case TRUTH_XOR_EXPR
:
3563 return build_zero_cst (type
);
3566 case TRUTH_AND_EXPR
:
3567 case TRUTH_ANDIF_EXPR
:
3569 return fold_convert_loc (loc
, type
, integer_one_node
);
3572 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
3575 if (SCALAR_FLOAT_TYPE_P (type
))
3577 REAL_VALUE_TYPE max
, min
;
3578 if (HONOR_INFINITIES (type
))
3581 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
3584 real_maxval (&min
, 1, TYPE_MODE (type
));
3585 return build_real (type
, min
);
3587 else if (POINTER_TYPE_P (type
))
3590 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3591 return wide_int_to_tree (type
, min
);
3595 gcc_assert (INTEGRAL_TYPE_P (type
));
3596 return TYPE_MIN_VALUE (type
);
3600 if (SCALAR_FLOAT_TYPE_P (type
))
3602 REAL_VALUE_TYPE max
;
3603 if (HONOR_INFINITIES (type
))
3606 real_maxval (&max
, 0, TYPE_MODE (type
));
3607 return build_real (type
, max
);
3609 else if (POINTER_TYPE_P (type
))
3612 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3613 return wide_int_to_tree (type
, max
);
3617 gcc_assert (INTEGRAL_TYPE_P (type
));
3618 return TYPE_MAX_VALUE (type
);
3626 /* Construct the initialization value for reduction CLAUSE. */
3629 omp_reduction_init (tree clause
, tree type
)
3631 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
3632 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
3635 /* Return alignment to be assumed for var in CLAUSE, which should be
3636 OMP_CLAUSE_ALIGNED. */
3639 omp_clause_aligned_alignment (tree clause
)
3641 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
3642 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
3644 /* Otherwise return implementation defined alignment. */
3645 unsigned int al
= 1;
3646 opt_scalar_mode mode_iter
;
3647 auto_vector_sizes sizes
;
3648 targetm
.vectorize
.autovectorize_vector_sizes (&sizes
, true);
3650 for (unsigned int i
= 0; i
< sizes
.length (); ++i
)
3651 vs
= ordered_max (vs
, sizes
[i
]);
3652 static enum mode_class classes
[]
3653 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
3654 for (int i
= 0; i
< 4; i
+= 2)
3655 /* The for loop above dictates that we only walk through scalar classes. */
3656 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
3658 scalar_mode mode
= mode_iter
.require ();
3659 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
3660 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
3662 while (maybe_ne (vs
, 0U)
3663 && known_lt (GET_MODE_SIZE (vmode
), vs
)
3664 && GET_MODE_2XWIDER_MODE (vmode
).exists ())
3665 vmode
= GET_MODE_2XWIDER_MODE (vmode
).require ();
3667 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
3668 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
3670 poly_uint64 nelts
= exact_div (GET_MODE_SIZE (vmode
),
3671 GET_MODE_SIZE (mode
));
3672 type
= build_vector_type (type
, nelts
);
3673 if (TYPE_MODE (type
) != vmode
)
3675 if (TYPE_ALIGN_UNIT (type
) > al
)
3676 al
= TYPE_ALIGN_UNIT (type
);
3678 return build_int_cst (integer_type_node
, al
);
3682 /* This structure is part of the interface between lower_rec_simd_input_clauses
3683 and lower_rec_input_clauses. */
3685 struct omplow_simd_context
{
3686 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3690 vec
<tree
, va_heap
> simt_eargs
;
3691 gimple_seq simt_dlist
;
3692 poly_uint64_pod max_vf
;
3696 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3700 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
3701 omplow_simd_context
*sctx
, tree
&ivar
,
3702 tree
&lvar
, tree
*rvar
= NULL
,
3705 if (known_eq (sctx
->max_vf
, 0U))
3707 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
3708 if (maybe_gt (sctx
->max_vf
, 1U))
3710 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3711 OMP_CLAUSE_SAFELEN
);
3714 poly_uint64 safe_len
;
3715 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
3716 || maybe_lt (safe_len
, 1U))
3719 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
3722 if (maybe_gt (sctx
->max_vf
, 1U))
3724 sctx
->idx
= create_tmp_var (unsigned_type_node
);
3725 sctx
->lane
= create_tmp_var (unsigned_type_node
);
3728 if (known_eq (sctx
->max_vf
, 1U))
3733 if (is_gimple_reg (new_var
))
3735 ivar
= lvar
= new_var
;
3738 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
3739 ivar
= lvar
= create_tmp_var (type
);
3740 TREE_ADDRESSABLE (ivar
) = 1;
3741 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
3742 NULL
, DECL_ATTRIBUTES (ivar
));
3743 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
3744 tree clobber
= build_constructor (type
, NULL
);
3745 TREE_THIS_VOLATILE (clobber
) = 1;
3746 gimple
*g
= gimple_build_assign (ivar
, clobber
);
3747 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
3751 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
3752 tree avar
= create_tmp_var_raw (atype
);
3753 if (TREE_ADDRESSABLE (new_var
))
3754 TREE_ADDRESSABLE (avar
) = 1;
3755 DECL_ATTRIBUTES (avar
)
3756 = tree_cons (get_identifier ("omp simd array"), NULL
,
3757 DECL_ATTRIBUTES (avar
));
3758 gimple_add_tmp_var (avar
);
3762 /* For inscan reductions, create another array temporary,
3763 which will hold the reduced value. */
3764 iavar
= create_tmp_var_raw (atype
);
3765 if (TREE_ADDRESSABLE (new_var
))
3766 TREE_ADDRESSABLE (iavar
) = 1;
3767 DECL_ATTRIBUTES (iavar
)
3768 = tree_cons (get_identifier ("omp simd array"), NULL
,
3769 tree_cons (get_identifier ("omp simd inscan"), NULL
,
3770 DECL_ATTRIBUTES (iavar
)));
3771 gimple_add_tmp_var (iavar
);
3772 ctx
->cb
.decl_map
->put (avar
, iavar
);
3773 if (sctx
->lastlane
== NULL_TREE
)
3774 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
3775 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
3776 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
3777 TREE_THIS_NOTRAP (*rvar
) = 1;
3779 if (ctx
->scan_exclusive
)
3781 /* And for exclusive scan yet another one, which will
3782 hold the value during the scan phase. */
3783 tree savar
= create_tmp_var_raw (atype
);
3784 if (TREE_ADDRESSABLE (new_var
))
3785 TREE_ADDRESSABLE (savar
) = 1;
3786 DECL_ATTRIBUTES (savar
)
3787 = tree_cons (get_identifier ("omp simd array"), NULL
,
3788 tree_cons (get_identifier ("omp simd inscan "
3790 DECL_ATTRIBUTES (savar
)));
3791 gimple_add_tmp_var (savar
);
3792 ctx
->cb
.decl_map
->put (iavar
, savar
);
3793 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
3794 sctx
->idx
, NULL_TREE
, NULL_TREE
);
3795 TREE_THIS_NOTRAP (*rvar2
) = 1;
3798 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
3799 NULL_TREE
, NULL_TREE
);
3800 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
3801 NULL_TREE
, NULL_TREE
);
3802 TREE_THIS_NOTRAP (ivar
) = 1;
3803 TREE_THIS_NOTRAP (lvar
) = 1;
3805 if (DECL_P (new_var
))
3807 SET_DECL_VALUE_EXPR (new_var
, lvar
);
3808 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3813 /* Helper function of lower_rec_input_clauses. For a reference
3814 in simd reduction, add an underlying variable it will reference. */
3817 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
3819 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
3820 if (TREE_CONSTANT (z
))
3822 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
3823 get_name (new_vard
));
3824 gimple_add_tmp_var (z
);
3825 TREE_ADDRESSABLE (z
) = 1;
3826 z
= build_fold_addr_expr_loc (loc
, z
);
3827 gimplify_assign (new_vard
, z
, ilist
);
3831 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
3832 code to emit (type) (tskred_temp[idx]). */
3835 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
3838 unsigned HOST_WIDE_INT sz
3839 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
3840 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
3841 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
3843 tree v
= create_tmp_var (pointer_sized_int_node
);
3844 gimple
*g
= gimple_build_assign (v
, r
);
3845 gimple_seq_add_stmt (ilist
, g
);
3846 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
3848 v
= create_tmp_var (type
);
3849 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
3850 gimple_seq_add_stmt (ilist
, g
);
3855 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3856 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3857 private variables. Initialization statements go in ILIST, while calls
3858 to destructors go in DLIST. */
3861 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
3862 omp_context
*ctx
, struct omp_for_data
*fd
)
3864 tree c
, copyin_seq
, x
, ptr
;
3865 bool copyin_by_ref
= false;
3866 bool lastprivate_firstprivate
= false;
3867 bool reduction_omp_orig_ref
= false;
3869 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3870 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
);
3871 omplow_simd_context sctx
= omplow_simd_context ();
3872 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
3873 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
3874 gimple_seq llist
[4] = { };
3875 tree nonconst_simd_if
= NULL_TREE
;
3878 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
3880 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3881 with data sharing clauses referencing variable sized vars. That
3882 is unnecessarily hard to support and very unlikely to result in
3883 vectorized code anyway. */
3885 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3886 switch (OMP_CLAUSE_CODE (c
))
3888 case OMP_CLAUSE_LINEAR
:
3889 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
3892 case OMP_CLAUSE_PRIVATE
:
3893 case OMP_CLAUSE_FIRSTPRIVATE
:
3894 case OMP_CLAUSE_LASTPRIVATE
:
3895 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
3897 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
3899 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
3900 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
3904 case OMP_CLAUSE_REDUCTION
:
3905 case OMP_CLAUSE_IN_REDUCTION
:
3906 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
3907 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
3909 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
3911 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
3912 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
3917 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
3919 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
3920 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
3922 case OMP_CLAUSE_SIMDLEN
:
3923 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
3926 case OMP_CLAUSE__CONDTEMP_
:
3927 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
3935 /* Add a placeholder for simduid. */
3936 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
3937 sctx
.simt_eargs
.safe_push (NULL_TREE
);
3939 unsigned task_reduction_cnt
= 0;
3940 unsigned task_reduction_cntorig
= 0;
3941 unsigned task_reduction_cnt_full
= 0;
3942 unsigned task_reduction_cntorig_full
= 0;
3943 unsigned task_reduction_other_cnt
= 0;
3944 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
3945 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
3946 /* Do all the fixed sized types in the first pass, and the variable sized
3947 types in the second pass. This makes sure that the scalar arguments to
3948 the variable sized types are processed before we use them in the
3949 variable sized operations. For task reductions we use 4 passes, in the
3950 first two we ignore them, in the third one gather arguments for
3951 GOMP_task_reduction_remap call and in the last pass actually handle
3952 the task reductions. */
3953 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
3956 if (pass
== 2 && task_reduction_cnt
)
3959 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
3960 + task_reduction_cntorig
);
3961 tskred_avar
= create_tmp_var_raw (tskred_atype
);
3962 gimple_add_tmp_var (tskred_avar
);
3963 TREE_ADDRESSABLE (tskred_avar
) = 1;
3964 task_reduction_cnt_full
= task_reduction_cnt
;
3965 task_reduction_cntorig_full
= task_reduction_cntorig
;
3967 else if (pass
== 3 && task_reduction_cnt
)
3969 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
3971 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
3972 size_int (task_reduction_cntorig
),
3973 build_fold_addr_expr (tskred_avar
));
3974 gimple_seq_add_stmt (ilist
, g
);
3976 if (pass
== 3 && task_reduction_other_cnt
)
3978 /* For reduction clauses, build
3979 tskred_base = (void *) tskred_temp[2]
3980 + omp_get_thread_num () * tskred_temp[1]
3981 or if tskred_temp[1] is known to be constant, that constant
3982 directly. This is the start of the private reduction copy block
3983 for the current thread. */
3984 tree v
= create_tmp_var (integer_type_node
);
3985 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
3986 gimple
*g
= gimple_build_call (x
, 0);
3987 gimple_call_set_lhs (g
, v
);
3988 gimple_seq_add_stmt (ilist
, g
);
3989 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
3990 tskred_temp
= OMP_CLAUSE_DECL (c
);
3991 if (is_taskreg_ctx (ctx
))
3992 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
3993 tree v2
= create_tmp_var (sizetype
);
3994 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
3995 gimple_seq_add_stmt (ilist
, g
);
3996 if (ctx
->task_reductions
[0])
3997 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
3999 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
4000 tree v3
= create_tmp_var (sizetype
);
4001 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
4002 gimple_seq_add_stmt (ilist
, g
);
4003 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
4004 tskred_base
= create_tmp_var (ptr_type_node
);
4005 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
4006 gimple_seq_add_stmt (ilist
, g
);
4008 task_reduction_cnt
= 0;
4009 task_reduction_cntorig
= 0;
4010 task_reduction_other_cnt
= 0;
4011 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4013 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
4016 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4017 bool task_reduction_p
= false;
4018 bool task_reduction_needs_orig_p
= false;
4019 tree cond
= NULL_TREE
;
4023 case OMP_CLAUSE_PRIVATE
:
4024 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
4027 case OMP_CLAUSE_SHARED
:
4028 /* Ignore shared directives in teams construct inside
4029 of target construct. */
4030 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4031 && !is_host_teams_ctx (ctx
))
4033 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
4035 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
4036 || is_global_var (OMP_CLAUSE_DECL (c
)));
4039 case OMP_CLAUSE_FIRSTPRIVATE
:
4040 case OMP_CLAUSE_COPYIN
:
4042 case OMP_CLAUSE_LINEAR
:
4043 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
4044 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4045 lastprivate_firstprivate
= true;
4047 case OMP_CLAUSE_REDUCTION
:
4048 case OMP_CLAUSE_IN_REDUCTION
:
4049 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
4051 task_reduction_p
= true;
4052 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
4054 task_reduction_other_cnt
++;
4059 task_reduction_cnt
++;
4060 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4062 var
= OMP_CLAUSE_DECL (c
);
4063 /* If var is a global variable that isn't privatized
4064 in outer contexts, we don't need to look up the
4065 original address, it is always the address of the
4066 global variable itself. */
4068 || omp_is_reference (var
)
4070 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4072 task_reduction_needs_orig_p
= true;
4073 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4074 task_reduction_cntorig
++;
4078 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4079 reduction_omp_orig_ref
= true;
4081 case OMP_CLAUSE__REDUCTEMP_
:
4082 if (!is_taskreg_ctx (ctx
))
4085 case OMP_CLAUSE__LOOPTEMP_
:
4086 /* Handle _looptemp_/_reductemp_ clauses only on
4091 case OMP_CLAUSE_LASTPRIVATE
:
4092 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4094 lastprivate_firstprivate
= true;
4095 if (pass
!= 0 || is_taskloop_ctx (ctx
))
4098 /* Even without corresponding firstprivate, if
4099 decl is Fortran allocatable, it needs outer var
4102 && lang_hooks
.decls
.omp_private_outer_ref
4103 (OMP_CLAUSE_DECL (c
)))
4104 lastprivate_firstprivate
= true;
4106 case OMP_CLAUSE_ALIGNED
:
4109 var
= OMP_CLAUSE_DECL (c
);
4110 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
4111 && !is_global_var (var
))
4113 new_var
= maybe_lookup_decl (var
, ctx
);
4114 if (new_var
== NULL_TREE
)
4115 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4116 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4117 tree alarg
= omp_clause_aligned_alignment (c
);
4118 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4119 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
4120 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4121 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4122 gimplify_and_add (x
, ilist
);
4124 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
4125 && is_global_var (var
))
4127 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
4128 new_var
= lookup_decl (var
, ctx
);
4129 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4130 t
= build_fold_addr_expr_loc (clause_loc
, t
);
4131 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4132 tree alarg
= omp_clause_aligned_alignment (c
);
4133 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4134 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
4135 t
= fold_convert_loc (clause_loc
, ptype
, t
);
4136 x
= create_tmp_var (ptype
);
4137 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
4138 gimplify_and_add (t
, ilist
);
4139 t
= build_simple_mem_ref_loc (clause_loc
, x
);
4140 SET_DECL_VALUE_EXPR (new_var
, t
);
4141 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4144 case OMP_CLAUSE__CONDTEMP_
:
4145 if (is_parallel_ctx (ctx
)
4146 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
4153 if (task_reduction_p
!= (pass
>= 2))
4156 new_var
= var
= OMP_CLAUSE_DECL (c
);
4157 if ((c_kind
== OMP_CLAUSE_REDUCTION
4158 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4159 && TREE_CODE (var
) == MEM_REF
)
4161 var
= TREE_OPERAND (var
, 0);
4162 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
4163 var
= TREE_OPERAND (var
, 0);
4164 if (TREE_CODE (var
) == INDIRECT_REF
4165 || TREE_CODE (var
) == ADDR_EXPR
)
4166 var
= TREE_OPERAND (var
, 0);
4167 if (is_variable_sized (var
))
4169 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
4170 var
= DECL_VALUE_EXPR (var
);
4171 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
4172 var
= TREE_OPERAND (var
, 0);
4173 gcc_assert (DECL_P (var
));
4177 if (c_kind
!= OMP_CLAUSE_COPYIN
)
4178 new_var
= lookup_decl (var
, ctx
);
4180 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
4185 /* C/C++ array section reductions. */
4186 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4187 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4188 && var
!= OMP_CLAUSE_DECL (c
))
4193 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
4194 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
4196 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
4198 tree b
= TREE_OPERAND (orig_var
, 1);
4199 b
= maybe_lookup_decl (b
, ctx
);
4202 b
= TREE_OPERAND (orig_var
, 1);
4203 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
4205 if (integer_zerop (bias
))
4209 bias
= fold_convert_loc (clause_loc
,
4210 TREE_TYPE (b
), bias
);
4211 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4212 TREE_TYPE (b
), b
, bias
);
4214 orig_var
= TREE_OPERAND (orig_var
, 0);
4218 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4219 if (is_global_var (out
)
4220 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
4221 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
4222 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
4227 bool by_ref
= use_pointer_for_field (var
, NULL
);
4228 x
= build_receiver_ref (var
, by_ref
, ctx
);
4229 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
4230 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
4232 x
= build_fold_addr_expr (x
);
4234 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
4235 x
= build_simple_mem_ref (x
);
4236 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
4238 if (var
== TREE_OPERAND (orig_var
, 0))
4239 x
= build_fold_addr_expr (x
);
4241 bias
= fold_convert (sizetype
, bias
);
4242 x
= fold_convert (ptr_type_node
, x
);
4243 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
4244 TREE_TYPE (x
), x
, bias
);
4245 unsigned cnt
= task_reduction_cnt
- 1;
4246 if (!task_reduction_needs_orig_p
)
4247 cnt
+= (task_reduction_cntorig_full
4248 - task_reduction_cntorig
);
4250 cnt
= task_reduction_cntorig
- 1;
4251 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4252 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4253 gimplify_assign (r
, x
, ilist
);
4257 if (TREE_CODE (orig_var
) == INDIRECT_REF
4258 || TREE_CODE (orig_var
) == ADDR_EXPR
)
4259 orig_var
= TREE_OPERAND (orig_var
, 0);
4260 tree d
= OMP_CLAUSE_DECL (c
);
4261 tree type
= TREE_TYPE (d
);
4262 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
4263 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
4264 const char *name
= get_name (orig_var
);
4267 tree xv
= create_tmp_var (ptr_type_node
);
4268 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4270 unsigned cnt
= task_reduction_cnt
- 1;
4271 if (!task_reduction_needs_orig_p
)
4272 cnt
+= (task_reduction_cntorig_full
4273 - task_reduction_cntorig
);
4275 cnt
= task_reduction_cntorig
- 1;
4276 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4277 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4279 gimple
*g
= gimple_build_assign (xv
, x
);
4280 gimple_seq_add_stmt (ilist
, g
);
4284 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4286 if (ctx
->task_reductions
[1 + idx
])
4287 off
= fold_convert (sizetype
,
4288 ctx
->task_reductions
[1 + idx
]);
4290 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4292 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
4294 gimple_seq_add_stmt (ilist
, g
);
4296 x
= fold_convert (build_pointer_type (boolean_type_node
),
4298 if (TREE_CONSTANT (v
))
4299 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
4300 TYPE_SIZE_UNIT (type
));
4303 tree t
= maybe_lookup_decl (v
, ctx
);
4307 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4308 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
4310 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4312 build_int_cst (TREE_TYPE (v
), 1));
4313 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4315 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4316 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4318 cond
= create_tmp_var (TREE_TYPE (x
));
4319 gimplify_assign (cond
, x
, ilist
);
4322 else if (TREE_CONSTANT (v
))
4324 x
= create_tmp_var_raw (type
, name
);
4325 gimple_add_tmp_var (x
);
4326 TREE_ADDRESSABLE (x
) = 1;
4327 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4332 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4333 tree t
= maybe_lookup_decl (v
, ctx
);
4337 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4338 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
4339 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4341 build_int_cst (TREE_TYPE (v
), 1));
4342 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4344 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4345 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
4346 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
4349 tree ptype
= build_pointer_type (TREE_TYPE (type
));
4350 x
= fold_convert_loc (clause_loc
, ptype
, x
);
4351 tree y
= create_tmp_var (ptype
, name
);
4352 gimplify_assign (y
, x
, ilist
);
4356 if (!integer_zerop (bias
))
4358 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4360 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4362 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
4363 pointer_sized_int_node
, yb
, bias
);
4364 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
4365 yb
= create_tmp_var (ptype
, name
);
4366 gimplify_assign (yb
, x
, ilist
);
4370 d
= TREE_OPERAND (d
, 0);
4371 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
4372 d
= TREE_OPERAND (d
, 0);
4373 if (TREE_CODE (d
) == ADDR_EXPR
)
4375 if (orig_var
!= var
)
4377 gcc_assert (is_variable_sized (orig_var
));
4378 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
4380 gimplify_assign (new_var
, x
, ilist
);
4381 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
4382 tree t
= build_fold_indirect_ref (new_var
);
4383 DECL_IGNORED_P (new_var
) = 0;
4384 TREE_THIS_NOTRAP (t
) = 1;
4385 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
4386 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
4390 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
4391 build_int_cst (ptype
, 0));
4392 SET_DECL_VALUE_EXPR (new_var
, x
);
4393 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4398 gcc_assert (orig_var
== var
);
4399 if (TREE_CODE (d
) == INDIRECT_REF
)
4401 x
= create_tmp_var (ptype
, name
);
4402 TREE_ADDRESSABLE (x
) = 1;
4403 gimplify_assign (x
, yb
, ilist
);
4404 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4406 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4407 gimplify_assign (new_var
, x
, ilist
);
4409 /* GOMP_taskgroup_reduction_register memsets the whole
4410 array to zero. If the initializer is zero, we don't
4411 need to initialize it again, just mark it as ever
4412 used unconditionally, i.e. cond = true. */
4414 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
4415 && initializer_zerop (omp_reduction_init (c
,
4418 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
4420 gimple_seq_add_stmt (ilist
, g
);
4423 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4427 if (!is_parallel_ctx (ctx
))
4429 tree condv
= create_tmp_var (boolean_type_node
);
4430 g
= gimple_build_assign (condv
,
4431 build_simple_mem_ref (cond
));
4432 gimple_seq_add_stmt (ilist
, g
);
4433 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
4434 g
= gimple_build_cond (NE_EXPR
, condv
,
4435 boolean_false_node
, end
, lab1
);
4436 gimple_seq_add_stmt (ilist
, g
);
4437 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
4439 g
= gimple_build_assign (build_simple_mem_ref (cond
),
4441 gimple_seq_add_stmt (ilist
, g
);
4444 tree y1
= create_tmp_var (ptype
);
4445 gimplify_assign (y1
, y
, ilist
);
4446 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
4447 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
4448 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
4449 if (task_reduction_needs_orig_p
)
4451 y3
= create_tmp_var (ptype
);
4453 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4454 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4455 size_int (task_reduction_cnt_full
4456 + task_reduction_cntorig
- 1),
4457 NULL_TREE
, NULL_TREE
);
4460 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4461 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
4464 gimplify_assign (y3
, ref
, ilist
);
4466 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
4470 y2
= create_tmp_var (ptype
);
4471 gimplify_assign (y2
, y
, ilist
);
4473 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4475 tree ref
= build_outer_var_ref (var
, ctx
);
4476 /* For ref build_outer_var_ref already performs this. */
4477 if (TREE_CODE (d
) == INDIRECT_REF
)
4478 gcc_assert (omp_is_reference (var
));
4479 else if (TREE_CODE (d
) == ADDR_EXPR
)
4480 ref
= build_fold_addr_expr (ref
);
4481 else if (omp_is_reference (var
))
4482 ref
= build_fold_addr_expr (ref
);
4483 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
4484 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
4485 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4487 y3
= create_tmp_var (ptype
);
4488 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
4492 y4
= create_tmp_var (ptype
);
4493 gimplify_assign (y4
, ref
, dlist
);
4497 tree i
= create_tmp_var (TREE_TYPE (v
));
4498 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
4499 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4500 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
4503 i2
= create_tmp_var (TREE_TYPE (v
));
4504 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
4505 body2
= create_artificial_label (UNKNOWN_LOCATION
);
4506 end2
= create_artificial_label (UNKNOWN_LOCATION
);
4507 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
4509 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4511 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
4512 tree decl_placeholder
4513 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
4514 SET_DECL_VALUE_EXPR (decl_placeholder
,
4515 build_simple_mem_ref (y1
));
4516 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
4517 SET_DECL_VALUE_EXPR (placeholder
,
4518 y3
? build_simple_mem_ref (y3
)
4520 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
4521 x
= lang_hooks
.decls
.omp_clause_default_ctor
4522 (c
, build_simple_mem_ref (y1
),
4523 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
4525 gimplify_and_add (x
, ilist
);
4526 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4528 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4529 lower_omp (&tseq
, ctx
);
4530 gimple_seq_add_seq (ilist
, tseq
);
4532 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4535 SET_DECL_VALUE_EXPR (decl_placeholder
,
4536 build_simple_mem_ref (y2
));
4537 SET_DECL_VALUE_EXPR (placeholder
,
4538 build_simple_mem_ref (y4
));
4539 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4540 lower_omp (&tseq
, ctx
);
4541 gimple_seq_add_seq (dlist
, tseq
);
4542 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4544 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4545 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
4548 x
= lang_hooks
.decls
.omp_clause_dtor
4549 (c
, build_simple_mem_ref (y2
));
4551 gimplify_and_add (x
, dlist
);
4556 x
= omp_reduction_init (c
, TREE_TYPE (type
));
4557 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
4559 /* reduction(-:var) sums up the partial results, so it
4560 acts identically to reduction(+:var). */
4561 if (code
== MINUS_EXPR
)
4564 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
4567 x
= build2 (code
, TREE_TYPE (type
),
4568 build_simple_mem_ref (y4
),
4569 build_simple_mem_ref (y2
));
4570 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
4574 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
4575 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4576 gimple_seq_add_stmt (ilist
, g
);
4579 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
4580 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4581 gimple_seq_add_stmt (ilist
, g
);
4583 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
4584 build_int_cst (TREE_TYPE (i
), 1));
4585 gimple_seq_add_stmt (ilist
, g
);
4586 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
4587 gimple_seq_add_stmt (ilist
, g
);
4588 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
4591 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
4592 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4593 gimple_seq_add_stmt (dlist
, g
);
4596 g
= gimple_build_assign
4597 (y4
, POINTER_PLUS_EXPR
, y4
,
4598 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4599 gimple_seq_add_stmt (dlist
, g
);
4601 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
4602 build_int_cst (TREE_TYPE (i2
), 1));
4603 gimple_seq_add_stmt (dlist
, g
);
4604 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
4605 gimple_seq_add_stmt (dlist
, g
);
4606 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
4612 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4616 bool by_ref
= use_pointer_for_field (var
, ctx
);
4617 x
= build_receiver_ref (var
, by_ref
, ctx
);
4619 if (!omp_is_reference (var
))
4620 x
= build_fold_addr_expr (x
);
4621 x
= fold_convert (ptr_type_node
, x
);
4622 unsigned cnt
= task_reduction_cnt
- 1;
4623 if (!task_reduction_needs_orig_p
)
4624 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
4626 cnt
= task_reduction_cntorig
- 1;
4627 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4628 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4629 gimplify_assign (r
, x
, ilist
);
4634 tree type
= TREE_TYPE (new_var
);
4635 if (!omp_is_reference (var
))
4636 type
= build_pointer_type (type
);
4637 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4639 unsigned cnt
= task_reduction_cnt
- 1;
4640 if (!task_reduction_needs_orig_p
)
4641 cnt
+= (task_reduction_cntorig_full
4642 - task_reduction_cntorig
);
4644 cnt
= task_reduction_cntorig
- 1;
4645 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4646 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4650 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4652 if (ctx
->task_reductions
[1 + idx
])
4653 off
= fold_convert (sizetype
,
4654 ctx
->task_reductions
[1 + idx
]);
4656 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4658 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
4661 x
= fold_convert (type
, x
);
4663 if (omp_is_reference (var
))
4665 gimplify_assign (new_var
, x
, ilist
);
4667 new_var
= build_simple_mem_ref (new_var
);
4671 t
= create_tmp_var (type
);
4672 gimplify_assign (t
, x
, ilist
);
4673 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
4674 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4676 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
4677 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
4678 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4679 cond
= create_tmp_var (TREE_TYPE (t
));
4680 gimplify_assign (cond
, t
, ilist
);
4682 else if (is_variable_sized (var
))
4684 /* For variable sized types, we need to allocate the
4685 actual storage here. Call alloca and store the
4686 result in the pointer decl that we created elsewhere. */
4690 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
4695 ptr
= DECL_VALUE_EXPR (new_var
);
4696 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
4697 ptr
= TREE_OPERAND (ptr
, 0);
4698 gcc_assert (DECL_P (ptr
));
4699 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
4701 /* void *tmp = __builtin_alloca */
4702 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4703 stmt
= gimple_build_call (atmp
, 2, x
,
4704 size_int (DECL_ALIGN (var
)));
4705 tmp
= create_tmp_var_raw (ptr_type_node
);
4706 gimple_add_tmp_var (tmp
);
4707 gimple_call_set_lhs (stmt
, tmp
);
4709 gimple_seq_add_stmt (ilist
, stmt
);
4711 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
4712 gimplify_assign (ptr
, x
, ilist
);
4715 else if (omp_is_reference (var
)
4716 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
4717 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
4719 /* For references that are being privatized for Fortran,
4720 allocate new backing storage for the new pointer
4721 variable. This allows us to avoid changing all the
4722 code that expects a pointer to something that expects
4723 a direct variable. */
4727 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
4728 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
4730 x
= build_receiver_ref (var
, false, ctx
);
4731 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4733 else if (TREE_CONSTANT (x
))
4735 /* For reduction in SIMD loop, defer adding the
4736 initialization of the reference, because if we decide
4737 to use SIMD array for it, the initilization could cause
4738 expansion ICE. Ditto for other privatization clauses. */
4743 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
4745 gimple_add_tmp_var (x
);
4746 TREE_ADDRESSABLE (x
) = 1;
4747 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4753 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4754 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
4755 tree al
= size_int (TYPE_ALIGN (rtype
));
4756 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
4761 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4762 gimplify_assign (new_var
, x
, ilist
);
4765 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
4767 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4768 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4769 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4777 switch (OMP_CLAUSE_CODE (c
))
4779 case OMP_CLAUSE_SHARED
:
4780 /* Ignore shared directives in teams construct inside
4781 target construct. */
4782 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4783 && !is_host_teams_ctx (ctx
))
4785 /* Shared global vars are just accessed directly. */
4786 if (is_global_var (new_var
))
4788 /* For taskloop firstprivate/lastprivate, represented
4789 as firstprivate and shared clause on the task, new_var
4790 is the firstprivate var. */
4791 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
4793 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4794 needs to be delayed until after fixup_child_record_type so
4795 that we get the correct type during the dereference. */
4796 by_ref
= use_pointer_for_field (var
, ctx
);
4797 x
= build_receiver_ref (var
, by_ref
, ctx
);
4798 SET_DECL_VALUE_EXPR (new_var
, x
);
4799 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4801 /* ??? If VAR is not passed by reference, and the variable
4802 hasn't been initialized yet, then we'll get a warning for
4803 the store into the omp_data_s structure. Ideally, we'd be
4804 able to notice this and not store anything at all, but
4805 we're generating code too early. Suppress the warning. */
4807 TREE_NO_WARNING (var
) = 1;
4810 case OMP_CLAUSE__CONDTEMP_
:
4811 if (is_parallel_ctx (ctx
))
4813 x
= build_receiver_ref (var
, false, ctx
);
4814 SET_DECL_VALUE_EXPR (new_var
, x
);
4815 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4817 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
4819 x
= build_zero_cst (TREE_TYPE (var
));
4824 case OMP_CLAUSE_LASTPRIVATE
:
4825 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4829 case OMP_CLAUSE_PRIVATE
:
4830 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
4831 x
= build_outer_var_ref (var
, ctx
);
4832 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
4834 if (is_task_ctx (ctx
))
4835 x
= build_receiver_ref (var
, false, ctx
);
4837 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
4843 nx
= lang_hooks
.decls
.omp_clause_default_ctor
4844 (c
, unshare_expr (new_var
), x
);
4847 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
4848 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
4849 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4850 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
4851 || omp_is_reference (var
))
4852 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4855 if (omp_is_reference (var
))
4857 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
4858 tree new_vard
= TREE_OPERAND (new_var
, 0);
4859 gcc_assert (DECL_P (new_vard
));
4860 SET_DECL_VALUE_EXPR (new_vard
,
4861 build_fold_addr_expr (lvar
));
4862 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
4866 x
= lang_hooks
.decls
.omp_clause_default_ctor
4867 (c
, unshare_expr (ivar
), x
);
4868 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
4870 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
4871 unshare_expr (ivar
), x
);
4875 gimplify_and_add (x
, &llist
[0]);
4876 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4877 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
4882 gcc_assert (TREE_CODE (v
) == MEM_REF
);
4883 v
= TREE_OPERAND (v
, 0);
4884 gcc_assert (DECL_P (v
));
4886 v
= *ctx
->lastprivate_conditional_map
->get (v
);
4887 tree t
= create_tmp_var (TREE_TYPE (v
));
4888 tree z
= build_zero_cst (TREE_TYPE (v
));
4890 = build_outer_var_ref (var
, ctx
,
4891 OMP_CLAUSE_LASTPRIVATE
);
4892 gimple_seq_add_stmt (dlist
,
4893 gimple_build_assign (t
, z
));
4894 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
4895 tree civar
= DECL_VALUE_EXPR (v
);
4896 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
4897 civar
= unshare_expr (civar
);
4898 TREE_OPERAND (civar
, 1) = sctx
.idx
;
4899 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
4900 unshare_expr (civar
));
4901 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
4902 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
4903 orig_v
, unshare_expr (ivar
)));
4904 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
4906 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
4908 gimple_seq tseq
= NULL
;
4909 gimplify_and_add (x
, &tseq
);
4911 lower_omp (&tseq
, ctx
->outer
);
4912 gimple_seq_add_seq (&llist
[1], tseq
);
4916 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
4918 gimplify_and_add (y
, &llist
[1]);
4922 if (omp_is_reference (var
))
4924 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
4925 tree new_vard
= TREE_OPERAND (new_var
, 0);
4926 gcc_assert (DECL_P (new_vard
));
4927 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
4928 x
= TYPE_SIZE_UNIT (type
);
4929 if (TREE_CONSTANT (x
))
4931 x
= create_tmp_var_raw (type
, get_name (var
));
4932 gimple_add_tmp_var (x
);
4933 TREE_ADDRESSABLE (x
) = 1;
4934 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4935 x
= fold_convert_loc (clause_loc
,
4936 TREE_TYPE (new_vard
), x
);
4937 gimplify_assign (new_vard
, x
, ilist
);
4942 gimplify_and_add (nx
, ilist
);
4946 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
4948 gimplify_and_add (x
, dlist
);
4951 case OMP_CLAUSE_LINEAR
:
4952 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
4953 goto do_firstprivate
;
4954 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4957 x
= build_outer_var_ref (var
, ctx
);
4960 case OMP_CLAUSE_FIRSTPRIVATE
:
4961 if (is_task_ctx (ctx
))
4963 if ((omp_is_reference (var
)
4964 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
4965 || is_variable_sized (var
))
4967 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
4969 || use_pointer_for_field (var
, NULL
))
4971 x
= build_receiver_ref (var
, false, ctx
);
4972 SET_DECL_VALUE_EXPR (new_var
, x
);
4973 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4977 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
4978 && omp_is_reference (var
))
4980 x
= build_outer_var_ref (var
, ctx
);
4981 gcc_assert (TREE_CODE (x
) == MEM_REF
4982 && integer_zerop (TREE_OPERAND (x
, 1)));
4983 x
= TREE_OPERAND (x
, 0);
4984 x
= lang_hooks
.decls
.omp_clause_copy_ctor
4985 (c
, unshare_expr (new_var
), x
);
4986 gimplify_and_add (x
, ilist
);
4990 x
= build_outer_var_ref (var
, ctx
);
4993 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
4994 && gimple_omp_for_combined_into_p (ctx
->stmt
))
4996 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
4997 tree stept
= TREE_TYPE (t
);
4998 tree ct
= omp_find_clause (clauses
,
4999 OMP_CLAUSE__LOOPTEMP_
);
5001 tree l
= OMP_CLAUSE_DECL (ct
);
5002 tree n1
= fd
->loop
.n1
;
5003 tree step
= fd
->loop
.step
;
5004 tree itype
= TREE_TYPE (l
);
5005 if (POINTER_TYPE_P (itype
))
5006 itype
= signed_type_for (itype
);
5007 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
5008 if (TYPE_UNSIGNED (itype
)
5009 && fd
->loop
.cond_code
== GT_EXPR
)
5010 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
5011 fold_build1 (NEGATE_EXPR
, itype
, l
),
5012 fold_build1 (NEGATE_EXPR
,
5015 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
5016 t
= fold_build2 (MULT_EXPR
, stept
,
5017 fold_convert (stept
, l
), t
);
5019 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
5021 if (omp_is_reference (var
))
5023 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5024 tree new_vard
= TREE_OPERAND (new_var
, 0);
5025 gcc_assert (DECL_P (new_vard
));
5026 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5027 nx
= TYPE_SIZE_UNIT (type
);
5028 if (TREE_CONSTANT (nx
))
5030 nx
= create_tmp_var_raw (type
,
5032 gimple_add_tmp_var (nx
);
5033 TREE_ADDRESSABLE (nx
) = 1;
5034 nx
= build_fold_addr_expr_loc (clause_loc
,
5036 nx
= fold_convert_loc (clause_loc
,
5037 TREE_TYPE (new_vard
),
5039 gimplify_assign (new_vard
, nx
, ilist
);
5043 x
= lang_hooks
.decls
.omp_clause_linear_ctor
5045 gimplify_and_add (x
, ilist
);
5049 if (POINTER_TYPE_P (TREE_TYPE (x
)))
5050 x
= fold_build2 (POINTER_PLUS_EXPR
,
5051 TREE_TYPE (x
), x
, t
);
5053 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5056 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
5057 || TREE_ADDRESSABLE (new_var
)
5058 || omp_is_reference (var
))
5059 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5062 if (omp_is_reference (var
))
5064 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5065 tree new_vard
= TREE_OPERAND (new_var
, 0);
5066 gcc_assert (DECL_P (new_vard
));
5067 SET_DECL_VALUE_EXPR (new_vard
,
5068 build_fold_addr_expr (lvar
));
5069 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5071 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
5073 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
5074 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
5075 gimplify_and_add (x
, ilist
);
5076 gimple_stmt_iterator gsi
5077 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
5079 = gimple_build_assign (unshare_expr (lvar
), iv
);
5080 gsi_insert_before_without_update (&gsi
, g
,
5082 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5083 enum tree_code code
= PLUS_EXPR
;
5084 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
5085 code
= POINTER_PLUS_EXPR
;
5086 g
= gimple_build_assign (iv
, code
, iv
, t
);
5087 gsi_insert_before_without_update (&gsi
, g
,
5091 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5092 (c
, unshare_expr (ivar
), x
);
5093 gimplify_and_add (x
, &llist
[0]);
5094 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5096 gimplify_and_add (x
, &llist
[1]);
5099 if (omp_is_reference (var
))
5101 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5102 tree new_vard
= TREE_OPERAND (new_var
, 0);
5103 gcc_assert (DECL_P (new_vard
));
5104 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5105 nx
= TYPE_SIZE_UNIT (type
);
5106 if (TREE_CONSTANT (nx
))
5108 nx
= create_tmp_var_raw (type
, get_name (var
));
5109 gimple_add_tmp_var (nx
);
5110 TREE_ADDRESSABLE (nx
) = 1;
5111 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
5112 nx
= fold_convert_loc (clause_loc
,
5113 TREE_TYPE (new_vard
), nx
);
5114 gimplify_assign (new_vard
, nx
, ilist
);
5118 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5119 (c
, unshare_expr (new_var
), x
);
5120 gimplify_and_add (x
, ilist
);
5123 case OMP_CLAUSE__LOOPTEMP_
:
5124 case OMP_CLAUSE__REDUCTEMP_
:
5125 gcc_assert (is_taskreg_ctx (ctx
));
5126 x
= build_outer_var_ref (var
, ctx
);
5127 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5128 gimplify_and_add (x
, ilist
);
5131 case OMP_CLAUSE_COPYIN
:
5132 by_ref
= use_pointer_for_field (var
, NULL
);
5133 x
= build_receiver_ref (var
, by_ref
, ctx
);
5134 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
5135 append_to_statement_list (x
, ©in_seq
);
5136 copyin_by_ref
|= by_ref
;
5139 case OMP_CLAUSE_REDUCTION
:
5140 case OMP_CLAUSE_IN_REDUCTION
:
5141 /* OpenACC reductions are initialized using the
5142 GOACC_REDUCTION internal function. */
5143 if (is_gimple_omp_oacc (ctx
->stmt
))
5145 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5147 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5149 tree ptype
= TREE_TYPE (placeholder
);
5152 x
= error_mark_node
;
5153 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
5154 && !task_reduction_needs_orig_p
)
5156 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5158 tree pptype
= build_pointer_type (ptype
);
5159 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5160 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5161 size_int (task_reduction_cnt_full
5162 + task_reduction_cntorig
- 1),
5163 NULL_TREE
, NULL_TREE
);
5167 = *ctx
->task_reduction_map
->get (c
);
5168 x
= task_reduction_read (ilist
, tskred_temp
,
5169 pptype
, 7 + 3 * idx
);
5171 x
= fold_convert (pptype
, x
);
5172 x
= build_simple_mem_ref (x
);
5177 x
= build_outer_var_ref (var
, ctx
);
5179 if (omp_is_reference (var
)
5180 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
5181 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5183 SET_DECL_VALUE_EXPR (placeholder
, x
);
5184 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5185 tree new_vard
= new_var
;
5186 if (omp_is_reference (var
))
5188 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5189 new_vard
= TREE_OPERAND (new_var
, 0);
5190 gcc_assert (DECL_P (new_vard
));
5192 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5194 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5195 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5198 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5202 if (new_vard
== new_var
)
5204 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
5205 SET_DECL_VALUE_EXPR (new_var
, ivar
);
5209 SET_DECL_VALUE_EXPR (new_vard
,
5210 build_fold_addr_expr (ivar
));
5211 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5213 x
= lang_hooks
.decls
.omp_clause_default_ctor
5214 (c
, unshare_expr (ivar
),
5215 build_outer_var_ref (var
, ctx
));
5220 gimplify_and_add (x
, &llist
[0]);
5222 tree ivar2
= unshare_expr (lvar
);
5223 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5224 x
= lang_hooks
.decls
.omp_clause_default_ctor
5225 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
5226 gimplify_and_add (x
, &llist
[0]);
5230 x
= lang_hooks
.decls
.omp_clause_default_ctor
5231 (c
, unshare_expr (rvar2
),
5232 build_outer_var_ref (var
, ctx
));
5233 gimplify_and_add (x
, &llist
[0]);
5236 /* For types that need construction, add another
5237 private var which will be default constructed
5238 and optionally initialized with
5239 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5240 loop we want to assign this value instead of
5241 constructing and destructing it in each
5243 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
5244 gimple_add_tmp_var (nv
);
5245 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
5249 x
= lang_hooks
.decls
.omp_clause_default_ctor
5250 (c
, nv
, build_outer_var_ref (var
, ctx
));
5251 gimplify_and_add (x
, ilist
);
5253 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5255 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5256 x
= DECL_VALUE_EXPR (new_vard
);
5258 if (new_vard
!= new_var
)
5259 vexpr
= build_fold_addr_expr (nv
);
5260 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5261 lower_omp (&tseq
, ctx
);
5262 SET_DECL_VALUE_EXPR (new_vard
, x
);
5263 gimple_seq_add_seq (ilist
, tseq
);
5264 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5267 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5269 gimplify_and_add (x
, dlist
);
5272 tree ref
= build_outer_var_ref (var
, ctx
);
5273 x
= unshare_expr (ivar
);
5274 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5276 gimplify_and_add (x
, &llist
[0]);
5278 ref
= build_outer_var_ref (var
, ctx
);
5279 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
5281 gimplify_and_add (x
, &llist
[3]);
5283 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5284 if (new_vard
== new_var
)
5285 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5287 SET_DECL_VALUE_EXPR (new_vard
,
5288 build_fold_addr_expr (lvar
));
5290 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5292 gimplify_and_add (x
, &llist
[1]);
5294 tree ivar2
= unshare_expr (lvar
);
5295 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5296 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
5298 gimplify_and_add (x
, &llist
[1]);
5302 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
5304 gimplify_and_add (x
, &llist
[1]);
5309 gimplify_and_add (x
, &llist
[0]);
5310 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5312 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5313 lower_omp (&tseq
, ctx
);
5314 gimple_seq_add_seq (&llist
[0], tseq
);
5316 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5317 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5318 lower_omp (&tseq
, ctx
);
5319 gimple_seq_add_seq (&llist
[1], tseq
);
5320 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5321 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5322 if (new_vard
== new_var
)
5323 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5325 SET_DECL_VALUE_EXPR (new_vard
,
5326 build_fold_addr_expr (lvar
));
5327 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5329 gimplify_and_add (x
, &llist
[1]);
5332 /* If this is a reference to constant size reduction var
5333 with placeholder, we haven't emitted the initializer
5334 for it because it is undesirable if SIMD arrays are used.
5335 But if they aren't used, we need to emit the deferred
5336 initialization now. */
5337 else if (omp_is_reference (var
) && is_simd
)
5338 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5340 tree lab2
= NULL_TREE
;
5344 if (!is_parallel_ctx (ctx
))
5346 tree condv
= create_tmp_var (boolean_type_node
);
5347 tree m
= build_simple_mem_ref (cond
);
5348 g
= gimple_build_assign (condv
, m
);
5349 gimple_seq_add_stmt (ilist
, g
);
5351 = create_artificial_label (UNKNOWN_LOCATION
);
5352 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5353 g
= gimple_build_cond (NE_EXPR
, condv
,
5356 gimple_seq_add_stmt (ilist
, g
);
5357 gimple_seq_add_stmt (ilist
,
5358 gimple_build_label (lab1
));
5360 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5362 gimple_seq_add_stmt (ilist
, g
);
5364 x
= lang_hooks
.decls
.omp_clause_default_ctor
5365 (c
, unshare_expr (new_var
),
5367 : build_outer_var_ref (var
, ctx
));
5369 gimplify_and_add (x
, ilist
);
5371 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5372 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5375 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
5377 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
5378 gimple_add_tmp_var (nv
);
5379 ctx
->cb
.decl_map
->put (new_vard
, nv
);
5380 x
= lang_hooks
.decls
.omp_clause_default_ctor
5381 (c
, nv
, build_outer_var_ref (var
, ctx
));
5383 gimplify_and_add (x
, ilist
);
5384 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5386 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5388 if (new_vard
!= new_var
)
5389 vexpr
= build_fold_addr_expr (nv
);
5390 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5391 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5392 lower_omp (&tseq
, ctx
);
5393 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
5394 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
5395 gimple_seq_add_seq (ilist
, tseq
);
5397 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5398 if (is_simd
&& ctx
->scan_exclusive
)
5401 = create_tmp_var_raw (TREE_TYPE (new_var
));
5402 gimple_add_tmp_var (nv2
);
5403 ctx
->cb
.decl_map
->put (nv
, nv2
);
5404 x
= lang_hooks
.decls
.omp_clause_default_ctor
5405 (c
, nv2
, build_outer_var_ref (var
, ctx
));
5406 gimplify_and_add (x
, ilist
);
5407 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5409 gimplify_and_add (x
, dlist
);
5411 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5413 gimplify_and_add (x
, dlist
);
5416 && ctx
->scan_exclusive
5417 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
5419 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
5420 gimple_add_tmp_var (nv2
);
5421 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
5422 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5424 gimplify_and_add (x
, dlist
);
5426 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5430 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5432 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5433 lower_omp (&tseq
, ctx
);
5434 gimple_seq_add_seq (ilist
, tseq
);
5436 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5439 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5440 lower_omp (&tseq
, ctx
);
5441 gimple_seq_add_seq (dlist
, tseq
);
5442 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5444 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5448 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5455 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
5456 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
5457 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5462 tree lab2
= NULL_TREE
;
5463 /* GOMP_taskgroup_reduction_register memsets the whole
5464 array to zero. If the initializer is zero, we don't
5465 need to initialize it again, just mark it as ever
5466 used unconditionally, i.e. cond = true. */
5467 if (initializer_zerop (x
))
5469 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5471 gimple_seq_add_stmt (ilist
, g
);
5476 if (!cond) { cond = true; new_var = x; } */
5477 if (!is_parallel_ctx (ctx
))
5479 tree condv
= create_tmp_var (boolean_type_node
);
5480 tree m
= build_simple_mem_ref (cond
);
5481 g
= gimple_build_assign (condv
, m
);
5482 gimple_seq_add_stmt (ilist
, g
);
5484 = create_artificial_label (UNKNOWN_LOCATION
);
5485 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5486 g
= gimple_build_cond (NE_EXPR
, condv
,
5489 gimple_seq_add_stmt (ilist
, g
);
5490 gimple_seq_add_stmt (ilist
,
5491 gimple_build_label (lab1
));
5493 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5495 gimple_seq_add_stmt (ilist
, g
);
5496 gimplify_assign (new_var
, x
, ilist
);
5498 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5502 /* reduction(-:var) sums up the partial results, so it
5503 acts identically to reduction(+:var). */
5504 if (code
== MINUS_EXPR
)
5507 tree new_vard
= new_var
;
5508 if (is_simd
&& omp_is_reference (var
))
5510 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5511 new_vard
= TREE_OPERAND (new_var
, 0);
5512 gcc_assert (DECL_P (new_vard
));
5514 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5516 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5517 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5520 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5524 if (new_vard
!= new_var
)
5526 SET_DECL_VALUE_EXPR (new_vard
,
5527 build_fold_addr_expr (lvar
));
5528 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5531 tree ref
= build_outer_var_ref (var
, ctx
);
5535 gimplify_assign (ivar
, ref
, &llist
[0]);
5536 ref
= build_outer_var_ref (var
, ctx
);
5537 gimplify_assign (ref
, rvar
, &llist
[3]);
5541 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
5546 simt_lane
= create_tmp_var (unsigned_type_node
);
5547 x
= build_call_expr_internal_loc
5548 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
5549 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
5550 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
5551 gimplify_assign (ivar
, x
, &llist
[2]);
5553 x
= build2 (code
, TREE_TYPE (ref
), ref
, ivar
);
5554 ref
= build_outer_var_ref (var
, ctx
);
5555 gimplify_assign (ref
, x
, &llist
[1]);
5560 if (omp_is_reference (var
) && is_simd
)
5561 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5562 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5563 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5565 gimplify_assign (new_var
, x
, ilist
);
5568 tree ref
= build_outer_var_ref (var
, ctx
);
5570 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
5571 ref
= build_outer_var_ref (var
, ctx
);
5572 gimplify_assign (ref
, x
, dlist
);
5585 tree clobber
= build_constructor (TREE_TYPE (tskred_avar
), NULL
);
5586 TREE_THIS_VOLATILE (clobber
) = 1;
5587 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
5590 if (known_eq (sctx
.max_vf
, 1U))
5592 sctx
.is_simt
= false;
5593 if (ctx
->lastprivate_conditional_map
)
5595 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
5597 /* Signal to lower_omp_1 that it should use parent context. */
5598 ctx
->combined_into_simd_safelen0
= true;
5599 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5600 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5601 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5603 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
5605 = ctx
->lastprivate_conditional_map
->get (o
);
5606 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
->outer
);
5608 = ctx
->outer
->lastprivate_conditional_map
->get (po
);
5614 /* When not vectorized, treat lastprivate(conditional:) like
5615 normal lastprivate, as there will be just one simd lane
5616 writing the privatized variable. */
5617 delete ctx
->lastprivate_conditional_map
;
5618 ctx
->lastprivate_conditional_map
= NULL
;
5623 if (nonconst_simd_if
)
5625 if (sctx
.lane
== NULL_TREE
)
5627 sctx
.idx
= create_tmp_var (unsigned_type_node
);
5628 sctx
.lane
= create_tmp_var (unsigned_type_node
);
5630 /* FIXME: For now. */
5631 sctx
.is_simt
= false;
5634 if (sctx
.lane
|| sctx
.is_simt
)
5636 uid
= create_tmp_var (ptr_type_node
, "simduid");
5637 /* Don't want uninit warnings on simduid, it is always uninitialized,
5638 but we use it not for the value, but for the DECL_UID only. */
5639 TREE_NO_WARNING (uid
) = 1;
5640 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
5641 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
5642 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
5643 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
5645 /* Emit calls denoting privatized variables and initializing a pointer to
5646 structure that holds private variables as fields after ompdevlow pass. */
5649 sctx
.simt_eargs
[0] = uid
;
5651 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
5652 gimple_call_set_lhs (g
, uid
);
5653 gimple_seq_add_stmt (ilist
, g
);
5654 sctx
.simt_eargs
.release ();
5656 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
5657 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
5658 gimple_call_set_lhs (g
, simtrec
);
5659 gimple_seq_add_stmt (ilist
, g
);
5663 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
5664 2 + (nonconst_simd_if
!= NULL
),
5665 uid
, integer_zero_node
,
5667 gimple_call_set_lhs (g
, sctx
.lane
);
5668 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
5669 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
5670 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
5671 build_int_cst (unsigned_type_node
, 0));
5672 gimple_seq_add_stmt (ilist
, g
);
5675 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
5677 gimple_call_set_lhs (g
, sctx
.lastlane
);
5678 gimple_seq_add_stmt (dlist
, g
);
5679 gimple_seq_add_seq (dlist
, llist
[3]);
5681 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
5684 tree simt_vf
= create_tmp_var (unsigned_type_node
);
5685 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
5686 gimple_call_set_lhs (g
, simt_vf
);
5687 gimple_seq_add_stmt (dlist
, g
);
5689 tree t
= build_int_cst (unsigned_type_node
, 1);
5690 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
5691 gimple_seq_add_stmt (dlist
, g
);
5693 t
= build_int_cst (unsigned_type_node
, 0);
5694 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
5695 gimple_seq_add_stmt (dlist
, g
);
5697 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5698 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
5699 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5700 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
5701 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
5703 gimple_seq_add_seq (dlist
, llist
[2]);
5705 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
5706 gimple_seq_add_stmt (dlist
, g
);
5708 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
5709 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
5710 gimple_seq_add_stmt (dlist
, g
);
5712 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
5714 for (int i
= 0; i
< 2; i
++)
5717 tree vf
= create_tmp_var (unsigned_type_node
);
5718 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
5719 gimple_call_set_lhs (g
, vf
);
5720 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
5721 gimple_seq_add_stmt (seq
, g
);
5722 tree t
= build_int_cst (unsigned_type_node
, 0);
5723 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
5724 gimple_seq_add_stmt (seq
, g
);
5725 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5726 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
5727 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5728 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
5729 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
5730 gimple_seq_add_seq (seq
, llist
[i
]);
5731 t
= build_int_cst (unsigned_type_node
, 1);
5732 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
5733 gimple_seq_add_stmt (seq
, g
);
5734 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
5735 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
5736 gimple_seq_add_stmt (seq
, g
);
5737 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
5742 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
5744 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
5745 gimple_seq_add_stmt (dlist
, g
);
5748 /* The copyin sequence is not to be executed by the main thread, since
5749 that would result in self-copies. Perhaps not visible to scalars,
5750 but it certainly is to C++ operator=. */
5753 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
5755 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
5756 build_int_cst (TREE_TYPE (x
), 0));
5757 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
5758 gimplify_and_add (x
, ilist
);
5761 /* If any copyin variable is passed by reference, we must ensure the
5762 master thread doesn't modify it before it is copied over in all
5763 threads. Similarly for variables in both firstprivate and
5764 lastprivate clauses we need to ensure the lastprivate copying
5765 happens after firstprivate copying in all threads. And similarly
5766 for UDRs if initializer expression refers to omp_orig. */
5767 if (copyin_by_ref
|| lastprivate_firstprivate
5768 || (reduction_omp_orig_ref
5769 && !ctx
->scan_inclusive
5770 && !ctx
->scan_exclusive
))
5772 /* Don't add any barrier for #pragma omp simd or
5773 #pragma omp distribute. */
5774 if (!is_task_ctx (ctx
)
5775 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
5776 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
5777 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
5780 /* If max_vf is non-zero, then we can use only a vectorization factor
5781 up to the max_vf we chose. So stick it into the safelen clause. */
5782 if (maybe_ne (sctx
.max_vf
, 0U))
5784 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
5785 OMP_CLAUSE_SAFELEN
);
5786 poly_uint64 safe_len
;
5788 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
5789 && maybe_gt (safe_len
, sctx
.max_vf
)))
5791 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
5792 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
5794 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
5795 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
5800 /* Create temporary variables for lastprivate(conditional:) implementation
5801 in context CTX with CLAUSES. */
5804 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
5806 tree iter_type
= NULL_TREE
;
5807 tree cond_ptr
= NULL_TREE
;
5808 tree iter_var
= NULL_TREE
;
5809 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
5810 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
);
5811 tree next
= *clauses
;
5812 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5813 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5814 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5818 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
5820 if (iter_type
== NULL_TREE
)
5822 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
5823 iter_var
= create_tmp_var_raw (iter_type
);
5824 DECL_CONTEXT (iter_var
) = current_function_decl
;
5825 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
5826 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
5827 ctx
->block_vars
= iter_var
;
5829 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
5830 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
5831 OMP_CLAUSE_DECL (c3
) = iter_var
;
5832 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
5834 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
5836 next
= OMP_CLAUSE_CHAIN (cc
);
5837 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
5838 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
5839 ctx
->lastprivate_conditional_map
->put (o
, v
);
5842 if (iter_type
== NULL
)
5844 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
5846 struct omp_for_data fd
;
5847 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
5849 iter_type
= unsigned_type_for (fd
.iter_type
);
5851 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
5852 iter_type
= unsigned_type_node
;
5853 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
5857 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
5858 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
5862 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
5863 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
5864 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
5865 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
5866 ctx
->block_vars
= cond_ptr
;
5867 c2
= build_omp_clause (UNKNOWN_LOCATION
,
5868 OMP_CLAUSE__CONDTEMP_
);
5869 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
5870 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
5873 iter_var
= create_tmp_var_raw (iter_type
);
5874 DECL_CONTEXT (iter_var
) = current_function_decl
;
5875 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
5876 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
5877 ctx
->block_vars
= iter_var
;
5879 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
5880 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
5881 OMP_CLAUSE_DECL (c3
) = iter_var
;
5882 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
5883 OMP_CLAUSE_CHAIN (c2
) = c3
;
5884 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
5886 tree v
= create_tmp_var_raw (iter_type
);
5887 DECL_CONTEXT (v
) = current_function_decl
;
5888 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
5889 DECL_CHAIN (v
) = ctx
->block_vars
;
5890 ctx
->block_vars
= v
;
5891 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
5892 ctx
->lastprivate_conditional_map
->put (o
, v
);
5897 /* Generate code to implement the LASTPRIVATE clauses. This is used for
5898 both parallel and workshare constructs. PREDICATE may be NULL if it's
5899 always true. BODY_P is the sequence to insert early initialization
5900 if needed, STMT_LIST is where the non-conditional lastprivate handling
5901 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
5905 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
5906 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
5909 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
5910 bool par_clauses
= false;
5911 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
5912 unsigned HOST_WIDE_INT conditional_off
= 0;
5914 /* Early exit if there are no lastprivate or linear clauses. */
5915 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
5916 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
5917 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
5918 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
5920 if (clauses
== NULL
)
5922 /* If this was a workshare clause, see if it had been combined
5923 with its parallel. In that case, look for the clauses on the
5924 parallel statement itself. */
5925 if (is_parallel_ctx (ctx
))
5929 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
5932 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
5933 OMP_CLAUSE_LASTPRIVATE
);
5934 if (clauses
== NULL
)
5939 bool maybe_simt
= false;
5940 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
5941 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
5943 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
5944 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
5946 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
5952 tree label_true
, arm1
, arm2
;
5953 enum tree_code pred_code
= TREE_CODE (predicate
);
5955 label
= create_artificial_label (UNKNOWN_LOCATION
);
5956 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
5957 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
5959 arm1
= TREE_OPERAND (predicate
, 0);
5960 arm2
= TREE_OPERAND (predicate
, 1);
5961 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
5962 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
5967 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
5968 arm2
= boolean_false_node
;
5969 pred_code
= NE_EXPR
;
5973 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
5974 c
= fold_convert (integer_type_node
, c
);
5975 simtcond
= create_tmp_var (integer_type_node
);
5976 gimplify_assign (simtcond
, c
, stmt_list
);
5977 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
5979 c
= create_tmp_var (integer_type_node
);
5980 gimple_call_set_lhs (g
, c
);
5981 gimple_seq_add_stmt (stmt_list
, g
);
5982 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
5986 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
5987 gimple_seq_add_stmt (stmt_list
, stmt
);
5988 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
5991 tree cond_ptr
= NULL_TREE
;
5992 for (c
= clauses
; c
;)
5995 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5996 gimple_seq
*this_stmt_list
= stmt_list
;
5997 tree lab2
= NULL_TREE
;
5999 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6000 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6001 && ctx
->lastprivate_conditional_map
6002 && !ctx
->combined_into_simd_safelen0
)
6004 gcc_assert (body_p
);
6007 if (cond_ptr
== NULL_TREE
)
6009 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
6010 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
6012 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
6013 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6014 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
6015 gimplify_assign (v
, build_zero_cst (type
), body_p
);
6016 this_stmt_list
= cstmt_list
;
6018 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
6020 mem
= build2 (MEM_REF
, type
, cond_ptr
,
6021 build_int_cst (TREE_TYPE (cond_ptr
),
6023 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
6026 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
6027 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
6028 tree mem2
= copy_node (mem
);
6029 gimple_seq seq
= NULL
;
6030 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
6031 gimple_seq_add_seq (this_stmt_list
, seq
);
6032 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
6033 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6034 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
6035 gimple_seq_add_stmt (this_stmt_list
, g
);
6036 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
6037 gimplify_assign (mem2
, v
, this_stmt_list
);
6040 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6041 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6042 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
6044 var
= OMP_CLAUSE_DECL (c
);
6045 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6046 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
6047 && is_taskloop_ctx (ctx
))
6049 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
6050 new_var
= lookup_decl (var
, ctx
->outer
);
6054 new_var
= lookup_decl (var
, ctx
);
6055 /* Avoid uninitialized warnings for lastprivate and
6056 for linear iterators. */
6058 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6059 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
6060 TREE_NO_WARNING (new_var
) = 1;
6063 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
6065 tree val
= DECL_VALUE_EXPR (new_var
);
6066 if (TREE_CODE (val
) == ARRAY_REF
6067 && VAR_P (TREE_OPERAND (val
, 0))
6068 && lookup_attribute ("omp simd array",
6069 DECL_ATTRIBUTES (TREE_OPERAND (val
,
6072 if (lastlane
== NULL
)
6074 lastlane
= create_tmp_var (unsigned_type_node
);
6076 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6078 TREE_OPERAND (val
, 1));
6079 gimple_call_set_lhs (g
, lastlane
);
6080 gimple_seq_add_stmt (this_stmt_list
, g
);
6082 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
6083 TREE_OPERAND (val
, 0), lastlane
,
6084 NULL_TREE
, NULL_TREE
);
6085 TREE_THIS_NOTRAP (new_var
) = 1;
6088 else if (maybe_simt
)
6090 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
6091 ? DECL_VALUE_EXPR (new_var
)
6093 if (simtlast
== NULL
)
6095 simtlast
= create_tmp_var (unsigned_type_node
);
6096 gcall
*g
= gimple_build_call_internal
6097 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
6098 gimple_call_set_lhs (g
, simtlast
);
6099 gimple_seq_add_stmt (this_stmt_list
, g
);
6101 x
= build_call_expr_internal_loc
6102 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
6103 TREE_TYPE (val
), 2, val
, simtlast
);
6104 new_var
= unshare_expr (new_var
);
6105 gimplify_assign (new_var
, x
, this_stmt_list
);
6106 new_var
= unshare_expr (new_var
);
6109 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6110 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
6112 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
6113 gimple_seq_add_seq (this_stmt_list
,
6114 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
6115 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
6117 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6118 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
6120 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
6121 gimple_seq_add_seq (this_stmt_list
,
6122 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
6123 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
6127 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6128 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c
))
6130 gcc_checking_assert (is_taskloop_ctx (ctx
));
6131 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
6133 if (is_global_var (ovar
))
6137 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
6138 if (omp_is_reference (var
))
6139 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6140 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
6141 gimplify_and_add (x
, this_stmt_list
);
6144 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
6148 c
= OMP_CLAUSE_CHAIN (c
);
6149 if (c
== NULL
&& !par_clauses
)
6151 /* If this was a workshare clause, see if it had been combined
6152 with its parallel. In that case, continue looking for the
6153 clauses also on the parallel statement itself. */
6154 if (is_parallel_ctx (ctx
))
6158 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6161 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6162 OMP_CLAUSE_LASTPRIVATE
);
6168 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
6171 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6172 (which might be a placeholder). INNER is true if this is an inner
6173 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6174 join markers. Generate the before-loop forking sequence in
6175 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6176 general form of these sequences is
6178 GOACC_REDUCTION_SETUP
6180 GOACC_REDUCTION_INIT
6182 GOACC_REDUCTION_FINI
6184 GOACC_REDUCTION_TEARDOWN. */
6187 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
6188 gcall
*fork
, gcall
*join
, gimple_seq
*fork_seq
,
6189 gimple_seq
*join_seq
, omp_context
*ctx
)
6191 gimple_seq before_fork
= NULL
;
6192 gimple_seq after_fork
= NULL
;
6193 gimple_seq before_join
= NULL
;
6194 gimple_seq after_join
= NULL
;
6195 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
6196 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
6197 unsigned offset
= 0;
6199 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6200 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
6202 tree orig
= OMP_CLAUSE_DECL (c
);
6203 tree var
= maybe_lookup_decl (orig
, ctx
);
6204 tree ref_to_res
= NULL_TREE
;
6205 tree incoming
, outgoing
, v1
, v2
, v3
;
6206 bool is_private
= false;
6208 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
6209 if (rcode
== MINUS_EXPR
)
6211 else if (rcode
== TRUTH_ANDIF_EXPR
)
6212 rcode
= BIT_AND_EXPR
;
6213 else if (rcode
== TRUTH_ORIF_EXPR
)
6214 rcode
= BIT_IOR_EXPR
;
6215 tree op
= build_int_cst (unsigned_type_node
, rcode
);
6220 incoming
= outgoing
= var
;
6224 /* See if an outer construct also reduces this variable. */
6225 omp_context
*outer
= ctx
;
6227 while (omp_context
*probe
= outer
->outer
)
6229 enum gimple_code type
= gimple_code (probe
->stmt
);
6234 case GIMPLE_OMP_FOR
:
6235 cls
= gimple_omp_for_clauses (probe
->stmt
);
6238 case GIMPLE_OMP_TARGET
:
6239 if (gimple_omp_target_kind (probe
->stmt
)
6240 != GF_OMP_TARGET_KIND_OACC_PARALLEL
)
6243 cls
= gimple_omp_target_clauses (probe
->stmt
);
6251 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
6252 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
6253 && orig
== OMP_CLAUSE_DECL (cls
))
6255 incoming
= outgoing
= lookup_decl (orig
, probe
);
6256 goto has_outer_reduction
;
6258 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
6259 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
6260 && orig
== OMP_CLAUSE_DECL (cls
))
6268 /* This is the outermost construct with this reduction,
6269 see if there's a mapping for it. */
6270 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
6271 && maybe_lookup_field (orig
, outer
) && !is_private
)
6273 ref_to_res
= build_receiver_ref (orig
, false, outer
);
6274 if (omp_is_reference (orig
))
6275 ref_to_res
= build_simple_mem_ref (ref_to_res
);
6277 tree type
= TREE_TYPE (var
);
6278 if (POINTER_TYPE_P (type
))
6279 type
= TREE_TYPE (type
);
6282 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
6286 /* Try to look at enclosing contexts for reduction var,
6287 use original if no mapping found. */
6289 omp_context
*c
= ctx
->outer
;
6292 t
= maybe_lookup_decl (orig
, c
);
6295 incoming
= outgoing
= (t
? t
: orig
);
6298 has_outer_reduction
:;
6302 ref_to_res
= integer_zero_node
;
6304 if (omp_is_reference (orig
))
6306 tree type
= TREE_TYPE (var
);
6307 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
6311 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
6312 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
6315 v1
= create_tmp_var (type
, id
);
6316 v2
= create_tmp_var (type
, id
);
6317 v3
= create_tmp_var (type
, id
);
6319 gimplify_assign (v1
, var
, fork_seq
);
6320 gimplify_assign (v2
, var
, fork_seq
);
6321 gimplify_assign (v3
, var
, fork_seq
);
6323 var
= build_simple_mem_ref (var
);
6324 v1
= build_simple_mem_ref (v1
);
6325 v2
= build_simple_mem_ref (v2
);
6326 v3
= build_simple_mem_ref (v3
);
6327 outgoing
= build_simple_mem_ref (outgoing
);
6329 if (!TREE_CONSTANT (incoming
))
6330 incoming
= build_simple_mem_ref (incoming
);
6335 /* Determine position in reduction buffer, which may be used
6336 by target. The parser has ensured that this is not a
6337 variable-sized type. */
6338 fixed_size_mode mode
6339 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
6340 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
6341 offset
= (offset
+ align
- 1) & ~(align
- 1);
6342 tree off
= build_int_cst (sizetype
, offset
);
6343 offset
+= GET_MODE_SIZE (mode
);
6347 init_code
= build_int_cst (integer_type_node
,
6348 IFN_GOACC_REDUCTION_INIT
);
6349 fini_code
= build_int_cst (integer_type_node
,
6350 IFN_GOACC_REDUCTION_FINI
);
6351 setup_code
= build_int_cst (integer_type_node
,
6352 IFN_GOACC_REDUCTION_SETUP
);
6353 teardown_code
= build_int_cst (integer_type_node
,
6354 IFN_GOACC_REDUCTION_TEARDOWN
);
6358 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6359 TREE_TYPE (var
), 6, setup_code
,
6360 unshare_expr (ref_to_res
),
6361 incoming
, level
, op
, off
);
6363 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6364 TREE_TYPE (var
), 6, init_code
,
6365 unshare_expr (ref_to_res
),
6366 v1
, level
, op
, off
);
6368 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6369 TREE_TYPE (var
), 6, fini_code
,
6370 unshare_expr (ref_to_res
),
6371 v2
, level
, op
, off
);
6373 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6374 TREE_TYPE (var
), 6, teardown_code
,
6375 ref_to_res
, v3
, level
, op
, off
);
6377 gimplify_assign (v1
, setup_call
, &before_fork
);
6378 gimplify_assign (v2
, init_call
, &after_fork
);
6379 gimplify_assign (v3
, fini_call
, &before_join
);
6380 gimplify_assign (outgoing
, teardown_call
, &after_join
);
6383 /* Now stitch things together. */
6384 gimple_seq_add_seq (fork_seq
, before_fork
);
6386 gimple_seq_add_stmt (fork_seq
, fork
);
6387 gimple_seq_add_seq (fork_seq
, after_fork
);
6389 gimple_seq_add_seq (join_seq
, before_join
);
6391 gimple_seq_add_stmt (join_seq
, join
);
6392 gimple_seq_add_seq (join_seq
, after_join
);
6395 /* Generate code to implement the REDUCTION clauses, append it
6396 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6397 that should be emitted also inside of the critical section,
6398 in that case clear *CLIST afterwards, otherwise leave it as is
6399 and let the caller emit it itself. */
6402 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
6403 gimple_seq
*clist
, omp_context
*ctx
)
6405 gimple_seq sub_seq
= NULL
;
6410 /* OpenACC loop reductions are handled elsewhere. */
6411 if (is_gimple_omp_oacc (ctx
->stmt
))
6414 /* SIMD reductions are handled in lower_rec_input_clauses. */
6415 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6416 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
6419 /* inscan reductions are handled elsewhere. */
6420 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
6423 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6424 update in that case, otherwise use a lock. */
6425 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
6426 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6427 && !OMP_CLAUSE_REDUCTION_TASK (c
))
6429 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
6430 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6432 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6442 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6444 tree var
, ref
, new_var
, orig_var
;
6445 enum tree_code code
;
6446 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6448 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
6449 || OMP_CLAUSE_REDUCTION_TASK (c
))
6452 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
6453 orig_var
= var
= OMP_CLAUSE_DECL (c
);
6454 if (TREE_CODE (var
) == MEM_REF
)
6456 var
= TREE_OPERAND (var
, 0);
6457 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
6458 var
= TREE_OPERAND (var
, 0);
6459 if (TREE_CODE (var
) == ADDR_EXPR
)
6460 var
= TREE_OPERAND (var
, 0);
6463 /* If this is a pointer or referenced based array
6464 section, the var could be private in the outer
6465 context e.g. on orphaned loop construct. Pretend this
6466 is private variable's outer reference. */
6467 ccode
= OMP_CLAUSE_PRIVATE
;
6468 if (TREE_CODE (var
) == INDIRECT_REF
)
6469 var
= TREE_OPERAND (var
, 0);
6472 if (is_variable_sized (var
))
6474 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
6475 var
= DECL_VALUE_EXPR (var
);
6476 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
6477 var
= TREE_OPERAND (var
, 0);
6478 gcc_assert (DECL_P (var
));
6481 new_var
= lookup_decl (var
, ctx
);
6482 if (var
== OMP_CLAUSE_DECL (c
) && omp_is_reference (var
))
6483 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6484 ref
= build_outer_var_ref (var
, ctx
, ccode
);
6485 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6487 /* reduction(-:var) sums up the partial results, so it acts
6488 identically to reduction(+:var). */
6489 if (code
== MINUS_EXPR
)
6494 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
6496 addr
= save_expr (addr
);
6497 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
6498 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (ref
), ref
, new_var
);
6499 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
6500 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
6501 gimplify_and_add (x
, stmt_seqp
);
6504 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6506 tree d
= OMP_CLAUSE_DECL (c
);
6507 tree type
= TREE_TYPE (d
);
6508 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
6509 tree i
= create_tmp_var (TREE_TYPE (v
));
6510 tree ptype
= build_pointer_type (TREE_TYPE (type
));
6511 tree bias
= TREE_OPERAND (d
, 1);
6512 d
= TREE_OPERAND (d
, 0);
6513 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
6515 tree b
= TREE_OPERAND (d
, 1);
6516 b
= maybe_lookup_decl (b
, ctx
);
6519 b
= TREE_OPERAND (d
, 1);
6520 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
6522 if (integer_zerop (bias
))
6526 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
6527 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
6528 TREE_TYPE (b
), b
, bias
);
6530 d
= TREE_OPERAND (d
, 0);
6532 /* For ref build_outer_var_ref already performs this, so
6533 only new_var needs a dereference. */
6534 if (TREE_CODE (d
) == INDIRECT_REF
)
6536 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6537 gcc_assert (omp_is_reference (var
) && var
== orig_var
);
6539 else if (TREE_CODE (d
) == ADDR_EXPR
)
6541 if (orig_var
== var
)
6543 new_var
= build_fold_addr_expr (new_var
);
6544 ref
= build_fold_addr_expr (ref
);
6549 gcc_assert (orig_var
== var
);
6550 if (omp_is_reference (var
))
6551 ref
= build_fold_addr_expr (ref
);
6555 tree t
= maybe_lookup_decl (v
, ctx
);
6559 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
6560 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
6562 if (!integer_zerop (bias
))
6564 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
6565 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
6566 TREE_TYPE (new_var
), new_var
,
6567 unshare_expr (bias
));
6568 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
6569 TREE_TYPE (ref
), ref
, bias
);
6571 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
6572 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
6573 tree m
= create_tmp_var (ptype
);
6574 gimplify_assign (m
, new_var
, stmt_seqp
);
6576 m
= create_tmp_var (ptype
);
6577 gimplify_assign (m
, ref
, stmt_seqp
);
6579 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
6580 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6581 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6582 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
6583 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6584 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
6585 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6587 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6588 tree decl_placeholder
6589 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
6590 SET_DECL_VALUE_EXPR (placeholder
, out
);
6591 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6592 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
6593 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
6594 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
6595 gimple_seq_add_seq (&sub_seq
,
6596 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
6597 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6598 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
6599 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
6603 x
= build2 (code
, TREE_TYPE (out
), out
, priv
);
6604 out
= unshare_expr (out
);
6605 gimplify_assign (out
, x
, &sub_seq
);
6607 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
6608 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
6609 gimple_seq_add_stmt (&sub_seq
, g
);
6610 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
6611 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
6612 gimple_seq_add_stmt (&sub_seq
, g
);
6613 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
6614 build_int_cst (TREE_TYPE (i
), 1));
6615 gimple_seq_add_stmt (&sub_seq
, g
);
6616 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
6617 gimple_seq_add_stmt (&sub_seq
, g
);
6618 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
6620 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6622 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6624 if (omp_is_reference (var
)
6625 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
6627 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
6628 SET_DECL_VALUE_EXPR (placeholder
, ref
);
6629 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6630 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
6631 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
6632 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6633 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
6637 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
6638 ref
= build_outer_var_ref (var
, ctx
);
6639 gimplify_assign (ref
, x
, &sub_seq
);
6643 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
6645 gimple_seq_add_stmt (stmt_seqp
, stmt
);
6647 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
6651 gimple_seq_add_seq (stmt_seqp
, *clist
);
6655 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
6657 gimple_seq_add_stmt (stmt_seqp
, stmt
);
6661 /* Generate code to implement the COPYPRIVATE clauses. */
6664 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
6669 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6671 tree var
, new_var
, ref
, x
;
6673 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6675 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
6678 var
= OMP_CLAUSE_DECL (c
);
6679 by_ref
= use_pointer_for_field (var
, NULL
);
6681 ref
= build_sender_ref (var
, ctx
);
6682 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
6685 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
6686 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
6688 gimplify_assign (ref
, x
, slist
);
6690 ref
= build_receiver_ref (var
, false, ctx
);
6693 ref
= fold_convert_loc (clause_loc
,
6694 build_pointer_type (TREE_TYPE (new_var
)),
6696 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
6698 if (omp_is_reference (var
))
6700 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
6701 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
6702 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6704 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
6705 gimplify_and_add (x
, rlist
);
6710 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6711 and REDUCTION from the sender (aka parent) side. */
6714 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
6718 int ignored_looptemp
= 0;
6719 bool is_taskloop
= false;
6721 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
6722 by GOMP_taskloop. */
6723 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
6725 ignored_looptemp
= 2;
6729 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6731 tree val
, ref
, x
, var
;
6732 bool by_ref
, do_in
= false, do_out
= false;
6733 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6735 switch (OMP_CLAUSE_CODE (c
))
6737 case OMP_CLAUSE_PRIVATE
:
6738 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
6741 case OMP_CLAUSE_FIRSTPRIVATE
:
6742 case OMP_CLAUSE_COPYIN
:
6743 case OMP_CLAUSE_LASTPRIVATE
:
6744 case OMP_CLAUSE_IN_REDUCTION
:
6745 case OMP_CLAUSE__REDUCTEMP_
:
6747 case OMP_CLAUSE_REDUCTION
:
6748 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
6751 case OMP_CLAUSE_SHARED
:
6752 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
6755 case OMP_CLAUSE__LOOPTEMP_
:
6756 if (ignored_looptemp
)
6766 val
= OMP_CLAUSE_DECL (c
);
6767 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6768 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
6769 && TREE_CODE (val
) == MEM_REF
)
6771 val
= TREE_OPERAND (val
, 0);
6772 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
6773 val
= TREE_OPERAND (val
, 0);
6774 if (TREE_CODE (val
) == INDIRECT_REF
6775 || TREE_CODE (val
) == ADDR_EXPR
)
6776 val
= TREE_OPERAND (val
, 0);
6777 if (is_variable_sized (val
))
6781 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
6782 outer taskloop region. */
6783 omp_context
*ctx_for_o
= ctx
;
6785 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
6786 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
6787 ctx_for_o
= ctx
->outer
;
6789 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
6791 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
6792 && is_global_var (var
)
6793 && (val
== OMP_CLAUSE_DECL (c
)
6794 || !is_task_ctx (ctx
)
6795 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
6796 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
6797 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
6798 != POINTER_TYPE
)))))
6801 t
= omp_member_access_dummy_var (var
);
6804 var
= DECL_VALUE_EXPR (var
);
6805 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
6807 var
= unshare_and_remap (var
, t
, o
);
6809 var
= unshare_expr (var
);
6812 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
6814 /* Handle taskloop firstprivate/lastprivate, where the
6815 lastprivate on GIMPLE_OMP_TASK is represented as
6816 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
6817 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
6818 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
6819 if (use_pointer_for_field (val
, ctx
))
6820 var
= build_fold_addr_expr (var
);
6821 gimplify_assign (x
, var
, ilist
);
6822 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
6826 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
6827 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
6828 || val
== OMP_CLAUSE_DECL (c
))
6829 && is_variable_sized (val
))
6831 by_ref
= use_pointer_for_field (val
, NULL
);
6833 switch (OMP_CLAUSE_CODE (c
))
6835 case OMP_CLAUSE_FIRSTPRIVATE
:
6836 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
6838 && is_task_ctx (ctx
))
6839 TREE_NO_WARNING (var
) = 1;
6843 case OMP_CLAUSE_PRIVATE
:
6844 case OMP_CLAUSE_COPYIN
:
6845 case OMP_CLAUSE__LOOPTEMP_
:
6846 case OMP_CLAUSE__REDUCTEMP_
:
6850 case OMP_CLAUSE_LASTPRIVATE
:
6851 if (by_ref
|| omp_is_reference (val
))
6853 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
6860 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
6865 case OMP_CLAUSE_REDUCTION
:
6866 case OMP_CLAUSE_IN_REDUCTION
:
6868 if (val
== OMP_CLAUSE_DECL (c
))
6870 if (is_task_ctx (ctx
))
6871 by_ref
= use_pointer_for_field (val
, ctx
);
6873 do_out
= !(by_ref
|| omp_is_reference (val
));
6876 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
6885 ref
= build_sender_ref (val
, ctx
);
6886 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
6887 gimplify_assign (ref
, x
, ilist
);
6888 if (is_task_ctx (ctx
))
6889 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
6894 ref
= build_sender_ref (val
, ctx
);
6895 gimplify_assign (var
, ref
, olist
);
6900 /* Generate code to implement SHARED from the sender (aka parent)
6901 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
6902 list things that got automatically shared. */
6905 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
6907 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
6909 if (ctx
->record_type
== NULL
)
6912 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
6913 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
6915 ovar
= DECL_ABSTRACT_ORIGIN (f
);
6916 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
6919 nvar
= maybe_lookup_decl (ovar
, ctx
);
6920 if (!nvar
|| !DECL_HAS_VALUE_EXPR_P (nvar
))
6923 /* If CTX is a nested parallel directive. Find the immediately
6924 enclosing parallel or workshare construct that contains a
6925 mapping for OVAR. */
6926 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
6928 t
= omp_member_access_dummy_var (var
);
6931 var
= DECL_VALUE_EXPR (var
);
6932 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
6934 var
= unshare_and_remap (var
, t
, o
);
6936 var
= unshare_expr (var
);
6939 if (use_pointer_for_field (ovar
, ctx
))
6941 x
= build_sender_ref (ovar
, ctx
);
6942 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
6943 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
6945 gcc_assert (is_parallel_ctx (ctx
)
6946 && DECL_ARTIFICIAL (ovar
));
6947 /* _condtemp_ clause. */
6948 var
= build_constructor (TREE_TYPE (x
), NULL
);
6951 var
= build_fold_addr_expr (var
);
6952 gimplify_assign (x
, var
, ilist
);
6956 x
= build_sender_ref (ovar
, ctx
);
6957 gimplify_assign (x
, var
, ilist
);
6959 if (!TREE_READONLY (var
)
6960 /* We don't need to receive a new reference to a result
6961 or parm decl. In fact we may not store to it as we will
6962 invalidate any pending RSO and generate wrong gimple
6964 && !((TREE_CODE (var
) == RESULT_DECL
6965 || TREE_CODE (var
) == PARM_DECL
)
6966 && DECL_BY_REFERENCE (var
)))
6968 x
= build_sender_ref (ovar
, ctx
);
6969 gimplify_assign (var
, x
, olist
);
6975 /* Emit an OpenACC head marker call, encapulating the partitioning and
6976 other information that must be processed by the target compiler.
6977 Return the maximum number of dimensions the associated loop might
6978 be partitioned over. */
6981 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
6982 gimple_seq
*seq
, omp_context
*ctx
)
6984 unsigned levels
= 0;
6986 tree gang_static
= NULL_TREE
;
6987 auto_vec
<tree
, 5> args
;
6989 args
.quick_push (build_int_cst
6990 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
6991 args
.quick_push (ddvar
);
6992 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6994 switch (OMP_CLAUSE_CODE (c
))
6996 case OMP_CLAUSE_GANG
:
6997 tag
|= OLF_DIM_GANG
;
6998 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
6999 /* static:* is represented by -1, and we can ignore it, as
7000 scheduling is always static. */
7001 if (gang_static
&& integer_minus_onep (gang_static
))
7002 gang_static
= NULL_TREE
;
7006 case OMP_CLAUSE_WORKER
:
7007 tag
|= OLF_DIM_WORKER
;
7011 case OMP_CLAUSE_VECTOR
:
7012 tag
|= OLF_DIM_VECTOR
;
7016 case OMP_CLAUSE_SEQ
:
7020 case OMP_CLAUSE_AUTO
:
7024 case OMP_CLAUSE_INDEPENDENT
:
7025 tag
|= OLF_INDEPENDENT
;
7028 case OMP_CLAUSE_TILE
:
7039 if (DECL_P (gang_static
))
7040 gang_static
= build_outer_var_ref (gang_static
, ctx
);
7041 tag
|= OLF_GANG_STATIC
;
7044 /* In a parallel region, loops are implicitly INDEPENDENT. */
7045 omp_context
*tgt
= enclosing_target_ctx (ctx
);
7046 if (!tgt
|| is_oacc_parallel (tgt
))
7047 tag
|= OLF_INDEPENDENT
;
7050 /* Tiling could use all 3 levels. */
7054 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7055 Ensure at least one level, or 2 for possible auto
7057 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
7058 << OLF_DIM_BASE
) | OLF_SEQ
));
7060 if (levels
< 1u + maybe_auto
)
7061 levels
= 1u + maybe_auto
;
7064 args
.quick_push (build_int_cst (integer_type_node
, levels
));
7065 args
.quick_push (build_int_cst (integer_type_node
, tag
));
7067 args
.quick_push (gang_static
);
7069 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
7070 gimple_set_location (call
, loc
);
7071 gimple_set_lhs (call
, ddvar
);
7072 gimple_seq_add_stmt (seq
, call
);
7077 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7078 partitioning level of the enclosed region. */
7081 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
7082 tree tofollow
, gimple_seq
*seq
)
7084 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
7085 : IFN_UNIQUE_OACC_TAIL_MARK
);
7086 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
7087 int nargs
= 2 + (tofollow
!= NULL_TREE
);
7088 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
7089 marker
, ddvar
, tofollow
);
7090 gimple_set_location (call
, loc
);
7091 gimple_set_lhs (call
, ddvar
);
7092 gimple_seq_add_stmt (seq
, call
);
7095 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7096 the loop clauses, from which we extract reductions. Initialize
7100 lower_oacc_head_tail (location_t loc
, tree clauses
,
7101 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
7104 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
7105 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
7107 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
7108 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
7109 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
7112 for (unsigned done
= 1; count
; count
--, done
++)
7114 gimple_seq fork_seq
= NULL
;
7115 gimple_seq join_seq
= NULL
;
7117 tree place
= build_int_cst (integer_type_node
, -1);
7118 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7119 fork_kind
, ddvar
, place
);
7120 gimple_set_location (fork
, loc
);
7121 gimple_set_lhs (fork
, ddvar
);
7123 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7124 join_kind
, ddvar
, place
);
7125 gimple_set_location (join
, loc
);
7126 gimple_set_lhs (join
, ddvar
);
7128 /* Mark the beginning of this level sequence. */
7130 lower_oacc_loop_marker (loc
, ddvar
, true,
7131 build_int_cst (integer_type_node
, count
),
7133 lower_oacc_loop_marker (loc
, ddvar
, false,
7134 build_int_cst (integer_type_node
, done
),
7137 lower_oacc_reductions (loc
, clauses
, place
, inner
,
7138 fork
, join
, &fork_seq
, &join_seq
, ctx
);
7140 /* Append this level to head. */
7141 gimple_seq_add_seq (head
, fork_seq
);
7142 /* Prepend it to tail. */
7143 gimple_seq_add_seq (&join_seq
, *tail
);
7149 /* Mark the end of the sequence. */
7150 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
7151 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
7154 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7155 catch handler and return it. This prevents programs from violating the
7156 structured block semantics with throws. */
7159 maybe_catch_exception (gimple_seq body
)
7164 if (!flag_exceptions
)
7167 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
7168 decl
= lang_hooks
.eh_protect_cleanup_actions ();
7170 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
7172 g
= gimple_build_eh_must_not_throw (decl
);
7173 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
7176 return gimple_seq_alloc_with_stmt (g
);
7180 /* Routines to lower OMP directives into OMP-GIMPLE. */
7182 /* If ctx is a worksharing context inside of a cancellable parallel
7183 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7184 and conditional branch to parallel's cancel_label to handle
7185 cancellation in the implicit barrier. */
7188 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
7191 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
7192 if (gimple_omp_return_nowait_p (omp_return
))
7194 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
7195 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
7196 && outer
->cancellable
)
7198 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
7199 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
7200 tree lhs
= create_tmp_var (c_bool_type
);
7201 gimple_omp_return_set_lhs (omp_return
, lhs
);
7202 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
7203 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
7204 fold_convert (c_bool_type
,
7205 boolean_false_node
),
7206 outer
->cancel_label
, fallthru_label
);
7207 gimple_seq_add_stmt (body
, g
);
7208 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
7210 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
7214 /* Find the first task_reduction or reduction clause or return NULL
7215 if there are none. */
7218 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
7219 enum omp_clause_code ccode
)
7223 clauses
= omp_find_clause (clauses
, ccode
);
7224 if (clauses
== NULL_TREE
)
7226 if (ccode
!= OMP_CLAUSE_REDUCTION
7227 || code
== OMP_TASKLOOP
7228 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
7230 clauses
= OMP_CLAUSE_CHAIN (clauses
);
7234 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
7235 gimple_seq
*, gimple_seq
*);
7237 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7238 CTX is the enclosing OMP context for the current statement. */
7241 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7243 tree block
, control
;
7244 gimple_stmt_iterator tgsi
;
7245 gomp_sections
*stmt
;
7247 gbind
*new_stmt
, *bind
;
7248 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
7250 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
7252 push_gimplify_context ();
7258 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
7259 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
7260 tree rtmp
= NULL_TREE
;
7263 tree type
= build_pointer_type (pointer_sized_int_node
);
7264 tree temp
= create_tmp_var (type
);
7265 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
7266 OMP_CLAUSE_DECL (c
) = temp
;
7267 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
7268 gimple_omp_sections_set_clauses (stmt
, c
);
7269 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
7270 gimple_omp_sections_clauses (stmt
),
7271 &ilist
, &tred_dlist
);
7273 rtmp
= make_ssa_name (type
);
7274 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
7277 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
7278 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
7280 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
7281 &ilist
, &dlist
, ctx
, NULL
);
7283 control
= create_tmp_var (unsigned_type_node
, ".section");
7284 gimple_omp_sections_set_control (stmt
, control
);
7286 new_body
= gimple_omp_body (stmt
);
7287 gimple_omp_set_body (stmt
, NULL
);
7288 tgsi
= gsi_start (new_body
);
7289 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
7294 sec_start
= gsi_stmt (tgsi
);
7295 sctx
= maybe_lookup_ctx (sec_start
);
7298 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
7299 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
7300 GSI_CONTINUE_LINKING
);
7301 gimple_omp_set_body (sec_start
, NULL
);
7303 if (gsi_one_before_end_p (tgsi
))
7305 gimple_seq l
= NULL
;
7306 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
7307 &ilist
, &l
, &clist
, ctx
);
7308 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
7309 gimple_omp_section_set_last (sec_start
);
7312 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
7313 GSI_CONTINUE_LINKING
);
7316 block
= make_node (BLOCK
);
7317 bind
= gimple_build_bind (NULL
, new_body
, block
);
7320 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
7324 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
7325 gcall
*g
= gimple_build_call (fndecl
, 0);
7326 gimple_seq_add_stmt (&olist
, g
);
7327 gimple_seq_add_seq (&olist
, clist
);
7328 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
7329 g
= gimple_build_call (fndecl
, 0);
7330 gimple_seq_add_stmt (&olist
, g
);
7333 block
= make_node (BLOCK
);
7334 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
7335 gsi_replace (gsi_p
, new_stmt
, true);
7337 pop_gimplify_context (new_stmt
);
7338 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
7339 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
7340 if (BLOCK_VARS (block
))
7341 TREE_USED (block
) = 1;
7344 gimple_seq_add_seq (&new_body
, ilist
);
7345 gimple_seq_add_stmt (&new_body
, stmt
);
7346 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
7347 gimple_seq_add_stmt (&new_body
, bind
);
7349 t
= gimple_build_omp_continue (control
, control
);
7350 gimple_seq_add_stmt (&new_body
, t
);
7352 gimple_seq_add_seq (&new_body
, olist
);
7353 if (ctx
->cancellable
)
7354 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
7355 gimple_seq_add_seq (&new_body
, dlist
);
7357 new_body
= maybe_catch_exception (new_body
);
7359 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
7360 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
7361 t
= gimple_build_omp_return (nowait
);
7362 gimple_seq_add_stmt (&new_body
, t
);
7363 gimple_seq_add_seq (&new_body
, tred_dlist
);
7364 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
7367 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
7369 gimple_bind_set_body (new_stmt
, new_body
);
7373 /* A subroutine of lower_omp_single. Expand the simple form of
7374 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7376 if (GOMP_single_start ())
7378 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7380 FIXME. It may be better to delay expanding the logic of this until
7381 pass_expand_omp. The expanded logic may make the job more difficult
7382 to a synchronization analysis pass. */
7385 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
7387 location_t loc
= gimple_location (single_stmt
);
7388 tree tlabel
= create_artificial_label (loc
);
7389 tree flabel
= create_artificial_label (loc
);
7390 gimple
*call
, *cond
;
7393 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
7394 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
7395 call
= gimple_build_call (decl
, 0);
7396 gimple_call_set_lhs (call
, lhs
);
7397 gimple_seq_add_stmt (pre_p
, call
);
7399 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
7400 fold_convert_loc (loc
, TREE_TYPE (lhs
),
7403 gimple_seq_add_stmt (pre_p
, cond
);
7404 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
7405 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7406 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
7410 /* A subroutine of lower_omp_single. Expand the simple form of
7411 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7413 #pragma omp single copyprivate (a, b, c)
7415 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7418 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7424 GOMP_single_copy_end (©out);
7435 FIXME. It may be better to delay expanding the logic of this until
7436 pass_expand_omp. The expanded logic may make the job more difficult
7437 to a synchronization analysis pass. */
7440 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
7443 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
7444 gimple_seq copyin_seq
;
7445 location_t loc
= gimple_location (single_stmt
);
7447 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
7449 ptr_type
= build_pointer_type (ctx
->record_type
);
7450 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
7452 l0
= create_artificial_label (loc
);
7453 l1
= create_artificial_label (loc
);
7454 l2
= create_artificial_label (loc
);
7456 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
7457 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
7458 t
= fold_convert_loc (loc
, ptr_type
, t
);
7459 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
7461 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
7462 build_int_cst (ptr_type
, 0));
7463 t
= build3 (COND_EXPR
, void_type_node
, t
,
7464 build_and_jump (&l0
), build_and_jump (&l1
));
7465 gimplify_and_add (t
, pre_p
);
7467 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
7469 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7472 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
7475 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
7476 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
7477 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
7478 gimplify_and_add (t
, pre_p
);
7480 t
= build_and_jump (&l2
);
7481 gimplify_and_add (t
, pre_p
);
7483 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
7485 gimple_seq_add_seq (pre_p
, copyin_seq
);
7487 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
7491 /* Expand code for an OpenMP single directive. */
7494 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7497 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
7499 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
7501 push_gimplify_context ();
7503 block
= make_node (BLOCK
);
7504 bind
= gimple_build_bind (NULL
, NULL
, block
);
7505 gsi_replace (gsi_p
, bind
, true);
7508 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
7509 &bind_body
, &dlist
, ctx
, NULL
);
7510 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
7512 gimple_seq_add_stmt (&bind_body
, single_stmt
);
7514 if (ctx
->record_type
)
7515 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
7517 lower_omp_single_simple (single_stmt
, &bind_body
);
7519 gimple_omp_set_body (single_stmt
, NULL
);
7521 gimple_seq_add_seq (&bind_body
, dlist
);
7523 bind_body
= maybe_catch_exception (bind_body
);
7525 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
7526 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
7527 gimple
*g
= gimple_build_omp_return (nowait
);
7528 gimple_seq_add_stmt (&bind_body_tail
, g
);
7529 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
7530 if (ctx
->record_type
)
7532 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
7533 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
7534 TREE_THIS_VOLATILE (clobber
) = 1;
7535 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
7536 clobber
), GSI_SAME_STMT
);
7538 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
7539 gimple_bind_set_body (bind
, bind_body
);
7541 pop_gimplify_context (bind
);
7543 gimple_bind_append_vars (bind
, ctx
->block_vars
);
7544 BLOCK_VARS (block
) = ctx
->block_vars
;
7545 if (BLOCK_VARS (block
))
7546 TREE_USED (block
) = 1;
7550 /* Expand code for an OpenMP master directive. */
7553 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7555 tree block
, lab
= NULL
, x
, bfn_decl
;
7556 gimple
*stmt
= gsi_stmt (*gsi_p
);
7558 location_t loc
= gimple_location (stmt
);
7561 push_gimplify_context ();
7563 block
= make_node (BLOCK
);
7564 bind
= gimple_build_bind (NULL
, NULL
, block
);
7565 gsi_replace (gsi_p
, bind
, true);
7566 gimple_bind_add_stmt (bind
, stmt
);
7568 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
7569 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
7570 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
7571 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
7573 gimplify_and_add (x
, &tseq
);
7574 gimple_bind_add_seq (bind
, tseq
);
7576 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
7577 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
7578 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
7579 gimple_omp_set_body (stmt
, NULL
);
7581 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
7583 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
7585 pop_gimplify_context (bind
);
7587 gimple_bind_append_vars (bind
, ctx
->block_vars
);
7588 BLOCK_VARS (block
) = ctx
->block_vars
;
7591 /* Helper function for lower_omp_task_reductions. For a specific PASS
7592 find out the current clause it should be processed, or return false
7593 if all have been processed already. */
7596 omp_task_reduction_iterate (int pass
, enum tree_code code
,
7597 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
7598 tree
*type
, tree
*next
)
7600 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
7602 if (ccode
== OMP_CLAUSE_REDUCTION
7603 && code
!= OMP_TASKLOOP
7604 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
7606 *decl
= OMP_CLAUSE_DECL (*c
);
7607 *type
= TREE_TYPE (*decl
);
7608 if (TREE_CODE (*decl
) == MEM_REF
)
7615 if (omp_is_reference (*decl
))
7616 *type
= TREE_TYPE (*type
);
7617 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
7620 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
7629 /* Lower task_reduction and reduction clauses (the latter unless CODE is
7630 OMP_TASKGROUP only with task modifier). Register mapping of those in
7631 START sequence and reducing them and unregister them in the END sequence. */
7634 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
7635 gimple_seq
*start
, gimple_seq
*end
)
7637 enum omp_clause_code ccode
7638 = (code
== OMP_TASKGROUP
7639 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
7640 tree cancellable
= NULL_TREE
;
7641 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
7642 if (clauses
== NULL_TREE
)
7644 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
7646 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
7647 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
7648 && outer
->cancellable
)
7650 cancellable
= error_mark_node
;
7653 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
7656 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
7657 tree
*last
= &TYPE_FIELDS (record_type
);
7661 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
7663 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
7666 DECL_CHAIN (field
) = ifield
;
7667 last
= &DECL_CHAIN (ifield
);
7668 DECL_CONTEXT (field
) = record_type
;
7669 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
7670 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
7671 DECL_CONTEXT (ifield
) = record_type
;
7672 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
7673 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
7675 for (int pass
= 0; pass
< 2; pass
++)
7677 tree decl
, type
, next
;
7678 for (tree c
= clauses
;
7679 omp_task_reduction_iterate (pass
, code
, ccode
,
7680 &c
, &decl
, &type
, &next
); c
= next
)
7683 tree new_type
= type
;
7685 new_type
= remap_type (type
, &ctx
->outer
->cb
);
7687 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
7688 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
7690 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
7692 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
7693 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
7694 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
7697 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
7698 DECL_CONTEXT (field
) = record_type
;
7699 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
7700 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
7702 last
= &DECL_CHAIN (field
);
7704 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
7706 DECL_CONTEXT (bfield
) = record_type
;
7707 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
7708 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
7710 last
= &DECL_CHAIN (bfield
);
7714 layout_type (record_type
);
7716 /* Build up an array which registers with the runtime all the reductions
7717 and deregisters them at the end. Format documented in libgomp/task.c. */
7718 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
7719 tree avar
= create_tmp_var_raw (atype
);
7720 gimple_add_tmp_var (avar
);
7721 TREE_ADDRESSABLE (avar
) = 1;
7722 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
7723 NULL_TREE
, NULL_TREE
);
7724 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
7725 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7726 gimple_seq seq
= NULL
;
7727 tree sz
= fold_convert (pointer_sized_int_node
,
7728 TYPE_SIZE_UNIT (record_type
));
7730 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
7731 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
7732 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
7733 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
7734 ctx
->task_reductions
.create (1 + cnt
);
7735 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
7736 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
7738 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
7739 gimple_seq_add_seq (start
, seq
);
7740 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
7741 NULL_TREE
, NULL_TREE
);
7742 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
7743 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
7744 NULL_TREE
, NULL_TREE
);
7745 t
= build_int_cst (pointer_sized_int_node
,
7746 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
7747 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7748 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
7749 NULL_TREE
, NULL_TREE
);
7750 t
= build_int_cst (pointer_sized_int_node
, -1);
7751 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7752 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
7753 NULL_TREE
, NULL_TREE
);
7754 t
= build_int_cst (pointer_sized_int_node
, 0);
7755 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7757 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
7758 and for each task reduction checks a bool right after the private variable
7759 within that thread's chunk; if the bool is clear, it hasn't been
7760 initialized and thus isn't going to be reduced nor destructed, otherwise
7761 reduce and destruct it. */
7762 tree idx
= create_tmp_var (size_type_node
);
7763 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
7764 tree num_thr_sz
= create_tmp_var (size_type_node
);
7765 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
7766 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
7767 tree lab3
= NULL_TREE
;
7769 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
7771 /* For worksharing constructs, only perform it in the master thread,
7772 with the exception of cancelled implicit barriers - then only handle
7773 the current thread. */
7774 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
7775 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
7776 tree thr_num
= create_tmp_var (integer_type_node
);
7777 g
= gimple_build_call (t
, 0);
7778 gimple_call_set_lhs (g
, thr_num
);
7779 gimple_seq_add_stmt (end
, g
);
7783 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
7784 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
7785 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
7786 if (code
== OMP_FOR
)
7787 c
= gimple_omp_for_clauses (ctx
->stmt
);
7788 else /* if (code == OMP_SECTIONS) */
7789 c
= gimple_omp_sections_clauses (ctx
->stmt
);
7790 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
7792 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
7794 gimple_seq_add_stmt (end
, g
);
7795 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
7796 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
7797 gimple_seq_add_stmt (end
, g
);
7798 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
7799 build_one_cst (TREE_TYPE (idx
)));
7800 gimple_seq_add_stmt (end
, g
);
7801 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
7802 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
7804 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
7805 gimple_seq_add_stmt (end
, g
);
7806 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
7808 if (code
!= OMP_PARALLEL
)
7810 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
7811 tree num_thr
= create_tmp_var (integer_type_node
);
7812 g
= gimple_build_call (t
, 0);
7813 gimple_call_set_lhs (g
, num_thr
);
7814 gimple_seq_add_stmt (end
, g
);
7815 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
7816 gimple_seq_add_stmt (end
, g
);
7818 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
7822 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7823 OMP_CLAUSE__REDUCTEMP_
);
7824 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
7825 t
= fold_convert (size_type_node
, t
);
7826 gimplify_assign (num_thr_sz
, t
, end
);
7828 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
7829 NULL_TREE
, NULL_TREE
);
7830 tree data
= create_tmp_var (pointer_sized_int_node
);
7831 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
7832 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
7834 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
7835 ptr
= create_tmp_var (build_pointer_type (record_type
));
7837 ptr
= create_tmp_var (ptr_type_node
);
7838 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
7840 tree field
= TYPE_FIELDS (record_type
);
7843 field
= DECL_CHAIN (DECL_CHAIN (field
));
7844 for (int pass
= 0; pass
< 2; pass
++)
7846 tree decl
, type
, next
;
7847 for (tree c
= clauses
;
7848 omp_task_reduction_iterate (pass
, code
, ccode
,
7849 &c
, &decl
, &type
, &next
); c
= next
)
7851 tree var
= decl
, ref
;
7852 if (TREE_CODE (decl
) == MEM_REF
)
7854 var
= TREE_OPERAND (var
, 0);
7855 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7856 var
= TREE_OPERAND (var
, 0);
7858 if (TREE_CODE (var
) == ADDR_EXPR
)
7859 var
= TREE_OPERAND (var
, 0);
7860 else if (TREE_CODE (var
) == INDIRECT_REF
)
7861 var
= TREE_OPERAND (var
, 0);
7862 tree orig_var
= var
;
7863 if (is_variable_sized (var
))
7865 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
7866 var
= DECL_VALUE_EXPR (var
);
7867 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
7868 var
= TREE_OPERAND (var
, 0);
7869 gcc_assert (DECL_P (var
));
7871 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
7872 if (orig_var
!= var
)
7873 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
7874 else if (TREE_CODE (v
) == ADDR_EXPR
)
7875 t
= build_fold_addr_expr (t
);
7876 else if (TREE_CODE (v
) == INDIRECT_REF
)
7877 t
= build_fold_indirect_ref (t
);
7878 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
7880 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
7881 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
7882 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
7884 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
7885 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
7886 fold_convert (size_type_node
,
7887 TREE_OPERAND (decl
, 1)));
7891 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
7892 if (!omp_is_reference (decl
))
7893 t
= build_fold_addr_expr (t
);
7895 t
= fold_convert (pointer_sized_int_node
, t
);
7897 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
7898 gimple_seq_add_seq (start
, seq
);
7899 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
7900 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
7901 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7902 t
= unshare_expr (byte_position (field
));
7903 t
= fold_convert (pointer_sized_int_node
, t
);
7904 ctx
->task_reduction_map
->put (c
, cnt
);
7905 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
7908 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
7909 gimple_seq_add_seq (start
, seq
);
7910 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
7911 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
7912 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7914 tree bfield
= DECL_CHAIN (field
);
7916 if (code
== OMP_PARALLEL
|| code
== OMP_FOR
|| code
== OMP_SECTIONS
)
7917 /* In parallel or worksharing all threads unconditionally
7918 initialize all their task reduction private variables. */
7919 cond
= boolean_true_node
;
7920 else if (TREE_TYPE (ptr
) == ptr_type_node
)
7922 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
7923 unshare_expr (byte_position (bfield
)));
7925 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
7926 gimple_seq_add_seq (end
, seq
);
7927 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
7928 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
7929 build_int_cst (pbool
, 0));
7932 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
7933 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
7934 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
7935 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
7936 tree condv
= create_tmp_var (boolean_type_node
);
7937 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
7938 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
7940 gimple_seq_add_stmt (end
, g
);
7941 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
7942 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
7944 /* If this reduction doesn't need destruction and parallel
7945 has been cancelled, there is nothing to do for this
7946 reduction, so jump around the merge operation. */
7947 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
7948 g
= gimple_build_cond (NE_EXPR
, cancellable
,
7949 build_zero_cst (TREE_TYPE (cancellable
)),
7951 gimple_seq_add_stmt (end
, g
);
7952 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
7956 if (TREE_TYPE (ptr
) == ptr_type_node
)
7958 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
7959 unshare_expr (byte_position (field
)));
7961 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
7962 gimple_seq_add_seq (end
, seq
);
7963 tree pbool
= build_pointer_type (TREE_TYPE (field
));
7964 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
7965 build_int_cst (pbool
, 0));
7968 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
7969 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
7971 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
7972 if (TREE_CODE (decl
) != MEM_REF
&& omp_is_reference (decl
))
7973 ref
= build_simple_mem_ref (ref
);
7974 /* reduction(-:var) sums up the partial results, so it acts
7975 identically to reduction(+:var). */
7976 if (rcode
== MINUS_EXPR
)
7978 if (TREE_CODE (decl
) == MEM_REF
)
7980 tree type
= TREE_TYPE (new_var
);
7981 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7982 tree i
= create_tmp_var (TREE_TYPE (v
));
7983 tree ptype
= build_pointer_type (TREE_TYPE (type
));
7986 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7987 tree vv
= create_tmp_var (TREE_TYPE (v
));
7988 gimplify_assign (vv
, v
, start
);
7991 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
7992 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
7993 new_var
= build_fold_addr_expr (new_var
);
7994 new_var
= fold_convert (ptype
, new_var
);
7995 ref
= fold_convert (ptype
, ref
);
7996 tree m
= create_tmp_var (ptype
);
7997 gimplify_assign (m
, new_var
, end
);
7999 m
= create_tmp_var (ptype
);
8000 gimplify_assign (m
, ref
, end
);
8002 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
8003 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
8004 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
8005 gimple_seq_add_stmt (end
, gimple_build_label (body
));
8006 tree priv
= build_simple_mem_ref (new_var
);
8007 tree out
= build_simple_mem_ref (ref
);
8008 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8010 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8011 tree decl_placeholder
8012 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
8013 tree lab6
= NULL_TREE
;
8016 /* If this reduction needs destruction and parallel
8017 has been cancelled, jump around the merge operation
8018 to the destruction. */
8019 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8020 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8021 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8022 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8024 gimple_seq_add_stmt (end
, g
);
8025 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8027 SET_DECL_VALUE_EXPR (placeholder
, out
);
8028 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8029 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
8030 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
8031 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8032 gimple_seq_add_seq (end
,
8033 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8034 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8035 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8037 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8038 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
8041 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8042 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
8045 gimple_seq tseq
= NULL
;
8046 gimplify_stmt (&x
, &tseq
);
8047 gimple_seq_add_seq (end
, tseq
);
8052 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
8053 out
= unshare_expr (out
);
8054 gimplify_assign (out
, x
, end
);
8057 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
8058 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8059 gimple_seq_add_stmt (end
, g
);
8060 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
8061 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8062 gimple_seq_add_stmt (end
, g
);
8063 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
8064 build_int_cst (TREE_TYPE (i
), 1));
8065 gimple_seq_add_stmt (end
, g
);
8066 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
8067 gimple_seq_add_stmt (end
, g
);
8068 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
8070 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8072 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8073 tree oldv
= NULL_TREE
;
8074 tree lab6
= NULL_TREE
;
8077 /* If this reduction needs destruction and parallel
8078 has been cancelled, jump around the merge operation
8079 to the destruction. */
8080 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8081 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8082 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8083 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8085 gimple_seq_add_stmt (end
, g
);
8086 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8088 if (omp_is_reference (decl
)
8089 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
8091 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8092 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8093 tree refv
= create_tmp_var (TREE_TYPE (ref
));
8094 gimplify_assign (refv
, ref
, end
);
8095 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
8096 SET_DECL_VALUE_EXPR (placeholder
, ref
);
8097 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8098 tree d
= maybe_lookup_decl (decl
, ctx
);
8100 if (DECL_HAS_VALUE_EXPR_P (d
))
8101 oldv
= DECL_VALUE_EXPR (d
);
8102 if (omp_is_reference (var
))
8104 tree v
= fold_convert (TREE_TYPE (d
),
8105 build_fold_addr_expr (new_var
));
8106 SET_DECL_VALUE_EXPR (d
, v
);
8109 SET_DECL_VALUE_EXPR (d
, new_var
);
8110 DECL_HAS_VALUE_EXPR_P (d
) = 1;
8111 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8113 SET_DECL_VALUE_EXPR (d
, oldv
);
8116 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
8117 DECL_HAS_VALUE_EXPR_P (d
) = 0;
8119 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8120 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8121 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8122 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8124 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8125 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
8128 gimple_seq tseq
= NULL
;
8129 gimplify_stmt (&x
, &tseq
);
8130 gimple_seq_add_seq (end
, tseq
);
8135 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
8136 ref
= unshare_expr (ref
);
8137 gimplify_assign (ref
, x
, end
);
8139 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8141 field
= DECL_CHAIN (bfield
);
8145 if (code
== OMP_TASKGROUP
)
8147 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
8148 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8149 gimple_seq_add_stmt (start
, g
);
8154 if (code
== OMP_FOR
)
8155 c
= gimple_omp_for_clauses (ctx
->stmt
);
8156 else if (code
== OMP_SECTIONS
)
8157 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8159 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
8160 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
8161 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
8162 build_fold_addr_expr (avar
));
8163 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
8166 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
8167 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
8169 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
8170 gimple_seq_add_stmt (end
, g
);
8171 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
8172 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8174 enum built_in_function bfn
8175 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
8176 t
= builtin_decl_explicit (bfn
);
8177 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
8181 arg
= create_tmp_var (c_bool_type
);
8182 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
8186 arg
= build_int_cst (c_bool_type
, 0);
8187 g
= gimple_build_call (t
, 1, arg
);
8191 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
8192 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8194 gimple_seq_add_stmt (end
, g
);
8195 t
= build_constructor (atype
, NULL
);
8196 TREE_THIS_VOLATILE (t
) = 1;
8197 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
8200 /* Expand code for an OpenMP taskgroup directive. */
8203 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8205 gimple
*stmt
= gsi_stmt (*gsi_p
);
8208 gimple_seq dseq
= NULL
;
8209 tree block
= make_node (BLOCK
);
8211 bind
= gimple_build_bind (NULL
, NULL
, block
);
8212 gsi_replace (gsi_p
, bind
, true);
8213 gimple_bind_add_stmt (bind
, stmt
);
8215 push_gimplify_context ();
8217 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
8219 gimple_bind_add_stmt (bind
, x
);
8221 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
8222 gimple_omp_taskgroup_clauses (stmt
),
8223 gimple_bind_body_ptr (bind
), &dseq
);
8225 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8226 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8227 gimple_omp_set_body (stmt
, NULL
);
8229 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8230 gimple_bind_add_seq (bind
, dseq
);
8232 pop_gimplify_context (bind
);
8234 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8235 BLOCK_VARS (block
) = ctx
->block_vars
;
8239 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
/* Merge and fold depend(sink:) clauses on an omp ordered statement into a
   single canonical sink clause (GCD of the first dimension, min/max of the
   rest, depending on loop direction).
   NOTE(review): the text below was damaged by extraction -- original line
   numbers are fused into the code and some fragments (braces, guard lines,
   loop-advance statements) are missing; the comments annotate only the
   logic that is visible.  */
8242 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
8245 struct omp_for_data fd
;
/* Only meaningful when directly nested in a GIMPLE_OMP_FOR; bail
   otherwise.  */
8246 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
8249 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
8250 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
8251 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
8255 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
8256 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
8257 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
8258 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
8260 /* Merge depend clauses from multiple adjacent
8261 #pragma omp ordered depend(sink:...) constructs
8262 into one #pragma omp ordered depend(sink:...), so that
8263 we can optimize them together. */
8264 gimple_stmt_iterator gsi
= *gsi_p
;
/* Scan forward over following statements; stop at anything that is not
   another ordered-with-sink (debug stmts and NOPs are stepped over).  */
8266 while (!gsi_end_p (gsi
))
8268 gimple
*stmt
= gsi_stmt (gsi
);
8269 if (is_gimple_debug (stmt
)
8270 || gimple_code (stmt
) == GIMPLE_NOP
)
8275 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
8277 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
8278 c
= gimple_omp_ordered_clauses (ord_stmt2
);
8280 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
8281 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
/* Splice the adjacent statement's clauses onto ours and remove it from
   the statement stream.  */
8284 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
8286 gsi_remove (&gsi
, true);
8290 /* Canonicalize sink dependence clauses into one folded clause if
8293 The basic algorithm is to create a sink vector whose first
8294 element is the GCD of all the first elements, and whose remaining
8295 elements are the minimum of the subsequent columns.
8297 We ignore dependence vectors whose first element is zero because
8298 such dependencies are known to be executed by the same thread.
8300 We take into account the direction of the loop, so a minimum
8301 becomes a maximum if the loop is iterating forwards. We also
8302 ignore sink clauses where the loop direction is unknown, or where
8303 the offsets are clearly invalid because they are not a multiple
8304 of the loop increment.
8308 #pragma omp for ordered(2)
8309 for (i=0; i < N; ++i)
8310 for (j=0; j < M; ++j)
8312 #pragma omp ordered \
8313 depend(sink:i-8,j-2) \
8314 depend(sink:i,j-1) \ // Completely ignored because i+0.
8315 depend(sink:i-4,j-3) \
8316 depend(sink:i-6,j-4)
8317 #pragma omp ordered depend(source)
8322 depend(sink:-gcd(8,4,6),-min(2,3,4))
8327 /* FIXME: Computing GCD's where the first element is zero is
8328 non-trivial in the presence of collapsed loops. Do this later. */
8329 if (fd
.collapse
> 1)
/* folded_deps layout: slot 0 holds the GCD of the first elements;
   slots 1..len-1 the folded minima; slots len..2*len-2 hold the current
   clause's offsets while it is being processed.  */
8332 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
8334 /* wide_int is not a POD so it must be default-constructed. */
8335 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
8336 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
8338 tree folded_dep
= NULL_TREE
;
8339 /* TRUE if the first dimension's offset is negative. */
8340 bool neg_offset_p
= false;
8342 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
/* Walk every clause, folding each sink vector into FOLDED_DEP; non-sink
   depend clauses are skipped via next_ordered_clause.  */
8344 while ((c
= *list_p
) != NULL
)
8346 bool remove
= false;
8348 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
8349 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8350 goto next_ordered_clause
;
8353 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
8354 vec
&& TREE_CODE (vec
) == TREE_LIST
;
8355 vec
= TREE_CHAIN (vec
), ++i
)
8357 gcc_assert (i
< len
);
8359 /* omp_extract_for_data has canonicalized the condition. */
8360 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
8361 || fd
.loops
[i
].cond_code
== GT_EXPR
);
8362 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
8363 bool maybe_lexically_later
= true;
8365 /* While the committee makes up its mind, bail if we have any
8366 non-constant steps. */
8367 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
8368 goto lower_omp_ordered_ret
;
8370 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
8371 if (POINTER_TYPE_P (itype
))
8373 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
8374 TYPE_PRECISION (itype
),
8377 /* Ignore invalid offsets that are not multiples of the step. */
8378 if (!wi::multiple_of_p (wi::abs (offset
),
8379 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
8382 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
8383 "ignoring sink clause with offset that is not "
8384 "a multiple of the loop step");
8386 goto next_ordered_clause
;
8389 /* Calculate the first dimension. The first dimension of
8390 the folded dependency vector is the GCD of the first
8391 elements, while ignoring any first elements whose offset
8395 /* Ignore dependence vectors whose first dimension is 0. */
8399 goto next_ordered_clause
;
8403 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
8405 error_at (OMP_CLAUSE_LOCATION (c
),
8406 "first offset must be in opposite direction "
8407 "of loop iterations");
8408 goto lower_omp_ordered_ret
;
8412 neg_offset_p
= forward
;
8413 /* Initialize the first time around. */
8414 if (folded_dep
== NULL_TREE
)
8417 folded_deps
[0] = offset
;
8420 folded_deps
[0] = wi::gcd (folded_deps
[0],
8424 /* Calculate minimum for the remaining dimensions. */
8427 folded_deps
[len
+ i
- 1] = offset
;
8428 if (folded_dep
== c
)
8429 folded_deps
[i
] = offset
;
8430 else if (maybe_lexically_later
8431 && !wi::eq_p (folded_deps
[i
], offset
))
8433 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
8437 for (j
= 1; j
<= i
; j
++)
8438 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
8441 maybe_lexically_later
= false;
8445 gcc_assert (i
== len
);
8449 next_ordered_clause
:
8451 *list_p
= OMP_CLAUSE_CHAIN (c
);
8453 list_p
= &OMP_CLAUSE_CHAIN (c
);
/* NOTE(review): the guard line before this negation was dropped by
   extraction; the original presumably negates only when neg_offset_p
   is set -- confirm against upstream.  */
8459 folded_deps
[0] = -folded_deps
[0];
/* Write the folded offsets back into the surviving clause and make it
   the sole clause on the ordered statement.  */
8461 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
8462 if (POINTER_TYPE_P (itype
))
8465 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
8466 = wide_int_to_tree (itype
, folded_deps
[0]);
8467 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
8468 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
8471 lower_omp_ordered_ret
:
8473 /* Ordered without clauses is #pragma omp threads, while we want
8474 a nop instead if we remove all clauses. */
8475 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
8476 gsi_replace (gsi_p
, gimple_build_nop (), true);
8480 /* Expand code for an OpenMP ordered directive. */
8483 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8486 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
8487 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
8490 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8492 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8495 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
8496 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8497 OMP_CLAUSE_THREADS
);
8499 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8502 /* FIXME: This is needs to be moved to the expansion to verify various
8503 conditions only testable on cfg with dominators computed, and also
8504 all the depend clauses to be merged still might need to be available
8505 for the runtime checks. */
8507 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
8511 push_gimplify_context ();
8513 block
= make_node (BLOCK
);
8514 bind
= gimple_build_bind (NULL
, NULL
, block
);
8515 gsi_replace (gsi_p
, bind
, true);
8516 gimple_bind_add_stmt (bind
, stmt
);
8520 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
8521 build_int_cst (NULL_TREE
, threads
));
8522 cfun
->has_simduid_loops
= true;
8525 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
8527 gimple_bind_add_stmt (bind
, x
);
8529 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
8532 counter
= create_tmp_var (integer_type_node
);
8533 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
8534 gimple_call_set_lhs (g
, counter
);
8535 gimple_bind_add_stmt (bind
, g
);
8537 body
= create_artificial_label (UNKNOWN_LOCATION
);
8538 test
= create_artificial_label (UNKNOWN_LOCATION
);
8539 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
8541 tree simt_pred
= create_tmp_var (integer_type_node
);
8542 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
8543 gimple_call_set_lhs (g
, simt_pred
);
8544 gimple_bind_add_stmt (bind
, g
);
8546 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
8547 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
8548 gimple_bind_add_stmt (bind
, g
);
8550 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
8552 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8553 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8554 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8555 gimple_omp_set_body (stmt
, NULL
);
8559 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
8560 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
8561 gimple_bind_add_stmt (bind
, g
);
8563 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
8564 tree nonneg
= create_tmp_var (integer_type_node
);
8565 gimple_seq tseq
= NULL
;
8566 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
8567 gimple_bind_add_seq (bind
, tseq
);
8569 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
8570 gimple_call_set_lhs (g
, nonneg
);
8571 gimple_bind_add_stmt (bind
, g
);
8573 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
8574 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
8575 gimple_bind_add_stmt (bind
, g
);
8577 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
8580 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
8581 build_int_cst (NULL_TREE
, threads
));
8583 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
8585 gimple_bind_add_stmt (bind
, x
);
8587 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8589 pop_gimplify_context (bind
);
8591 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8592 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8596 /* Expand code for an OpenMP scan directive and the structured block
8597 before the scan directive. */
/* Lower a GIMPLE_OMP_SCAN statement and the structured block before it,
   rewriting each inscan reduction on the enclosing loop.
   NOTE(review): the text below was damaged by extraction -- original line
   numbers are fused into the code and a number of condition/assignment
   lines are missing; comments annotate only the visible logic.  */
8600 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8602 gimple
*stmt
= gsi_stmt (*gsi_p
);
8604 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
8605 tree lane
= NULL_TREE
;
8606 gimple_seq before
= NULL
;
8607 omp_context
*octx
= ctx
->outer
;
8609 if (octx
->scan_exclusive
&& !has_clauses
)
8611 gimple_stmt_iterator gsi2
= *gsi_p
;
8613 gimple
*stmt2
= gsi_stmt (gsi2
);
8614 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
8615 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
8616 the one with exclusive clause(s), comes first. */
8618 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
8619 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
8621 gsi_remove (gsi_p
, false);
8622 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
8623 ctx
= maybe_lookup_ctx (stmt2
);
/* Recurse to lower the swapped-in scan statement.  */
8625 lower_omp_scan (gsi_p
, ctx
);
/* NOTE(review): input_phase = has_clauses ^ scan_inclusive -- presumably
   true while lowering the block before the scan directive; confirm.  */
8630 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
8631 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
8632 && (gimple_omp_for_kind (octx
->stmt
) & GF_OMP_FOR_SIMD
)
8633 && !gimple_omp_for_combined_into_p (octx
->stmt
));
8634 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
8635 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
8636 && !gimple_omp_for_combined_p (octx
->stmt
));
/* In a simd loop with a _simduid_ clause, obtain the current lane via
   IFN_GOMP_SIMD_LANE for indexing the "omp simd array" copies.  */
8638 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
8639 OMP_CLAUSE__SIMDUID_
))
8641 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
8642 lane
= create_tmp_var (unsigned_type_node
);
8643 tree t
= build_int_cst (integer_type_node
,
8645 : octx
->scan_inclusive
? 2 : 3);
8647 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
8648 gimple_call_set_lhs (g
, lane
);
8649 gimple_seq_add_stmt (&before
, g
);
8652 if (is_simd
|| is_for
)
/* Process every inscan reduction clause on the enclosing loop.  */
8654 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
8655 c
; c
= OMP_CLAUSE_CHAIN (c
))
8656 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
8657 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
8659 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8660 tree var
= OMP_CLAUSE_DECL (c
);
8661 tree new_var
= lookup_decl (var
, octx
);
8663 tree var2
= NULL_TREE
;
8664 tree var3
= NULL_TREE
;
8665 tree var4
= NULL_TREE
;
8666 tree lane0
= NULL_TREE
;
8667 tree new_vard
= new_var
;
8668 if (omp_is_reference (var
))
8670 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
8673 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
8675 val
= DECL_VALUE_EXPR (new_vard
);
8676 if (new_vard
!= new_var
)
8678 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
8679 val
= TREE_OPERAND (val
, 0);
8681 if (TREE_CODE (val
) == ARRAY_REF
8682 && VAR_P (TREE_OPERAND (val
, 0)))
8684 tree v
= TREE_OPERAND (val
, 0);
8685 if (lookup_attribute ("omp simd array",
8686 DECL_ATTRIBUTES (v
)))
/* Per-lane "omp simd array" copy: remember the original lane index in
   LANE0 and index the array by LANE instead.  */
8688 val
= unshare_expr (val
);
8689 lane0
= TREE_OPERAND (val
, 1);
8690 TREE_OPERAND (val
, 1) = lane
;
8691 var2
= lookup_decl (v
, octx
);
8692 if (octx
->scan_exclusive
)
8693 var4
= lookup_decl (var2
, octx
);
8695 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8696 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
8699 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
8700 var2
, lane
, NULL_TREE
, NULL_TREE
);
8701 TREE_THIS_NOTRAP (var2
) = 1;
8702 if (octx
->scan_exclusive
)
8704 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
8705 var4
, lane
, NULL_TREE
,
8707 TREE_THIS_NOTRAP (var4
) = 1;
8718 var2
= build_outer_var_ref (var
, octx
);
8719 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8721 var3
= maybe_lookup_decl (new_vard
, octx
);
8722 if (var3
== new_vard
|| var3
== NULL_TREE
)
8724 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
8726 var4
= maybe_lookup_decl (var3
, octx
);
8727 if (var4
== var3
|| var4
== NULL_TREE
)
8729 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
8740 && octx
->scan_exclusive
8742 && var4
== NULL_TREE
)
8743 var4
= create_tmp_var (TREE_TYPE (val
));
/* User-defined (placeholder-based) reductions: run the clause's recorded
   init/merge sequences with the placeholders bound via DECL_VALUE_EXPR.  */
8745 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8747 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8752 /* If we've added a separate identity element
8753 variable, copy it over into val. */
8754 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
8756 gimplify_and_add (x
, &before
);
8758 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
8760 /* Otherwise, assign to it the identity element. */
8761 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
8763 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
8764 tree ref
= build_outer_var_ref (var
, octx
);
8765 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
8766 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
8769 if (new_vard
!= new_var
)
8770 val
= build_fold_addr_expr_loc (clause_loc
, val
);
8771 SET_DECL_VALUE_EXPR (new_vard
, val
);
8773 SET_DECL_VALUE_EXPR (placeholder
, ref
);
8774 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8775 lower_omp (&tseq
, octx
);
/* Restore the previous DECL_VALUE_EXPRs after lowering the init seq.  */
8777 SET_DECL_VALUE_EXPR (new_vard
, x
);
8778 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
8779 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
8780 gimple_seq_add_seq (&before
, tseq
);
8782 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
8788 if (octx
->scan_exclusive
)
8790 tree v4
= unshare_expr (var4
);
8791 tree v2
= unshare_expr (var2
);
8792 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
8793 gimplify_and_add (x
, &before
);
/* Lower the user merge sequence with placeholder bound to var2.  */
8795 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
8796 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
8797 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
8799 if (x
&& new_vard
!= new_var
)
8800 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
8802 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
8803 SET_DECL_VALUE_EXPR (placeholder
, var2
);
8804 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8805 lower_omp (&tseq
, octx
);
8806 gimple_seq_add_seq (&before
, tseq
);
8807 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8809 SET_DECL_VALUE_EXPR (new_vard
, x
);
8810 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
8811 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
8812 if (octx
->scan_inclusive
)
8814 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
8816 gimplify_and_add (x
, &before
);
8818 else if (lane0
== NULL_TREE
)
8820 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
8822 gimplify_and_add (x
, &before
);
8830 /* input phase. Set val to initializer before
8832 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
8833 gimplify_assign (val
, x
, &before
);
/* Plain (built-in operator) reductions.  NOTE(review): the handling of
   MINUS_EXPR after this check was dropped by extraction; presumably it is
   canonicalized to addition, matching the reduction(-:var) comment
   earlier in this file -- confirm against upstream.  */
8838 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
8839 if (code
== MINUS_EXPR
)
8842 tree x
= build2 (code
, TREE_TYPE (var2
),
8843 unshare_expr (var2
), unshare_expr (val
));
8844 if (octx
->scan_inclusive
)
8846 gimplify_assign (unshare_expr (var2
), x
, &before
);
8847 gimplify_assign (val
, var2
, &before
);
8851 gimplify_assign (unshare_expr (var4
),
8852 unshare_expr (var2
), &before
);
8853 gimplify_assign (var2
, x
, &before
);
8854 if (lane0
== NULL_TREE
)
8855 gimplify_assign (val
, var4
, &before
);
8859 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
8861 tree vexpr
= unshare_expr (var4
);
8862 TREE_OPERAND (vexpr
, 1) = lane0
;
8863 if (new_vard
!= new_var
)
8864 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
8865 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
8869 else if (has_clauses
)
8870 sorry_at (gimple_location (stmt
),
8871 "%<#pragma omp scan%> not supported yet");
/* Splice the lowered body plus the BEFORE prologue into the statement
   stream and replace the scan statement itself with a nop.  */
8874 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
8875 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
8876 gsi_replace (gsi_p
, gimple_build_nop (), true);
8879 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
8882 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (stmt
));
8883 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

/* Map from critical section names to the artificial mutex variable created
   for each name.  GTY-rooted because it persists across functions.  */
static GTY(()) hash_map<tree, tree> *critical_name_mutexes;

/* NOTE(review): body reconstructed from a line-number-mangled extraction;
   brace/else lines that the extraction dropped were restored from syntax —
   verify against the upstream source.  */

static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      /* Named critical: lock on a per-name global mutex so that all
	 translation units agree on the same lock object.  */
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  /* The symbol name is coordinated across languages/TUs: every
	     reference to the same critical name must produce the same
	     COMMON symbol.  */
	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      /* Unnamed critical: the runtime supplies a single implicit mutex.  */
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  /* Wrap the construct in a GIMPLE_BIND: lock call, lowered body
     (with EH protection so the lock is released on exceptions),
     unlock call, then the OMP_RETURN marker.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  *CLIST is for lastprivate(conditional:) code that needs
   to be emitted in a critical section.  */

/* NOTE(review): body reconstructed from a line-number-mangled extraction;
   dropped brace/else/argument-tail lines were restored from syntax —
   verify against the upstream source.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, gimple_seq *clist,
			   struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  /* Invert the loop condition: lastprivate fires on the iteration after
     which (V cond N2) no longer holds.  */
  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
      || gimple_omp_for_grid_phony (fd->for_stmt))
    cond = omp_grid_lastprivate_predicate (fd);
  else
    {
      tree n2 = fd->loop.n2;
      if (fd->collapse > 1
	  && TREE_CODE (n2) != INTEGER_CST
	  && gimple_omp_for_combined_into_p (fd->for_stmt))
	{
	  /* For a combined (e.g. parallel for / taskloop) construct the
	     real iteration bound lives in a _looptemp_ of the enclosing
	     taskreg context, not in fd->loop.n2.  Find it.  */
	  struct omp_context *taskreg_ctx = NULL;
	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	    {
	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
		{
		  if (gimple_omp_for_combined_into_p (gfor))
		    {
		      gcc_assert (ctx->outer->outer
				  && is_parallel_ctx (ctx->outer->outer));
		      taskreg_ctx = ctx->outer->outer;
		    }
		  else
		    {
		      /* Not combined further: take the bound from the
			 outer worksharing loop directly.  */
		      struct omp_for_data outer_fd;
		      omp_extract_for_data (gfor, &outer_fd, NULL);
		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		    }
		}
	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
		taskreg_ctx = ctx->outer->outer;
	    }
	  else if (is_taskreg_ctx (ctx->outer))
	    taskreg_ctx = ctx->outer;
	  if (taskreg_ctx)
	    {
	      int i;
	      /* Skip fd->collapse _looptemp_ clauses (istart/iend pairs);
		 the following one holds the iteration count bound.  */
	      tree taskreg_clauses
		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	      tree innerc = omp_find_clause (taskreg_clauses,
					     OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	      for (i = 0; i < fd->collapse; i++)
		{
		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					    OMP_CLAUSE__LOOPTEMP_);
		  gcc_assert (innerc);
		}
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      if (innerc)
		n2 = fold_convert (TREE_TYPE (n2),
				   lookup_decl (OMP_CLAUSE_DECL (innerc),
						taskreg_ctx));
	    }
	}
      cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
    }

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
/* Callback for walk_gimple_seq.  Find #pragma omp scan statement.  */

/* NOTE(review): reconstructed from a mangled extraction; the WALK_SUBSTMTS
   / default / return-NULL tail was restored from syntax — verify against
   the upstream source.  */

static tree
omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_SCAN:
      /* Record the iterator of the scan statement for the caller and
	 stop walking (non-NULL return terminates the walk).  */
      *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
      return integer_zero_node;
    default:
      break;
    }
  return NULL;
}
9127 /* Helper function for lower_omp_for, add transformations for a worksharing
9128 loop with scan directives inside of it.
9129 For worksharing loop not combined with simd, transform:
9130 #pragma omp for reduction(inscan,+:r) private(i)
9131 for (i = 0; i < n; i = i + 1)
9136 #pragma omp scan inclusive(r)
9142 into two worksharing loops + code to merge results:
9144 num_threads = omp_get_num_threads ();
9145 thread_num = omp_get_thread_num ();
9146 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9151 // For UDRs this is UDR init, or if ctors are needed, copy from
9152 // var3 that has been constructed to contain the neutral element.
9156 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9157 // a shared array with num_threads elements and rprivb to a local array
9158 // number of elements equal to the number of (contiguous) iterations the
9159 // current thread will perform. controlb and controlp variables are
9160 // temporaries to handle deallocation of rprivb at the end of second
9162 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9163 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9164 for (i = 0; i < n; i = i + 1)
9167 // For UDRs this is UDR init or copy from var3.
9169 // This is the input phase from user code.
9173 // For UDRs this is UDR merge.
9175 // Rather than handing it over to the user, save to local thread's
9177 rprivb[ivar] = var2;
9178 // For exclusive scan, the above two statements are swapped.
9182 // And remember the final value from this thread's into the shared
9184 rpriva[(sizetype) thread_num] = var2;
9185 // If more than one thread, compute using Work-Efficient prefix sum
9186 // the inclusive parallel scan of the rpriva array.
9187 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9192 num_threadsu = (unsigned int) num_threads;
9193 thread_numup1 = (unsigned int) thread_num + 1;
9196 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9200 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9205 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9206 mul = REALPART_EXPR <cplx>;
9207 ovf = IMAGPART_EXPR <cplx>;
9208 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9211 andvm1 = andv + 4294967295;
9213 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9215 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9216 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9217 rpriva[l] = rpriva[l - k] + rpriva[l];
9219 if (down == 0) goto <D.2121>; else goto <D.2122>;
9227 if (k != 0) goto <D.2108>; else goto <D.2103>;
9229 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9231 // For UDRs this is UDR init or copy from var3.
9235 var2 = rpriva[thread_num - 1];
9238 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9239 reduction(inscan,+:r) private(i)
9240 for (i = 0; i < n; i = i + 1)
9243 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9244 r = var2 + rprivb[ivar];
9247 // This is the scan phase from user code.
9249 // Plus a bump of the iterator.
/* NOTE(review): this function was reconstructed from a line-number-mangled
   extraction in which brace/else/blank and several statement lines were
   dropped.  Restored lines follow the visible token order strictly; each
   non-forced restoration is marked below — verify against the upstream
   source before relying on this text.  */

static void
lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
		    struct omp_for_data *fd, omp_context *ctx)
{
  /* The worksharing loop must contain a scan directive.  */
  gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);

  /* Locate the GIMPLE_OMP_SCAN separating the input phase from the scan
     phase inside the loop body.  */
  gimple_seq body = gimple_omp_body (stmt);
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;	/* restored line — TODO confirm */
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gimple_stmt_iterator gsi = input1_gsi;
  gsi_next (&gsi);
  gimple_stmt_iterator scan1_gsi = gsi;
  gimple *scan_stmt1 = gsi_stmt (gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);

  gimple_seq input_body = gimple_omp_body (input_stmt1);
  gimple_seq scan_body = gimple_omp_body (scan_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);
  gimple_omp_set_body (scan_stmt1, NULL);
  gimple_omp_set_body (stmt, NULL);

  /* Duplicate the loop: the first copy runs the input phase, the second
     the scan phase.  */
  gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
  gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
  gimple_omp_set_body (stmt, body);
  gimple_omp_set_body (input_stmt1, input_body);

  /* Find the scan statement again inside the copied body.  */
  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;	/* restored line — TODO confirm */
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi = input2_gsi;
  gsi_next (&gsi);
  gimple_stmt_iterator scan2_gsi = gsi;
  gimple *scan_stmt2 = gsi_stmt (gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  gimple_omp_set_body (scan_stmt2, scan_body);

  /* num_threads = omp_get_num_threads (); thread_num = omp_get_thread_num ();  */
  tree num_threads = create_tmp_var (integer_type_node);
  tree thread_num = create_tmp_var (integer_type_node);
  tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
  tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  gimple *g = gimple_build_call (nthreads_decl, 0);
  gimple_call_set_lhs (g, num_threads);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_call (threadnum_decl, 0);
  gimple_call_set_lhs (g, thread_num);
  gimple_seq_add_stmt (body_p, g);

  tree ivar = create_tmp_var (sizetype);
  tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
  tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
  tree k = create_tmp_var (unsigned_type_node);
  tree l = create_tmp_var (unsigned_type_node);

  /* Sequences collecting per-reduction code for the various insertion
     points described in the big comment above.  */
  gimple_seq clist = NULL, mdlist = NULL;
  gimple_seq thr01_list = NULL, thrn1_list = NULL;
  gimple_seq thr02_list = NULL, thrn2_list = NULL;
  gimple_seq scan1_list = NULL, input2_list = NULL;
  gimple_seq last_list = NULL, reduc_list = NULL;
  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& OMP_CLAUSE_REDUCTION_INSCAN (c))
      {
	location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	tree var = OMP_CLAUSE_DECL (c);
	tree new_var = lookup_decl (var, ctx);
	tree var3 = NULL_TREE;
	tree new_vard = new_var;
	if (omp_is_reference (var))
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    var3 = maybe_lookup_decl (new_vard, ctx);
	    if (var3 == new_vard)
	      var3 = NULL_TREE;
	  }

	/* rpriva: shared per-thread result array; rprivb: local
	   per-iteration array (see comment above).  */
	tree ptype = build_pointer_type (TREE_TYPE (new_var));
	tree rpriva = create_tmp_var (ptype);
	tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
	OMP_CLAUSE_DECL (nc) = rpriva;
	*cp1 = nc;
	cp1 = &OMP_CLAUSE_CHAIN (nc);

	tree rprivb = create_tmp_var (ptype);
	nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
	OMP_CLAUSE_DECL (nc) = rprivb;
	OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
	*cp1 = nc;
	cp1 = &OMP_CLAUSE_CHAIN (nc);

	tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
	if (new_vard != new_var)
	  TREE_ADDRESSABLE (var2) = 1;
	gimple_add_tmp_var (var2);

	/* rpriva_ref = rpriva[thread_num];  */
	tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);

	/* rprivam1_ref = rpriva[thread_num - 1];  */
	x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
			     thread_num, integer_minus_one_node);
	x = fold_convert_loc (clause_loc, sizetype, x);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);

	/* rprival_ref = rpriva[l];  */
	x = fold_convert_loc (clause_loc, sizetype, l);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);

	/* rprivalmk_ref = rpriva[l - k];  */
	x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
	x = fold_convert_loc (clause_loc, sizetype, x);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);

	/* rprivb_ref = rprivb[ivar];  */
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
	tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);

	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    /* User-defined reduction: emit the UDR ctor/init/merge
	       sequences with the placeholder and omp_priv redirected via
	       DECL_VALUE_EXPR to the variables each copy operates on.  */
	    tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	    tree val = var2;	/* restored line — TODO confirm */

	    x = lang_hooks.decls.omp_clause_default_ctor
		    (c, var2, build_outer_var_ref (var, ctx));
	    if (x)
	      gimplify_and_add (x, &clist);

	    x = build_outer_var_ref (var, ctx);
	    x = lang_hooks.decls.omp_clause_assign_op (c, var2, x);
	    gimplify_and_add (x, &thr01_list);

	    tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
		      ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
	    if (var3)
	      {
		x = lang_hooks.decls.omp_clause_assign_op (c, var2, var3);
		gimplify_and_add (x, &thrn1_list);
		x = lang_hooks.decls.omp_clause_assign_op (c, var2, var3);
		gimplify_and_add (x, &thr02_list);
	      }
	    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
	      {
		/* Otherwise, assign to it the identity element.  */
		gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		tseq = copy_gimple_seq_and_replace_locals (tseq);
		if (new_vard != new_var)
		  val = build_fold_addr_expr_loc (clause_loc, val);
		SET_DECL_VALUE_EXPR (new_vard, val);
		DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
		SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		lower_omp (&tseq, ctx);
		gimple_seq_add_seq (&thrn1_list, tseq);
		tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		lower_omp (&tseq, ctx);
		gimple_seq_add_seq (&thr02_list, tseq);
		SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		if (y)
		  SET_DECL_VALUE_EXPR (new_vard, y);
		else
		  {
		    DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
		    SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
		  }
	      }

	    x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivam1_ref);
	    gimplify_and_add (x, &thrn2_list);

	    if (ctx->scan_exclusive)
	      {
		/* Exclusive scan: store to rprivb[ivar] before merging.  */
		x = unshare_expr (rprivb_ref);
		x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
		gimplify_and_add (x, &scan1_list);
	      }

	    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
	    tseq = copy_gimple_seq_and_replace_locals (tseq);
	    SET_DECL_VALUE_EXPR (placeholder, var2);
	    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	    lower_omp (&tseq, ctx);
	    gimple_seq_add_seq (&scan1_list, tseq);

	    if (ctx->scan_inclusive)
	      {
		x = unshare_expr (rprivb_ref);
		x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
		gimplify_and_add (x, &scan1_list);
	      }

	    x = unshare_expr (rpriva_ref);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
	    gimplify_and_add (x, &mdlist);

	    x = unshare_expr (new_var);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
	    gimplify_and_add (x, &input2_list);

	    val = rprivb_ref;	/* restored line — TODO confirm */
	    if (new_vard != new_var)
	      val = build_fold_addr_expr_loc (clause_loc, val);

	    /* Second loop input phase: r = var2; UDR merge (r, rprivb[ivar]).  */
	    tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
	    tseq = copy_gimple_seq_and_replace_locals (tseq);
	    SET_DECL_VALUE_EXPR (new_vard, val);
	    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
	    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
	    lower_omp (&tseq, ctx);
	    if (y)	/* restored line — TODO confirm */
	      SET_DECL_VALUE_EXPR (new_vard, y);
	    else
	      {
		DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
		SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
	      }
	    SET_DECL_VALUE_EXPR (placeholder, new_var);
	    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	    lower_omp (&tseq, ctx);
	    gimple_seq_add_seq (&input2_list, tseq);

	    x = build_outer_var_ref (var, ctx);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
	    gimplify_and_add (x, &last_list);

	    /* Prefix-sum combine: var2 = rpriva[l-k];
	       UDR merge (var2, rpriva[l]); rpriva[l] = var2;  */
	    x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
	    gimplify_and_add (x, &reduc_list);
	    tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
	    tseq = copy_gimple_seq_and_replace_locals (tseq);
	    val = rprival_ref;	/* restored line — TODO confirm */
	    if (new_vard != new_var)
	      val = build_fold_addr_expr_loc (clause_loc, val);
	    SET_DECL_VALUE_EXPR (new_vard, val);
	    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
	    SET_DECL_VALUE_EXPR (placeholder, var2);
	    lower_omp (&tseq, ctx);
	    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
	    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
	    if (y)	/* restored line — TODO confirm */
	      SET_DECL_VALUE_EXPR (new_vard, y);
	    else
	      {
		DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
		SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
	      }
	    gimple_seq_add_seq (&reduc_list, tseq);
	    x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
	    gimplify_and_add (x, &reduc_list);

	    x = lang_hooks.decls.omp_clause_dtor (c, var2);
	    if (x)
	      gimplify_and_add (x, dlist);
	  }
	else
	  {
	    /* Built-in reduction operation: plain assignments.  */
	    x = build_outer_var_ref (var, ctx);
	    gimplify_assign (var2, x, &thr01_list);

	    x = omp_reduction_init (c, TREE_TYPE (new_var));
	    gimplify_assign (var2, unshare_expr (x), &thrn1_list);
	    gimplify_assign (var2, x, &thr02_list);

	    gimplify_assign (var2, rprivam1_ref, &thrn2_list);

	    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
	    if (code == MINUS_EXPR)
	      code = PLUS_EXPR;	/* restored line — TODO confirm */

	    if (ctx->scan_exclusive)
	      gimplify_assign (unshare_expr (rprivb_ref), var2, &scan1_list);
	    x = build2 (code, TREE_TYPE (new_var), var2, new_var);
	    gimplify_assign (var2, x, &scan1_list);
	    if (ctx->scan_inclusive)
	      gimplify_assign (unshare_expr (rprivb_ref), var2, &scan1_list);

	    gimplify_assign (unshare_expr (rpriva_ref), var2, &mdlist);

	    x = build2 (code, TREE_TYPE (new_var), var2, rprivb_ref);
	    gimplify_assign (new_var, x, &input2_list);

	    gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
			     &last_list);

	    x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
			unshare_expr (rprival_ref));
	    gimplify_assign (rprival_ref, x, &reduc_list);
	  }
      }

  /* Bump ivar after each iteration of either loop.  */
  g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
  gimple_seq_add_stmt (&scan1_list, g);
  g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
  gimple_seq_add_stmt (gimple_omp_body_ptr (scan_stmt2), g);

  /* controlb/controlp manage deallocation of rprivb after the second loop;
     attach them as _scantemp_ control clauses to both loops.  */
  tree controlb = create_tmp_var (boolean_type_node);
  tree controlp = create_tmp_var (ptr_type_node);
  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlb;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp1 = nc;
  cp1 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlp;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp1 = nc;
  cp1 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlb;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp2 = nc;
  cp2 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlp;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp2 = nc;
  cp2 = &OMP_CLAUSE_CHAIN (nc);

  *cp1 = gimple_omp_for_clauses (stmt);
  gimple_omp_for_set_clauses (stmt, new_clauses1);
  *cp2 = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, new_clauses2);

  gimple_omp_set_body (scan_stmt1, scan1_list);
  gimple_omp_set_body (input_stmt2, input2_list);

  /* Splice the phase bodies in place of the scan/input markers.  */
  gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
			GSI_SAME_STMT);
  gsi_remove (&input1_gsi, true);
  gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
			GSI_SAME_STMT);
  gsi_remove (&scan1_gsi, true);
  gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
			GSI_SAME_STMT);
  gsi_remove (&input2_gsi, true);
  gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
			GSI_SAME_STMT);
  gsi_remove (&scan2_gsi, true);

  gimple_seq_add_seq (body_p, clist);

  /* if (thread_num == 0) { thr01_list } else { thrn1_list }  */
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thr01_list);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thrn1_list);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (ivar, size_zero_node);
  gimple_seq_add_stmt (body_p, g);

  /* First worksharing loop: input phase.  */
  gimple_seq_add_stmt (body_p, stmt);
  gimple_seq_add_seq (body_p, body);
  gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
							  fd->loop.v));

  g = gimple_build_omp_return (true);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, mdlist);

  /* if (num_threads > 1): combine per-thread results with a
     Work-Efficient parallel prefix sum over rpriva[].  */
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);

  g = omp_build_barrier (NULL);
  gimple_seq_add_stmt (body_p, g);

  tree down = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  tree num_threadsu = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
  gimple_seq_add_stmt (body_p, g);

  tree thread_numu = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
  gimple_seq_add_stmt (body_p, g);

  tree thread_nump1 = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
			   build_int_cst (unsigned_type_node, 1));
  gimple_seq_add_stmt (body_p, g);

  /* Outer prefix-sum loop head.  */
  lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  tree twok = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  /* Switch from the up-sweep to the down-sweep once 2*k exceeds
     num_threads.  */
  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab4);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab6);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_label (lab5);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  /* cplx = .MUL_OVERFLOW (thread_nump1, twok); skip this round on
     overflow.  */
  tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
  DECL_GIMPLE_REG_P (cplx) = 1;
  g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
  gimple_call_set_lhs (g, cplx);
  gimple_seq_add_stmt (body_p, g);
  tree mul = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (mul, REALPART_EXPR,
			   build1 (REALPART_EXPR, unsigned_type_node, cplx));
  gimple_seq_add_stmt (body_p, g);
  tree ovf = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (ovf, IMAGPART_EXPR,
			   build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
  gimple_seq_add_stmt (body_p, g);

  tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
			 lab7, lab8);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab7);
  gimple_seq_add_stmt (body_p, g);

  /* l = mul + ((k & down) - 1); index of the element this thread
     combines in this round.  */
  tree andv = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
  gimple_seq_add_stmt (body_p, g);
  tree andvm1 = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
			   build_minus_one_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
  gimple_seq_add_stmt (body_p, g);

  tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab9);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, reduc_list);
  g = gimple_build_label (lab8);
  gimple_seq_add_stmt (body_p, g);

  /* k <<= 1 on the up-sweep (down == 0), k >>= 1 on the down-sweep.  */
  tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
			 lab10, lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab10);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_goto (lab12);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab12);
  gimple_seq_add_stmt (body_p, g);

  g = omp_build_barrier (NULL);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
			 lab3, lab2);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);

  /* if (thread_num == 0) { thr02_list } else { thrn2_list }  */
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thr02_list);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thrn2_list);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  /* Second worksharing loop: scan phase.  The caller emits its
     OMP_CONTINUE/OMP_RETURN.  */
  g = gimple_build_assign (ivar, size_zero_node);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_stmt (body_p, new_stmt);
  gimple_seq_add_seq (body_p, new_body);

  /* lastprivate handling: only the last thread copies out.  */
  gimple_seq new_dlist = NULL;
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree num_threadsm1 = create_tmp_var (integer_type_node);
  g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
			   integer_minus_one_node);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, last_list);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, *dlist);
  *dlist = new_dlist;
}
9821 /* Lower code for an OMP loop directive. */
/* Lower a GIMPLE_OMP_FOR (OMP loop directive) at *GSI_P into a GIMPLE_BIND
   containing the runtime setup, the (re-lowered) loop itself, and the exit
   clauses.  CTX holds the data-sharing context for the directive.
   NOTE(review): this region of the file was damaged during extraction —
   statements are split across lines, original line numbers are fused into
   the text, and some lines (braces, declarations, conditions) are missing.
   Non-comment tokens below are preserved verbatim; comments only annotate.  */
9824 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9827 struct omp_for_data fd
, *fdp
= NULL
;
9828 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
9830 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
9831 gimple_seq cnt_list
= NULL
, clist
= NULL
;
9832 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
9835 push_gimplify_context ();
/* Lower the pre-body first, then replace STMT with a fresh bind that
   will receive everything we build here.  */
9837 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
9839 block
= make_node (BLOCK
);
9840 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
9841 /* Replace at gsi right away, so that 'stmt' is no member
9842 of a sequence anymore as we're going to add to a different
9844 gsi_replace (gsi_p
, new_stmt
, true);
9846 /* Move declaration of temporaries in the loop body before we make
9848 omp_for_body
= gimple_omp_body (stmt
);
9849 if (!gimple_seq_empty_p (omp_for_body
)
9850 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
9853 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
9854 tree vars
= gimple_bind_vars (inner_bind
);
9855 gimple_bind_append_vars (new_stmt
, vars
);
9856 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
9857 keep them on the inner_bind and it's block. */
9858 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
9859 if (gimple_bind_block (inner_bind
))
9860 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
/* For a loop combined into an outer construct, materialize _looptemp_
   clauses (istart/iend plus count temporaries), reusing the decls from
   the outer taskreg or an adjacent _simt_ sibling when present.  */
9863 if (gimple_omp_for_combined_into_p (stmt
))
9865 omp_extract_for_data (stmt
, &fd
, NULL
);
9868 /* We need two temporaries with fd.loop.v type (istart/iend)
9869 and then (fd.collapse - 1) temporaries with the same
9870 type for count2 ... countN-1 vars if not constant. */
9872 tree type
= fd
.iter_type
;
9874 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
9875 count
+= fd
.collapse
- 1;
9877 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
9878 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
9879 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
9884 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
9885 OMP_CLAUSE__LOOPTEMP_
);
9887 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
9888 OMP_CLAUSE__LOOPTEMP_
);
9889 for (i
= 0; i
< count
; i
++)
9894 gcc_assert (outerc
);
9895 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
9896 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
9897 OMP_CLAUSE__LOOPTEMP_
);
9901 /* If there are 2 adjacent SIMD stmts, one with _simt_
9902 clause, another without, make sure they have the same
9903 decls in _looptemp_ clauses, because the outer stmt
9904 they are combined into will look up just one inner_stmt. */
9906 temp
= OMP_CLAUSE_DECL (simtc
);
9908 temp
= create_tmp_var (type
);
9909 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
9911 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
9912 OMP_CLAUSE_DECL (*pc
) = temp
;
9913 pc
= &OMP_CLAUSE_CHAIN (*pc
);
9915 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
9916 OMP_CLAUSE__LOOPTEMP_
);
9921 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
/* Task reductions: if present, create a _reductemp_ clause and route the
   pre-body/counter code through the task-reduction init list instead.  */
9925 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
9926 OMP_CLAUSE_REDUCTION
);
9927 tree rtmp
= NULL_TREE
;
9930 tree type
= build_pointer_type (pointer_sized_int_node
);
9931 tree temp
= create_tmp_var (type
);
9932 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
9933 OMP_CLAUSE_DECL (c
) = temp
;
9934 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
9935 gimple_omp_for_set_clauses (stmt
, c
);
9936 lower_omp_task_reductions (ctx
, OMP_FOR
,
9937 gimple_omp_for_clauses (stmt
),
9938 &tred_ilist
, &tred_dlist
);
9940 rtmp
= make_ssa_name (type
);
9941 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
9944 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
9947 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
9949 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
9950 gimple_omp_for_pre_body (stmt
));
9952 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9954 /* Lower the header expressions. At this point, we can assume that
9955 the header is of the form:
9957 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
9959 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
9960 using the .omp_data_s mapping, if needed. */
9961 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
9963 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
9964 if (!is_gimple_min_invariant (*rhs_p
))
9965 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
9966 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
9967 recompute_tree_invariant_for_addr_expr (*rhs_p
);
9969 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
9970 if (!is_gimple_min_invariant (*rhs_p
))
9971 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
9972 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
9973 recompute_tree_invariant_for_addr_expr (*rhs_p
);
9975 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
9976 if (!is_gimple_min_invariant (*rhs_p
))
9977 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
9980 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
9982 gimple_seq_add_seq (&body
, cnt_list
);
9984 /* Once lowered, extract the bounds and clauses. */
9985 omp_extract_for_data (stmt
, &fd
, NULL
);
/* OpenACC: wrap the loop in head/tail marker sequences unless inside a
   kernels region.  */
9987 if (is_gimple_omp_oacc (ctx
->stmt
)
9988 && !ctx_in_oacc_kernels_region (ctx
))
9989 lower_oacc_head_tail (gimple_location (stmt
),
9990 gimple_omp_for_clauses (stmt
),
9991 &oacc_head
, &oacc_tail
, ctx
);
9993 /* Add OpenACC partitioning and reduction markers just before the loop. */
9995 gimple_seq_add_seq (&body
, oacc_head
);
9997 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
/* Remap linear clause decls (and steps) into this context for worksharing
   loops with copy-in.  */
9999 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10000 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10001 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10002 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10004 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
10005 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
10006 OMP_CLAUSE_LINEAR_STEP (c
)
10007 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
/* Scan (inclusive/exclusive) worksharing loops take a dedicated lowering
   path; otherwise the loop statement and its body are emitted directly.  */
10011 bool phony_loop
= (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
10012 && gimple_omp_for_grid_phony (stmt
));
10013 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
10014 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10016 gcc_assert (!phony_loop
);
10017 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
10022 gimple_seq_add_stmt (&body
, stmt
);
10023 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
10027 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
10030 /* After the loop, add exit clauses. */
10031 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
/* Any clause-generated atomic updates are bracketed by
   GOMP_atomic_start/GOMP_atomic_end runtime calls.  */
10035 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
10036 gcall
*g
= gimple_build_call (fndecl
, 0);
10037 gimple_seq_add_stmt (&body
, g
);
10038 gimple_seq_add_seq (&body
, clist
);
10039 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
10040 g
= gimple_build_call (fndecl
, 0);
10041 gimple_seq_add_stmt (&body
, g
);
10044 if (ctx
->cancellable
)
10045 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
10047 gimple_seq_add_seq (&body
, dlist
);
10051 gimple_seq_add_seq (&tred_ilist
, body
);
10055 body
= maybe_catch_exception (body
);
10059 /* Region exit marker goes at the end of the loop body. */
10060 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
10061 gimple_seq_add_stmt (&body
, g
);
10063 gimple_seq_add_seq (&body
, tred_dlist
);
10065 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
10068 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
10071 /* Add OpenACC joining and reduction markers just after the loop. */
10073 gimple_seq_add_seq (&body
, oacc_tail
);
/* Finalize: hand the assembled body to the replacement bind and detach
   the original statement's body/pre-body.  */
10075 pop_gimplify_context (new_stmt
);
10077 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
10078 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
10079 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
10080 if (BLOCK_VARS (block
))
10081 TREE_USED (block
) = 1;
10083 gimple_bind_set_body (new_stmt
, body
);
10084 gimple_omp_set_body (stmt
, NULL
);
10085 gimple_omp_for_set_pre_body (stmt
, NULL
);
10088 /* Callback for walk_stmts. Check if the current statement only contains
10089 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10092 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
10093 bool *handled_ops_p
,
10094 struct walk_stmt_info
*wi
)
10096 int *info
= (int *) wi
->info
;
10097 gimple
*stmt
= gsi_stmt (*gsi_p
);
10099 *handled_ops_p
= true;
10100 switch (gimple_code (stmt
))
10106 case GIMPLE_OMP_FOR
:
10107 case GIMPLE_OMP_SECTIONS
:
10108 *info
= *info
== 0 ? 1 : -1;
10117 struct omp_taskcopy_context
10119 /* This field must be at the beginning, as we do "inheritance": Some
10120 callback functions for tree-inline.c (e.g., omp_copy_decl)
10121 receive a copy_body_data pointer that is up-casted to an
10122 omp_context pointer. */
10128 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
10130 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
10132 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
10133 return create_tmp_var (TREE_TYPE (var
));
10139 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
10141 tree name
, new_fields
= NULL
, type
, f
;
10143 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
10144 name
= DECL_NAME (TYPE_NAME (orig_type
));
10145 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
10146 TYPE_DECL
, name
, type
);
10147 TYPE_NAME (type
) = name
;
10149 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
10151 tree new_f
= copy_node (f
);
10152 DECL_CONTEXT (new_f
) = type
;
10153 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
10154 TREE_CHAIN (new_f
) = new_fields
;
10155 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10156 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10157 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
10159 new_fields
= new_f
;
10160 tcctx
->cb
.decl_map
->put (f
, new_f
);
10162 TYPE_FIELDS (type
) = nreverse (new_fields
);
10163 layout_type (type
);
10167 /* Create task copyfn. */
/* Build the body of the task copy function for TASK_STMT: the function the
   runtime calls to copy/construct the task's data block (dst arg) from the
   sender record (src arg).  Remaps variably-sized record types first, then
   emits field-by-field copies per clause kind.
   NOTE(review): extraction damage — statements are split across lines and
   some original lines (braces, guards such as null-map checks, case
   fall-through breaks) are missing.  Tokens below are preserved verbatim.  */
10170 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
10172 struct function
*child_cfun
;
10173 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
10174 tree record_type
, srecord_type
, bind
, list
;
10175 bool record_needs_remap
= false, srecord_needs_remap
= false;
10177 struct omp_taskcopy_context tcctx
;
10178 location_t loc
= gimple_location (task_stmt
);
10179 size_t looptempno
= 0;
10181 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
10182 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
10183 gcc_assert (child_cfun
->cfg
== NULL
);
10184 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
10186 /* Reset DECL_CONTEXT on function arguments. */
10187 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
10188 DECL_CONTEXT (t
) = child_fn
;
10190 /* Populate the function. */
10191 push_gimplify_context ();
10192 push_cfun (child_cfun
);
10194 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
10195 TREE_SIDE_EFFECTS (bind
) = 1;
10197 DECL_SAVED_TREE (child_fn
) = bind
;
10198 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
10200 /* Remap src and dst argument types if needed. */
10201 record_type
= ctx
->record_type
;
10202 srecord_type
= ctx
->srecord_type
;
10203 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
10204 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10206 record_needs_remap
= true;
10209 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
10210 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10212 srecord_needs_remap
= true;
/* Only set up the remap context (tcctx.cb.decl_map) when some field has
   a variably-modified type; later passes test decl_map for non-null.  */
10216 if (record_needs_remap
|| srecord_needs_remap
)
10218 memset (&tcctx
, '\0', sizeof (tcctx
));
10219 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
10220 tcctx
.cb
.dst_fn
= child_fn
;
10221 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
10222 gcc_checking_assert (tcctx
.cb
.src_node
);
10223 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
10224 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
10225 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
10226 tcctx
.cb
.eh_lp_nr
= 0;
10227 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
10228 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
10231 if (record_needs_remap
)
10232 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
10233 if (srecord_needs_remap
)
10234 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
10237 tcctx
.cb
.decl_map
= NULL
;
10239 arg
= DECL_ARGUMENTS (child_fn
);
10240 TREE_TYPE (arg
) = build_pointer_type (record_type
);
10241 sarg
= DECL_CHAIN (arg
);
10242 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
10244 /* First pass: initialize temporaries used in record_type and srecord_type
10245 sizes and field offsets. */
10246 if (tcctx
.cb
.decl_map
)
10247 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10248 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10252 decl
= OMP_CLAUSE_DECL (c
);
10253 p
= tcctx
.cb
.decl_map
->get (decl
);
10256 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10257 sf
= (tree
) n
->value
;
10258 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10259 src
= build_simple_mem_ref_loc (loc
, sarg
);
10260 src
= omp_build_component_ref (src
, sf
);
10261 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
10262 append_to_statement_list (t
, &list
);
10265 /* Second pass: copy shared var pointers and copy construct non-VLA
10266 firstprivate vars. */
10267 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10268 switch (OMP_CLAUSE_CODE (c
))
10270 splay_tree_key key
;
10271 case OMP_CLAUSE_SHARED
:
10272 decl
= OMP_CLAUSE_DECL (c
);
/* Shared-firstprivate vars are keyed by &DECL_UID, plain shared by the
   decl itself.  */
10273 key
= (splay_tree_key
) decl
;
10274 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
10275 key
= (splay_tree_key
) &DECL_UID (decl
);
10276 n
= splay_tree_lookup (ctx
->field_map
, key
);
10279 f
= (tree
) n
->value
;
10280 if (tcctx
.cb
.decl_map
)
10281 f
= *tcctx
.cb
.decl_map
->get (f
);
10282 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
10283 sf
= (tree
) n
->value
;
10284 if (tcctx
.cb
.decl_map
)
10285 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10286 src
= build_simple_mem_ref_loc (loc
, sarg
);
10287 src
= omp_build_component_ref (src
, sf
);
10288 dst
= build_simple_mem_ref_loc (loc
, arg
);
10289 dst
= omp_build_component_ref (dst
, f
);
10290 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10291 append_to_statement_list (t
, &list
);
10293 case OMP_CLAUSE_REDUCTION
:
10294 case OMP_CLAUSE_IN_REDUCTION
:
/* Strip the MEM_REF / pointer arithmetic wrappers to reach the base
   decl used as the field-map key.  */
10295 decl
= OMP_CLAUSE_DECL (c
);
10296 if (TREE_CODE (decl
) == MEM_REF
)
10298 decl
= TREE_OPERAND (decl
, 0);
10299 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
10300 decl
= TREE_OPERAND (decl
, 0);
10301 if (TREE_CODE (decl
) == INDIRECT_REF
10302 || TREE_CODE (decl
) == ADDR_EXPR
)
10303 decl
= TREE_OPERAND (decl
, 0);
10305 key
= (splay_tree_key
) decl
;
10306 n
= splay_tree_lookup (ctx
->field_map
, key
);
10309 f
= (tree
) n
->value
;
10310 if (tcctx
.cb
.decl_map
)
10311 f
= *tcctx
.cb
.decl_map
->get (f
);
10312 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
10313 sf
= (tree
) n
->value
;
10314 if (tcctx
.cb
.decl_map
)
10315 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10316 src
= build_simple_mem_ref_loc (loc
, sarg
);
10317 src
= omp_build_component_ref (src
, sf
);
10318 if (decl
!= OMP_CLAUSE_DECL (c
)
10319 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
10320 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
10321 src
= build_simple_mem_ref_loc (loc
, src
);
10322 dst
= build_simple_mem_ref_loc (loc
, arg
);
10323 dst
= omp_build_component_ref (dst
, f
);
10324 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10325 append_to_statement_list (t
, &list
);
10327 case OMP_CLAUSE__LOOPTEMP_
:
10328 /* Fields for first two _looptemp_ clauses are initialized by
10329 GOMP_taskloop*, the rest are handled like firstprivate. */
10330 if (looptempno
< 2)
10336 case OMP_CLAUSE__REDUCTEMP_
:
10337 case OMP_CLAUSE_FIRSTPRIVATE
:
10338 decl
= OMP_CLAUSE_DECL (c
);
/* VLA firstprivates are deferred to the last pass below.  */
10339 if (is_variable_sized (decl
))
10341 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10344 f
= (tree
) n
->value
;
10345 if (tcctx
.cb
.decl_map
)
10346 f
= *tcctx
.cb
.decl_map
->get (f
);
10347 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10350 sf
= (tree
) n
->value
;
10351 if (tcctx
.cb
.decl_map
)
10352 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10353 src
= build_simple_mem_ref_loc (loc
, sarg
);
10354 src
= omp_build_component_ref (src
, sf
);
10355 if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
10356 src
= build_simple_mem_ref_loc (loc
, src
);
10360 dst
= build_simple_mem_ref_loc (loc
, arg
);
10361 dst
= omp_build_component_ref (dst
, f
);
/* Firstprivate uses the language copy-constructor hook; the internal
   temp clauses are plain assignments.  */
10362 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
10363 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10365 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
10366 append_to_statement_list (t
, &list
);
10368 case OMP_CLAUSE_PRIVATE
:
10369 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
10371 decl
= OMP_CLAUSE_DECL (c
);
10372 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10373 f
= (tree
) n
->value
;
10374 if (tcctx
.cb
.decl_map
)
10375 f
= *tcctx
.cb
.decl_map
->get (f
);
10376 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10379 sf
= (tree
) n
->value
;
10380 if (tcctx
.cb
.decl_map
)
10381 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10382 src
= build_simple_mem_ref_loc (loc
, sarg
);
10383 src
= omp_build_component_ref (src
, sf
);
10384 if (use_pointer_for_field (decl
, NULL
))
10385 src
= build_simple_mem_ref_loc (loc
, src
);
10389 dst
= build_simple_mem_ref_loc (loc
, arg
);
10390 dst
= omp_build_component_ref (dst
, f
);
10391 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10392 append_to_statement_list (t
, &list
);
10398 /* Last pass: handle VLA firstprivates. */
10399 if (tcctx
.cb
.decl_map
)
10400 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10401 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10405 decl
= OMP_CLAUSE_DECL (c
);
10406 if (!is_variable_sized (decl
))
10408 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10411 f
= (tree
) n
->value
;
10412 f
= *tcctx
.cb
.decl_map
->get (f
);
/* A VLA firstprivate is represented via a DECL_VALUE_EXPR of the form
   *ptr; copy the pointed-to data, then store the new address in the
   pointer field.  */
10413 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
10414 ind
= DECL_VALUE_EXPR (decl
);
10415 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
10416 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
10417 n
= splay_tree_lookup (ctx
->sfield_map
,
10418 (splay_tree_key
) TREE_OPERAND (ind
, 0));
10419 sf
= (tree
) n
->value
;
10420 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10421 src
= build_simple_mem_ref_loc (loc
, sarg
);
10422 src
= omp_build_component_ref (src
, sf
);
10423 src
= build_simple_mem_ref_loc (loc
, src
);
10424 dst
= build_simple_mem_ref_loc (loc
, arg
);
10425 dst
= omp_build_component_ref (dst
, f
);
10426 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
10427 append_to_statement_list (t
, &list
);
10428 n
= splay_tree_lookup (ctx
->field_map
,
10429 (splay_tree_key
) TREE_OPERAND (ind
, 0));
10430 df
= (tree
) n
->value
;
10431 df
= *tcctx
.cb
.decl_map
->get (df
);
10432 ptr
= build_simple_mem_ref_loc (loc
, arg
);
10433 ptr
= omp_build_component_ref (ptr
, df
);
10434 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
10435 build_fold_addr_expr_loc (loc
, dst
));
10436 append_to_statement_list (t
, &list
);
10439 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
10440 append_to_statement_list (t
, &list
);
10442 if (tcctx
.cb
.decl_map
)
10443 delete tcctx
.cb
.decl_map
;
10444 pop_gimplify_context (NULL
);
10445 BIND_EXPR_BODY (bind
) = list
;
/* Lower OMP_CLAUSE_DEPEND clauses in *PCLAUSES into the address array the
   GOMP runtime expects: counts the dependences per kind, materializes an
   addressable array of pointers (header slots followed by the dependence
   addresses), replaces the clauses with a single OMP_CLAUSE_DEPEND_LAST
   clause pointing at the array, and emits a clobber of the array into
   *OSEQ.  Setup statements go into *ISEQ.
   NOTE(review): extraction damage — counter increments inside both
   switches and several guards/braces are missing from this view; tokens
   below are preserved verbatim.  */
10450 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
10454 size_t cnt
[4] = { 0, 0, 0, 0 }, idx
= 2, i
;
/* First walk: tally dependences by kind into cnt[].  */
10456 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
10457 gcc_assert (clauses
);
10458 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10459 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
10460 switch (OMP_CLAUSE_DEPEND_KIND (c
))
10462 case OMP_CLAUSE_DEPEND_LAST
:
10463 /* Lowering already done at gimplification. */
10465 case OMP_CLAUSE_DEPEND_IN
:
10468 case OMP_CLAUSE_DEPEND_OUT
:
10469 case OMP_CLAUSE_DEPEND_INOUT
:
10472 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
10475 case OMP_CLAUSE_DEPEND_DEPOBJ
:
10478 case OMP_CLAUSE_DEPEND_SOURCE
:
10479 case OMP_CLAUSE_DEPEND_SINK
:
10482 gcc_unreachable ();
/* mutexinoutset/depobj (cnt[1]/cnt[3]) force the extended 5-slot header
   format (idx == 5); otherwise the compact 2-slot header is used.  */
10484 if (cnt
[1] || cnt
[3])
10486 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3];
10487 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
);
10488 tree array
= create_tmp_var (type
);
10489 TREE_ADDRESSABLE (array
) = 1;
/* Header: slot 0 and slot 1 carry the counts the runtime reads.  */
10490 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
10494 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
10495 gimple_seq_add_stmt (iseq
, g
);
10496 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
10499 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
10500 gimple_seq_add_stmt (iseq
, g
);
10501 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
10503 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
10504 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
10505 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
10506 gimple_seq_add_stmt (iseq
, g
);
/* Second walk: store each dependence address into the array, grouped by
   kind (outer loop i selects the kind bucket).  */
10508 for (i
= 0; i
< 4; i
++)
10512 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10513 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
10517 switch (OMP_CLAUSE_DEPEND_KIND (c
))
10519 case OMP_CLAUSE_DEPEND_IN
:
10523 case OMP_CLAUSE_DEPEND_OUT
:
10524 case OMP_CLAUSE_DEPEND_INOUT
:
10528 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
10532 case OMP_CLAUSE_DEPEND_DEPOBJ
:
10537 gcc_unreachable ();
10539 tree t
= OMP_CLAUSE_DECL (c
);
10540 t
= fold_convert (ptr_type_node
, t
);
10541 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
10542 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
10543 NULL_TREE
, NULL_TREE
);
10544 g
= gimple_build_assign (r
, t
);
10545 gimple_seq_add_stmt (iseq
, g
);
/* Replace the original depend clauses with one pre-lowered clause whose
   decl is the address of the array.  */
10548 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
10549 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
10550 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
10551 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
/* Clobber the array after the construct so its stack slot can be reused.  */
10553 tree clobber
= build_constructor (type
, NULL
);
10554 TREE_THIS_VOLATILE (clobber
) = 1;
10555 g
= gimple_build_assign (array
, clobber
);
10556 gimple_seq_add_stmt (oseq
, g
);
10559 /* Lower the OpenMP parallel or task directive in the current statement
10560 in GSI_P. CTX holds context information for the directive. */
/* Lower the OMP parallel or task directive at *GSI_P: lower its body and
   clauses, build the sender/receiver data records, create the task copyfn
   when needed, and replace the statement with a GIMPLE_BIND holding the
   sequenced fragments.  CTX holds the directive's context.
   NOTE(review): extraction damage — statements are split across lines and
   some lines (braces, an early-return body, else-arms) are missing from
   this view; tokens below are preserved verbatim.  */
10563 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10567 gimple
*stmt
= gsi_stmt (*gsi_p
);
10568 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
10569 gimple_seq par_body
;
10570 location_t loc
= gimple_location (stmt
);
10572 clauses
= gimple_omp_taskreg_clauses (stmt
);
10573 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
10574 && gimple_omp_task_taskwait_p (stmt
))
10582 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
10583 par_body
= gimple_bind_body (par_bind
);
10585 child_fn
= ctx
->cb
.dst_fn
;
/* Decide whether a plain parallel can be marked "combined" (its body is
   exactly one workshare construct) using check_combined_parallel.  */
10586 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
10587 && !gimple_omp_parallel_combined_p (stmt
))
10589 struct walk_stmt_info wi
;
10592 memset (&wi
, 0, sizeof (wi
));
10594 wi
.val_only
= true;
10595 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
10597 gimple_omp_parallel_set_combined_p (stmt
, true);
/* Task depend clauses are lowered into a wrapper bind around the task.  */
10599 gimple_seq dep_ilist
= NULL
;
10600 gimple_seq dep_olist
= NULL
;
10601 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
10602 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
10604 push_gimplify_context ();
10605 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
10606 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
10607 &dep_ilist
, &dep_olist
);
/* A taskwait-with-depend task only needs the depend wrapper.  */
10610 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
10611 && gimple_omp_task_taskwait_p (stmt
))
10615 gsi_replace (gsi_p
, dep_bind
, true);
10616 gimple_bind_add_seq (dep_bind
, dep_ilist
);
10617 gimple_bind_add_stmt (dep_bind
, stmt
);
10618 gimple_bind_add_seq (dep_bind
, dep_olist
);
10619 pop_gimplify_context (dep_bind
);
10624 if (ctx
->srecord_type
)
10625 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
/* Task reductions for taskloop tasks / parallel _reductemp_ also go into
   the (possibly shared) dep_bind wrapper.  */
10627 gimple_seq tskred_ilist
= NULL
;
10628 gimple_seq tskred_olist
= NULL
;
10629 if ((is_task_ctx (ctx
)
10630 && gimple_omp_task_taskloop_p (ctx
->stmt
)
10631 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
10632 OMP_CLAUSE_REDUCTION
))
10633 || (is_parallel_ctx (ctx
)
10634 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
10635 OMP_CLAUSE__REDUCTEMP_
)))
10637 if (dep_bind
== NULL
)
10639 push_gimplify_context ();
10640 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
10642 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
10644 gimple_omp_taskreg_clauses (ctx
->stmt
),
10645 &tskred_ilist
, &tskred_olist
);
10648 push_gimplify_context ();
10650 gimple_seq par_olist
= NULL
;
10651 gimple_seq par_ilist
= NULL
;
10652 gimple_seq par_rlist
= NULL
;
10653 bool phony_construct
= gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
10654 && gimple_omp_parallel_grid_phony (as_a
<gomp_parallel
*> (stmt
));
10655 if (phony_construct
&& ctx
->record_type
)
10657 gcc_checking_assert (!ctx
->receiver_decl
);
10658 ctx
->receiver_decl
= create_tmp_var
10659 (build_reference_type (ctx
->record_type
), ".omp_rec");
10661 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
10662 lower_omp (&par_body
, ctx
);
10663 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
)
10664 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
10666 /* Declare all the variables created by mapping and the variables
10667 declared in the scope of the parallel body. */
10668 record_vars_into (ctx
->block_vars
, child_fn
);
10669 maybe_remove_omp_member_access_dummy_vars (par_bind
);
10670 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
/* Build the sender record (.omp_data_o) that carries shared/firstprivate
   data into the outlined child function.  */
10672 if (ctx
->record_type
)
10675 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
10676 : ctx
->record_type
, ".omp_data_o");
10677 DECL_NAMELESS (ctx
->sender_decl
) = 1;
10678 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
10679 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
10682 gimple_seq olist
= NULL
;
10683 gimple_seq ilist
= NULL
;
10684 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
10685 lower_send_shared_vars (&ilist
, &olist
, ctx
);
/* Clobber the sender record after the region so its slot can be reused.  */
10687 if (ctx
->record_type
)
10689 tree clobber
= build_constructor (TREE_TYPE (ctx
->sender_decl
), NULL
);
10690 TREE_THIS_VOLATILE (clobber
) = 1;
10691 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
10695 /* Once all the expansions are done, sequence all the different
10696 fragments inside gimple_omp_body. */
10698 gimple_seq new_body
= NULL
;
10700 if (ctx
->record_type
)
10702 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
10703 /* fixup_child_record_type might have changed receiver_decl's type. */
10704 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
10705 gimple_seq_add_stmt (&new_body
,
10706 gimple_build_assign (ctx
->receiver_decl
, t
));
10709 gimple_seq_add_seq (&new_body
, par_ilist
);
10710 gimple_seq_add_seq (&new_body
, par_body
);
10711 gimple_seq_add_seq (&new_body
, par_rlist
);
10712 if (ctx
->cancellable
)
10713 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
10714 gimple_seq_add_seq (&new_body
, par_olist
);
10715 new_body
= maybe_catch_exception (new_body
);
10716 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
10717 gimple_seq_add_stmt (&new_body
,
10718 gimple_build_omp_continue (integer_zero_node
,
10719 integer_zero_node
));
10720 if (!phony_construct
)
10722 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
10723 gimple_omp_set_body (stmt
, new_body
);
/* Replace the directive with the bind (wrapped in dep_bind when depend /
   task-reduction sequences exist).  */
10726 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
10727 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
10729 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
10730 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
10731 gimple_bind_add_seq (bind
, ilist
);
10732 if (!phony_construct
)
10733 gimple_bind_add_stmt (bind
, stmt
);
10735 gimple_bind_add_seq (bind
, new_body
);
10736 gimple_bind_add_seq (bind
, olist
);
10738 pop_gimplify_context (NULL
);
10742 gimple_bind_add_seq (dep_bind
, dep_ilist
);
10743 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
10744 gimple_bind_add_stmt (dep_bind
, bind
);
10745 gimple_bind_add_seq (dep_bind
, tskred_olist
);
10746 gimple_bind_add_seq (dep_bind
, dep_olist
);
10747 pop_gimplify_context (dep_bind
);
10751 /* Lower the GIMPLE_OMP_TARGET in the current statement
10752 in GSI_P. CTX holds context information for the directive. */
10755 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10758 tree child_fn
, t
, c
;
10759 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
10760 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
10761 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
10762 location_t loc
= gimple_location (stmt
);
10763 bool offloaded
, data_region
;
10764 unsigned int map_cnt
= 0;
10766 offloaded
= is_gimple_omp_offloaded (stmt
);
10767 switch (gimple_omp_target_kind (stmt
))
10769 case GF_OMP_TARGET_KIND_REGION
:
10770 case GF_OMP_TARGET_KIND_UPDATE
:
10771 case GF_OMP_TARGET_KIND_ENTER_DATA
:
10772 case GF_OMP_TARGET_KIND_EXIT_DATA
:
10773 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
10774 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
10775 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
10776 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
10777 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
10778 data_region
= false;
10780 case GF_OMP_TARGET_KIND_DATA
:
10781 case GF_OMP_TARGET_KIND_OACC_DATA
:
10782 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
10783 data_region
= true;
10786 gcc_unreachable ();
10789 clauses
= gimple_omp_target_clauses (stmt
);
10791 gimple_seq dep_ilist
= NULL
;
10792 gimple_seq dep_olist
= NULL
;
10793 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
10795 push_gimplify_context ();
10796 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
10797 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
10798 &dep_ilist
, &dep_olist
);
10805 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
10806 tgt_body
= gimple_bind_body (tgt_bind
);
10808 else if (data_region
)
10809 tgt_body
= gimple_omp_body (stmt
);
10810 child_fn
= ctx
->cb
.dst_fn
;
10812 push_gimplify_context ();
10815 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10816 switch (OMP_CLAUSE_CODE (c
))
10822 case OMP_CLAUSE_MAP
:
10824 /* First check what we're prepared to handle in the following. */
10825 switch (OMP_CLAUSE_MAP_KIND (c
))
10827 case GOMP_MAP_ALLOC
:
10829 case GOMP_MAP_FROM
:
10830 case GOMP_MAP_TOFROM
:
10831 case GOMP_MAP_POINTER
:
10832 case GOMP_MAP_TO_PSET
:
10833 case GOMP_MAP_DELETE
:
10834 case GOMP_MAP_RELEASE
:
10835 case GOMP_MAP_ALWAYS_TO
:
10836 case GOMP_MAP_ALWAYS_FROM
:
10837 case GOMP_MAP_ALWAYS_TOFROM
:
10838 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
10839 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
10840 case GOMP_MAP_STRUCT
:
10841 case GOMP_MAP_ALWAYS_POINTER
:
10843 case GOMP_MAP_FORCE_ALLOC
:
10844 case GOMP_MAP_FORCE_TO
:
10845 case GOMP_MAP_FORCE_FROM
:
10846 case GOMP_MAP_FORCE_TOFROM
:
10847 case GOMP_MAP_FORCE_PRESENT
:
10848 case GOMP_MAP_FORCE_DEVICEPTR
:
10849 case GOMP_MAP_DEVICE_RESIDENT
:
10850 case GOMP_MAP_LINK
:
10851 gcc_assert (is_gimple_omp_oacc (stmt
));
10854 gcc_unreachable ();
10858 case OMP_CLAUSE_TO
:
10859 case OMP_CLAUSE_FROM
:
10861 var
= OMP_CLAUSE_DECL (c
);
10864 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
10865 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
10866 && (OMP_CLAUSE_MAP_KIND (c
)
10867 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
10872 if (DECL_SIZE (var
)
10873 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
10875 tree var2
= DECL_VALUE_EXPR (var
);
10876 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
10877 var2
= TREE_OPERAND (var2
, 0);
10878 gcc_assert (DECL_P (var2
));
10883 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
10884 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
10885 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
10887 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
10889 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
10890 && varpool_node::get_create (var
)->offloadable
)
10893 tree type
= build_pointer_type (TREE_TYPE (var
));
10894 tree new_var
= lookup_decl (var
, ctx
);
10895 x
= create_tmp_var_raw (type
, get_name (new_var
));
10896 gimple_add_tmp_var (x
);
10897 x
= build_simple_mem_ref (x
);
10898 SET_DECL_VALUE_EXPR (new_var
, x
);
10899 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
10904 if (!maybe_lookup_field (var
, ctx
))
10907 /* Don't remap oacc parallel reduction variables, because the
10908 intermediate result must be local to each gang. */
10909 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
10910 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
10912 x
= build_receiver_ref (var
, true, ctx
);
10913 tree new_var
= lookup_decl (var
, ctx
);
10915 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
10916 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
10917 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
10918 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
10919 x
= build_simple_mem_ref (x
);
10920 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10922 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
10923 if (omp_is_reference (new_var
)
10924 && TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
)
10926 /* Create a local object to hold the instance
10928 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
10929 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
10930 tree inst
= create_tmp_var (type
, id
);
10931 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
10932 x
= build_fold_addr_expr (inst
);
10934 gimplify_assign (new_var
, x
, &fplist
);
10936 else if (DECL_P (new_var
))
10938 SET_DECL_VALUE_EXPR (new_var
, x
);
10939 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
10942 gcc_unreachable ();
10947 case OMP_CLAUSE_FIRSTPRIVATE
:
10948 if (is_oacc_parallel (ctx
))
10949 goto oacc_firstprivate
;
10951 var
= OMP_CLAUSE_DECL (c
);
10952 if (!omp_is_reference (var
)
10953 && !is_gimple_reg_type (TREE_TYPE (var
)))
10955 tree new_var
= lookup_decl (var
, ctx
);
10956 if (is_variable_sized (var
))
10958 tree pvar
= DECL_VALUE_EXPR (var
);
10959 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
10960 pvar
= TREE_OPERAND (pvar
, 0);
10961 gcc_assert (DECL_P (pvar
));
10962 tree new_pvar
= lookup_decl (pvar
, ctx
);
10963 x
= build_fold_indirect_ref (new_pvar
);
10964 TREE_THIS_NOTRAP (x
) = 1;
10967 x
= build_receiver_ref (var
, true, ctx
);
10968 SET_DECL_VALUE_EXPR (new_var
, x
);
10969 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
10973 case OMP_CLAUSE_PRIVATE
:
10974 if (is_gimple_omp_oacc (ctx
->stmt
))
10976 var
= OMP_CLAUSE_DECL (c
);
10977 if (is_variable_sized (var
))
10979 tree new_var
= lookup_decl (var
, ctx
);
10980 tree pvar
= DECL_VALUE_EXPR (var
);
10981 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
10982 pvar
= TREE_OPERAND (pvar
, 0);
10983 gcc_assert (DECL_P (pvar
));
10984 tree new_pvar
= lookup_decl (pvar
, ctx
);
10985 x
= build_fold_indirect_ref (new_pvar
);
10986 TREE_THIS_NOTRAP (x
) = 1;
10987 SET_DECL_VALUE_EXPR (new_var
, x
);
10988 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
10992 case OMP_CLAUSE_USE_DEVICE_PTR
:
10993 case OMP_CLAUSE_IS_DEVICE_PTR
:
10994 var
= OMP_CLAUSE_DECL (c
);
10996 if (is_variable_sized (var
))
10998 tree new_var
= lookup_decl (var
, ctx
);
10999 tree pvar
= DECL_VALUE_EXPR (var
);
11000 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11001 pvar
= TREE_OPERAND (pvar
, 0);
11002 gcc_assert (DECL_P (pvar
));
11003 tree new_pvar
= lookup_decl (pvar
, ctx
);
11004 x
= build_fold_indirect_ref (new_pvar
);
11005 TREE_THIS_NOTRAP (x
) = 1;
11006 SET_DECL_VALUE_EXPR (new_var
, x
);
11007 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11009 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11011 tree new_var
= lookup_decl (var
, ctx
);
11012 tree type
= build_pointer_type (TREE_TYPE (var
));
11013 x
= create_tmp_var_raw (type
, get_name (new_var
));
11014 gimple_add_tmp_var (x
);
11015 x
= build_simple_mem_ref (x
);
11016 SET_DECL_VALUE_EXPR (new_var
, x
);
11017 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11021 tree new_var
= lookup_decl (var
, ctx
);
11022 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
11023 gimple_add_tmp_var (x
);
11024 SET_DECL_VALUE_EXPR (new_var
, x
);
11025 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11032 target_nesting_level
++;
11033 lower_omp (&tgt_body
, ctx
);
11034 target_nesting_level
--;
11036 else if (data_region
)
11037 lower_omp (&tgt_body
, ctx
);
11041 /* Declare all the variables created by mapping and the variables
11042 declared in the scope of the target body. */
11043 record_vars_into (ctx
->block_vars
, child_fn
);
11044 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
11045 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
11050 if (ctx
->record_type
)
11053 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
11054 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11055 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11056 t
= make_tree_vec (3);
11057 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
11058 TREE_VEC_ELT (t
, 1)
11059 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
11060 ".omp_data_sizes");
11061 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
11062 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
11063 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
11064 tree tkind_type
= short_unsigned_type_node
;
11065 int talign_shift
= 8;
11066 TREE_VEC_ELT (t
, 2)
11067 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
11068 ".omp_data_kinds");
11069 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
11070 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
11071 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
11072 gimple_omp_target_set_data_arg (stmt
, t
);
11074 vec
<constructor_elt
, va_gc
> *vsize
;
11075 vec
<constructor_elt
, va_gc
> *vkind
;
11076 vec_alloc (vsize
, map_cnt
);
11077 vec_alloc (vkind
, map_cnt
);
11078 unsigned int map_idx
= 0;
11080 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11081 switch (OMP_CLAUSE_CODE (c
))
11083 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
11084 unsigned int talign
;
11089 case OMP_CLAUSE_MAP
:
11090 case OMP_CLAUSE_TO
:
11091 case OMP_CLAUSE_FROM
:
11092 oacc_firstprivate_map
:
11094 ovar
= OMP_CLAUSE_DECL (c
);
11095 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11096 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11097 || (OMP_CLAUSE_MAP_KIND (c
)
11098 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
11100 if (!DECL_P (ovar
))
11102 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11103 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
11105 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
11106 == get_base_address (ovar
));
11107 nc
= OMP_CLAUSE_CHAIN (c
);
11108 ovar
= OMP_CLAUSE_DECL (nc
);
11112 tree x
= build_sender_ref (ovar
, ctx
);
11114 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
11115 gimplify_assign (x
, v
, &ilist
);
11121 if (DECL_SIZE (ovar
)
11122 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
11124 tree ovar2
= DECL_VALUE_EXPR (ovar
);
11125 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
11126 ovar2
= TREE_OPERAND (ovar2
, 0);
11127 gcc_assert (DECL_P (ovar2
));
11130 if (!maybe_lookup_field (ovar
, ctx
))
11134 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
11135 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
11136 talign
= DECL_ALIGN_UNIT (ovar
);
11139 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11140 x
= build_sender_ref (ovar
, ctx
);
11142 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11143 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11144 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11145 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
11147 gcc_assert (offloaded
);
11149 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
11150 mark_addressable (avar
);
11151 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
11152 talign
= DECL_ALIGN_UNIT (avar
);
11153 avar
= build_fold_addr_expr (avar
);
11154 gimplify_assign (x
, avar
, &ilist
);
11156 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11158 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11159 if (!omp_is_reference (var
))
11161 if (is_gimple_reg (var
)
11162 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11163 TREE_NO_WARNING (var
) = 1;
11164 var
= build_fold_addr_expr (var
);
11167 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11168 gimplify_assign (x
, var
, &ilist
);
11170 else if (is_gimple_reg (var
))
11172 gcc_assert (offloaded
);
11173 tree avar
= create_tmp_var (TREE_TYPE (var
));
11174 mark_addressable (avar
);
11175 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
11176 if (GOMP_MAP_COPY_TO_P (map_kind
)
11177 || map_kind
== GOMP_MAP_POINTER
11178 || map_kind
== GOMP_MAP_TO_PSET
11179 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11181 /* If we need to initialize a temporary
11182 with VAR because it is not addressable, and
11183 the variable hasn't been initialized yet, then
11184 we'll get a warning for the store to avar.
11185 Don't warn in that case, the mapping might
11187 TREE_NO_WARNING (var
) = 1;
11188 gimplify_assign (avar
, var
, &ilist
);
11190 avar
= build_fold_addr_expr (avar
);
11191 gimplify_assign (x
, avar
, &ilist
);
11192 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
11193 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11194 && !TYPE_READONLY (TREE_TYPE (var
)))
11196 x
= unshare_expr (x
);
11197 x
= build_simple_mem_ref (x
);
11198 gimplify_assign (var
, x
, &olist
);
11203 var
= build_fold_addr_expr (var
);
11204 gimplify_assign (x
, var
, &ilist
);
11208 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11210 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11211 s
= TREE_TYPE (ovar
);
11212 if (TREE_CODE (s
) == REFERENCE_TYPE
)
11214 s
= TYPE_SIZE_UNIT (s
);
11217 s
= OMP_CLAUSE_SIZE (c
);
11218 if (s
== NULL_TREE
)
11219 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
11220 s
= fold_convert (size_type_node
, s
);
11221 purpose
= size_int (map_idx
++);
11222 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11223 if (TREE_CODE (s
) != INTEGER_CST
)
11224 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
11226 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
11227 switch (OMP_CLAUSE_CODE (c
))
11229 case OMP_CLAUSE_MAP
:
11230 tkind
= OMP_CLAUSE_MAP_KIND (c
);
11231 tkind_zero
= tkind
;
11232 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
11235 case GOMP_MAP_ALLOC
:
11237 case GOMP_MAP_FROM
:
11238 case GOMP_MAP_TOFROM
:
11239 case GOMP_MAP_ALWAYS_TO
:
11240 case GOMP_MAP_ALWAYS_FROM
:
11241 case GOMP_MAP_ALWAYS_TOFROM
:
11242 case GOMP_MAP_RELEASE
:
11243 case GOMP_MAP_FORCE_TO
:
11244 case GOMP_MAP_FORCE_FROM
:
11245 case GOMP_MAP_FORCE_TOFROM
:
11246 case GOMP_MAP_FORCE_PRESENT
:
11247 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
11249 case GOMP_MAP_DELETE
:
11250 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
11254 if (tkind_zero
!= tkind
)
11256 if (integer_zerop (s
))
11257 tkind
= tkind_zero
;
11258 else if (integer_nonzerop (s
))
11259 tkind_zero
= tkind
;
11262 case OMP_CLAUSE_FIRSTPRIVATE
:
11263 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11264 tkind
= GOMP_MAP_TO
;
11265 tkind_zero
= tkind
;
11267 case OMP_CLAUSE_TO
:
11268 tkind
= GOMP_MAP_TO
;
11269 tkind_zero
= tkind
;
11271 case OMP_CLAUSE_FROM
:
11272 tkind
= GOMP_MAP_FROM
;
11273 tkind_zero
= tkind
;
11276 gcc_unreachable ();
11278 gcc_checking_assert (tkind
11279 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11280 gcc_checking_assert (tkind_zero
11281 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11282 talign
= ceil_log2 (talign
);
11283 tkind
|= talign
<< talign_shift
;
11284 tkind_zero
|= talign
<< talign_shift
;
11285 gcc_checking_assert (tkind
11286 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11287 gcc_checking_assert (tkind_zero
11288 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11289 if (tkind
== tkind_zero
)
11290 x
= build_int_cstu (tkind_type
, tkind
);
11293 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
11294 x
= build3 (COND_EXPR
, tkind_type
,
11295 fold_build2 (EQ_EXPR
, boolean_type_node
,
11296 unshare_expr (s
), size_zero_node
),
11297 build_int_cstu (tkind_type
, tkind_zero
),
11298 build_int_cstu (tkind_type
, tkind
));
11300 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
11305 case OMP_CLAUSE_FIRSTPRIVATE
:
11306 if (is_oacc_parallel (ctx
))
11307 goto oacc_firstprivate_map
;
11308 ovar
= OMP_CLAUSE_DECL (c
);
11309 if (omp_is_reference (ovar
))
11310 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11312 talign
= DECL_ALIGN_UNIT (ovar
);
11313 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11314 x
= build_sender_ref (ovar
, ctx
);
11315 tkind
= GOMP_MAP_FIRSTPRIVATE
;
11316 type
= TREE_TYPE (ovar
);
11317 if (omp_is_reference (ovar
))
11318 type
= TREE_TYPE (type
);
11319 if ((INTEGRAL_TYPE_P (type
)
11320 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
11321 || TREE_CODE (type
) == POINTER_TYPE
)
11323 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
11325 if (omp_is_reference (var
))
11326 t
= build_simple_mem_ref (var
);
11327 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11328 TREE_NO_WARNING (var
) = 1;
11329 if (TREE_CODE (type
) != POINTER_TYPE
)
11330 t
= fold_convert (pointer_sized_int_node
, t
);
11331 t
= fold_convert (TREE_TYPE (x
), t
);
11332 gimplify_assign (x
, t
, &ilist
);
11334 else if (omp_is_reference (var
))
11335 gimplify_assign (x
, var
, &ilist
);
11336 else if (is_gimple_reg (var
))
11338 tree avar
= create_tmp_var (TREE_TYPE (var
));
11339 mark_addressable (avar
);
11340 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11341 TREE_NO_WARNING (var
) = 1;
11342 gimplify_assign (avar
, var
, &ilist
);
11343 avar
= build_fold_addr_expr (avar
);
11344 gimplify_assign (x
, avar
, &ilist
);
11348 var
= build_fold_addr_expr (var
);
11349 gimplify_assign (x
, var
, &ilist
);
11351 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
11353 else if (omp_is_reference (ovar
))
11354 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11356 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
11357 s
= fold_convert (size_type_node
, s
);
11358 purpose
= size_int (map_idx
++);
11359 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11360 if (TREE_CODE (s
) != INTEGER_CST
)
11361 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
11363 gcc_checking_assert (tkind
11364 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11365 talign
= ceil_log2 (talign
);
11366 tkind
|= talign
<< talign_shift
;
11367 gcc_checking_assert (tkind
11368 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11369 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
11370 build_int_cstu (tkind_type
, tkind
));
11373 case OMP_CLAUSE_USE_DEVICE_PTR
:
11374 case OMP_CLAUSE_IS_DEVICE_PTR
:
11375 ovar
= OMP_CLAUSE_DECL (c
);
11376 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11377 x
= build_sender_ref (ovar
, ctx
);
11378 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
11379 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
11381 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
11382 type
= TREE_TYPE (ovar
);
11383 if (TREE_CODE (type
) == ARRAY_TYPE
)
11384 var
= build_fold_addr_expr (var
);
11387 if (omp_is_reference (ovar
))
11389 type
= TREE_TYPE (type
);
11390 if (TREE_CODE (type
) != ARRAY_TYPE
)
11391 var
= build_simple_mem_ref (var
);
11392 var
= fold_convert (TREE_TYPE (x
), var
);
11395 gimplify_assign (x
, var
, &ilist
);
11397 purpose
= size_int (map_idx
++);
11398 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11399 gcc_checking_assert (tkind
11400 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11401 gcc_checking_assert (tkind
11402 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11403 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
11404 build_int_cstu (tkind_type
, tkind
));
11408 gcc_assert (map_idx
== map_cnt
);
11410 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
11411 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
11412 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
11413 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
11414 for (int i
= 1; i
<= 2; i
++)
11415 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
11417 gimple_seq initlist
= NULL
;
11418 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
11419 TREE_VEC_ELT (t
, i
)),
11420 &initlist
, true, NULL_TREE
);
11421 gimple_seq_add_seq (&ilist
, initlist
);
11423 tree clobber
= build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, i
)),
11425 TREE_THIS_VOLATILE (clobber
) = 1;
11426 gimple_seq_add_stmt (&olist
,
11427 gimple_build_assign (TREE_VEC_ELT (t
, i
),
11431 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
11432 TREE_THIS_VOLATILE (clobber
) = 1;
11433 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
11437 /* Once all the expansions are done, sequence all the different
11438 fragments inside gimple_omp_body. */
11443 && ctx
->record_type
)
11445 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
11446 /* fixup_child_record_type might have changed receiver_decl's type. */
11447 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
11448 gimple_seq_add_stmt (&new_body
,
11449 gimple_build_assign (ctx
->receiver_decl
, t
));
11451 gimple_seq_add_seq (&new_body
, fplist
);
11453 if (offloaded
|| data_region
)
11455 tree prev
= NULL_TREE
;
11456 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11457 switch (OMP_CLAUSE_CODE (c
))
11462 case OMP_CLAUSE_FIRSTPRIVATE
:
11463 if (is_gimple_omp_oacc (ctx
->stmt
))
11465 var
= OMP_CLAUSE_DECL (c
);
11466 if (omp_is_reference (var
)
11467 || is_gimple_reg_type (TREE_TYPE (var
)))
11469 tree new_var
= lookup_decl (var
, ctx
);
11471 type
= TREE_TYPE (var
);
11472 if (omp_is_reference (var
))
11473 type
= TREE_TYPE (type
);
11474 if ((INTEGRAL_TYPE_P (type
)
11475 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
11476 || TREE_CODE (type
) == POINTER_TYPE
)
11478 x
= build_receiver_ref (var
, false, ctx
);
11479 if (TREE_CODE (type
) != POINTER_TYPE
)
11480 x
= fold_convert (pointer_sized_int_node
, x
);
11481 x
= fold_convert (type
, x
);
11482 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
11484 if (omp_is_reference (var
))
11486 tree v
= create_tmp_var_raw (type
, get_name (var
));
11487 gimple_add_tmp_var (v
);
11488 TREE_ADDRESSABLE (v
) = 1;
11489 gimple_seq_add_stmt (&new_body
,
11490 gimple_build_assign (v
, x
));
11491 x
= build_fold_addr_expr (v
);
11493 gimple_seq_add_stmt (&new_body
,
11494 gimple_build_assign (new_var
, x
));
11498 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
11499 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
11501 gimple_seq_add_stmt (&new_body
,
11502 gimple_build_assign (new_var
, x
));
11505 else if (is_variable_sized (var
))
11507 tree pvar
= DECL_VALUE_EXPR (var
);
11508 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11509 pvar
= TREE_OPERAND (pvar
, 0);
11510 gcc_assert (DECL_P (pvar
));
11511 tree new_var
= lookup_decl (pvar
, ctx
);
11512 x
= build_receiver_ref (var
, false, ctx
);
11513 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11514 gimple_seq_add_stmt (&new_body
,
11515 gimple_build_assign (new_var
, x
));
11518 case OMP_CLAUSE_PRIVATE
:
11519 if (is_gimple_omp_oacc (ctx
->stmt
))
11521 var
= OMP_CLAUSE_DECL (c
);
11522 if (omp_is_reference (var
))
11524 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
11525 tree new_var
= lookup_decl (var
, ctx
);
11526 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
11527 if (TREE_CONSTANT (x
))
11529 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
11531 gimple_add_tmp_var (x
);
11532 TREE_ADDRESSABLE (x
) = 1;
11533 x
= build_fold_addr_expr_loc (clause_loc
, x
);
11538 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
11539 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11540 gimple_seq_add_stmt (&new_body
,
11541 gimple_build_assign (new_var
, x
));
11544 case OMP_CLAUSE_USE_DEVICE_PTR
:
11545 case OMP_CLAUSE_IS_DEVICE_PTR
:
11546 var
= OMP_CLAUSE_DECL (c
);
11547 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
11548 x
= build_sender_ref (var
, ctx
);
11550 x
= build_receiver_ref (var
, false, ctx
);
11551 if (is_variable_sized (var
))
11553 tree pvar
= DECL_VALUE_EXPR (var
);
11554 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11555 pvar
= TREE_OPERAND (pvar
, 0);
11556 gcc_assert (DECL_P (pvar
));
11557 tree new_var
= lookup_decl (pvar
, ctx
);
11558 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11559 gimple_seq_add_stmt (&new_body
,
11560 gimple_build_assign (new_var
, x
));
11562 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11564 tree new_var
= lookup_decl (var
, ctx
);
11565 new_var
= DECL_VALUE_EXPR (new_var
);
11566 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
11567 new_var
= TREE_OPERAND (new_var
, 0);
11568 gcc_assert (DECL_P (new_var
));
11569 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11570 gimple_seq_add_stmt (&new_body
,
11571 gimple_build_assign (new_var
, x
));
11575 tree type
= TREE_TYPE (var
);
11576 tree new_var
= lookup_decl (var
, ctx
);
11577 if (omp_is_reference (var
))
11579 type
= TREE_TYPE (type
);
11580 if (TREE_CODE (type
) != ARRAY_TYPE
)
11582 tree v
= create_tmp_var_raw (type
, get_name (var
));
11583 gimple_add_tmp_var (v
);
11584 TREE_ADDRESSABLE (v
) = 1;
11585 x
= fold_convert (type
, x
);
11586 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
11588 gimple_seq_add_stmt (&new_body
,
11589 gimple_build_assign (v
, x
));
11590 x
= build_fold_addr_expr (v
);
11593 new_var
= DECL_VALUE_EXPR (new_var
);
11594 x
= fold_convert (TREE_TYPE (new_var
), x
);
11595 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11596 gimple_seq_add_stmt (&new_body
,
11597 gimple_build_assign (new_var
, x
));
11601 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
11602 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
11603 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
11604 or references to VLAs. */
11605 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11606 switch (OMP_CLAUSE_CODE (c
))
11611 case OMP_CLAUSE_MAP
:
11612 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11613 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
11615 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
11616 poly_int64 offset
= 0;
11618 var
= OMP_CLAUSE_DECL (c
);
11620 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
11621 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
11623 && varpool_node::get_create (var
)->offloadable
)
11625 if (TREE_CODE (var
) == INDIRECT_REF
11626 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
11627 var
= TREE_OPERAND (var
, 0);
11628 if (TREE_CODE (var
) == COMPONENT_REF
)
11630 var
= get_addr_base_and_unit_offset (var
, &offset
);
11631 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
11633 else if (DECL_SIZE (var
)
11634 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
11636 tree var2
= DECL_VALUE_EXPR (var
);
11637 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
11638 var2
= TREE_OPERAND (var2
, 0);
11639 gcc_assert (DECL_P (var2
));
11642 tree new_var
= lookup_decl (var
, ctx
), x
;
11643 tree type
= TREE_TYPE (new_var
);
11645 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
11646 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
11649 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
11651 new_var
= build2 (MEM_REF
, type
,
11652 build_fold_addr_expr (new_var
),
11653 build_int_cst (build_pointer_type (type
),
11656 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
11658 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
11659 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
11660 new_var
= build2 (MEM_REF
, type
,
11661 build_fold_addr_expr (new_var
),
11662 build_int_cst (build_pointer_type (type
),
11666 is_ref
= omp_is_reference (var
);
11667 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
11669 bool ref_to_array
= false;
11672 type
= TREE_TYPE (type
);
11673 if (TREE_CODE (type
) == ARRAY_TYPE
)
11675 type
= build_pointer_type (type
);
11676 ref_to_array
= true;
11679 else if (TREE_CODE (type
) == ARRAY_TYPE
)
11681 tree decl2
= DECL_VALUE_EXPR (new_var
);
11682 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
11683 decl2
= TREE_OPERAND (decl2
, 0);
11684 gcc_assert (DECL_P (decl2
));
11686 type
= TREE_TYPE (new_var
);
11688 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
11689 x
= fold_convert_loc (clause_loc
, type
, x
);
11690 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
11692 tree bias
= OMP_CLAUSE_SIZE (c
);
11694 bias
= lookup_decl (bias
, ctx
);
11695 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
11696 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
11698 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
11699 TREE_TYPE (x
), x
, bias
);
11702 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
11703 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11704 if (is_ref
&& !ref_to_array
)
11706 tree t
= create_tmp_var_raw (type
, get_name (var
));
11707 gimple_add_tmp_var (t
);
11708 TREE_ADDRESSABLE (t
) = 1;
11709 gimple_seq_add_stmt (&new_body
,
11710 gimple_build_assign (t
, x
));
11711 x
= build_fold_addr_expr_loc (clause_loc
, t
);
11713 gimple_seq_add_stmt (&new_body
,
11714 gimple_build_assign (new_var
, x
));
11717 else if (OMP_CLAUSE_CHAIN (c
)
11718 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
11720 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
11721 == GOMP_MAP_FIRSTPRIVATE_POINTER
11722 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
11723 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
11726 case OMP_CLAUSE_PRIVATE
:
11727 var
= OMP_CLAUSE_DECL (c
);
11728 if (is_variable_sized (var
))
11730 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
11731 tree new_var
= lookup_decl (var
, ctx
);
11732 tree pvar
= DECL_VALUE_EXPR (var
);
11733 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11734 pvar
= TREE_OPERAND (pvar
, 0);
11735 gcc_assert (DECL_P (pvar
));
11736 tree new_pvar
= lookup_decl (pvar
, ctx
);
11737 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
11738 tree al
= size_int (DECL_ALIGN (var
));
11739 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
11740 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
11741 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
11742 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11743 gimple_seq_add_stmt (&new_body
,
11744 gimple_build_assign (new_pvar
, x
));
11746 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
11748 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
11749 tree new_var
= lookup_decl (var
, ctx
);
11750 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
11751 if (TREE_CONSTANT (x
))
11756 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
11757 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
11758 tree al
= size_int (TYPE_ALIGN (rtype
));
11759 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
11762 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
11763 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11764 gimple_seq_add_stmt (&new_body
,
11765 gimple_build_assign (new_var
, x
));
11770 gimple_seq fork_seq
= NULL
;
11771 gimple_seq join_seq
= NULL
;
11773 if (is_oacc_parallel (ctx
))
11775 /* If there are reductions on the offloaded region itself, treat
11776 them as a dummy GANG loop. */
11777 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
11779 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
11780 false, NULL
, NULL
, &fork_seq
, &join_seq
, ctx
);
11783 gimple_seq_add_seq (&new_body
, fork_seq
);
11784 gimple_seq_add_seq (&new_body
, tgt_body
);
11785 gimple_seq_add_seq (&new_body
, join_seq
);
11788 new_body
= maybe_catch_exception (new_body
);
11790 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
11791 gimple_omp_set_body (stmt
, new_body
);
11794 bind
= gimple_build_bind (NULL
, NULL
,
11795 tgt_bind
? gimple_bind_block (tgt_bind
)
11797 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
11798 gimple_bind_add_seq (bind
, ilist
);
11799 gimple_bind_add_stmt (bind
, stmt
);
11800 gimple_bind_add_seq (bind
, olist
);
11802 pop_gimplify_context (NULL
);
11806 gimple_bind_add_seq (dep_bind
, dep_ilist
);
11807 gimple_bind_add_stmt (dep_bind
, bind
);
11808 gimple_bind_add_seq (dep_bind
, dep_olist
);
11809 pop_gimplify_context (dep_bind
);
11813 /* Expand code for an OpenMP teams directive. */
11816 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11818 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
11819 push_gimplify_context ();
11821 tree block
= make_node (BLOCK
);
11822 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
11823 gsi_replace (gsi_p
, bind
, true);
11824 gimple_seq bind_body
= NULL
;
11825 gimple_seq dlist
= NULL
;
11826 gimple_seq olist
= NULL
;
11828 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
11829 OMP_CLAUSE_NUM_TEAMS
);
11830 if (num_teams
== NULL_TREE
)
11831 num_teams
= build_int_cst (unsigned_type_node
, 0);
11834 num_teams
= OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams
);
11835 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
11836 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
11838 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
11839 OMP_CLAUSE_THREAD_LIMIT
);
11840 if (thread_limit
== NULL_TREE
)
11841 thread_limit
= build_int_cst (unsigned_type_node
, 0);
11844 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
11845 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
11846 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
11850 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
11851 &bind_body
, &dlist
, ctx
, NULL
);
11852 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
11853 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
,
11855 if (!gimple_omp_teams_grid_phony (teams_stmt
))
11857 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
11858 location_t loc
= gimple_location (teams_stmt
);
11859 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS
);
11860 gimple
*call
= gimple_build_call (decl
, 2, num_teams
, thread_limit
);
11861 gimple_set_location (call
, loc
);
11862 gimple_seq_add_stmt (&bind_body
, call
);
11865 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
11866 gimple_omp_set_body (teams_stmt
, NULL
);
11867 gimple_seq_add_seq (&bind_body
, olist
);
11868 gimple_seq_add_seq (&bind_body
, dlist
);
11869 if (!gimple_omp_teams_grid_phony (teams_stmt
))
11870 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
11871 gimple_bind_set_body (bind
, bind_body
);
11873 pop_gimplify_context (bind
);
11875 gimple_bind_append_vars (bind
, ctx
->block_vars
);
11876 BLOCK_VARS (block
) = ctx
->block_vars
;
11877 if (BLOCK_VARS (block
))
11878 TREE_USED (block
) = 1;
11881 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
11884 lower_omp_grid_body (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11886 gimple
*stmt
= gsi_stmt (*gsi_p
);
11887 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
11888 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt
),
11889 gimple_build_omp_return (false));
11893 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
11894 regimplified. If DATA is non-NULL, lower_omp_1 is outside
11895 of OMP context, but with task_shared_vars set. */
11898 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
11903 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
11904 if (VAR_P (t
) && data
== NULL
&& DECL_HAS_VALUE_EXPR_P (t
))
11907 if (task_shared_vars
11909 && bitmap_bit_p (task_shared_vars
, DECL_UID (t
)))
11912 /* If a global variable has been privatized, TREE_CONSTANT on
11913 ADDR_EXPR might be wrong. */
11914 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
11915 recompute_tree_invariant_for_addr_expr (t
);
11917 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
11921 /* Data to be communicated between lower_omp_regimplify_operands and
11922 lower_omp_regimplify_operands_p. */
11924 struct lower_omp_regimplify_operands_data
11930 /* Helper function for lower_omp_regimplify_operands. Find
11931 omp_member_access_dummy_var vars and adjust temporarily their
11932 DECL_VALUE_EXPRs if needed. */
11935 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
11938 tree t
= omp_member_access_dummy_var (*tp
);
11941 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
11942 lower_omp_regimplify_operands_data
*ldata
11943 = (lower_omp_regimplify_operands_data
*) wi
->info
;
11944 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
11947 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
11948 ldata
->decls
->safe_push (*tp
);
11949 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
11950 SET_DECL_VALUE_EXPR (*tp
, v
);
11953 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
11957 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
11958 of omp_member_access_dummy_var vars during regimplification. */
11961 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
11962 gimple_stmt_iterator
*gsi_p
)
11964 auto_vec
<tree
, 10> decls
;
11967 struct walk_stmt_info wi
;
11968 memset (&wi
, '\0', sizeof (wi
));
11969 struct lower_omp_regimplify_operands_data data
;
11971 data
.decls
= &decls
;
11973 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
11975 gimple_regimplify_operands (stmt
, gsi_p
);
11976 while (!decls
.is_empty ())
11978 tree t
= decls
.pop ();
11979 tree v
= decls
.pop ();
11980 SET_DECL_VALUE_EXPR (t
, v
);
11985 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11987 gimple
*stmt
= gsi_stmt (*gsi_p
);
11988 struct walk_stmt_info wi
;
11991 if (gimple_has_location (stmt
))
11992 input_location
= gimple_location (stmt
);
11994 if (task_shared_vars
)
11995 memset (&wi
, '\0', sizeof (wi
));
11997 /* If we have issued syntax errors, avoid doing any heavy lifting.
11998 Just replace the OMP directives with a NOP to avoid
11999 confusing RTL expansion. */
12000 if (seen_error () && is_gimple_omp (stmt
))
12002 gsi_replace (gsi_p
, gimple_build_nop (), true);
12006 switch (gimple_code (stmt
))
12010 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
12011 if ((ctx
|| task_shared_vars
)
12012 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
12013 lower_omp_regimplify_p
,
12014 ctx
? NULL
: &wi
, NULL
)
12015 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
12016 lower_omp_regimplify_p
,
12017 ctx
? NULL
: &wi
, NULL
)))
12018 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
12022 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
12024 case GIMPLE_EH_FILTER
:
12025 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
12028 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
12029 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
12031 case GIMPLE_TRANSACTION
:
12032 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
12036 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
12037 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
12039 case GIMPLE_OMP_PARALLEL
:
12040 case GIMPLE_OMP_TASK
:
12041 ctx
= maybe_lookup_ctx (stmt
);
12043 if (ctx
->cancellable
)
12044 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12045 lower_omp_taskreg (gsi_p
, ctx
);
12047 case GIMPLE_OMP_FOR
:
12048 ctx
= maybe_lookup_ctx (stmt
);
12050 if (ctx
->cancellable
)
12051 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12052 lower_omp_for (gsi_p
, ctx
);
12054 case GIMPLE_OMP_SECTIONS
:
12055 ctx
= maybe_lookup_ctx (stmt
);
12057 if (ctx
->cancellable
)
12058 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12059 lower_omp_sections (gsi_p
, ctx
);
12061 case GIMPLE_OMP_SINGLE
:
12062 ctx
= maybe_lookup_ctx (stmt
);
12064 lower_omp_single (gsi_p
, ctx
);
12066 case GIMPLE_OMP_MASTER
:
12067 ctx
= maybe_lookup_ctx (stmt
);
12069 lower_omp_master (gsi_p
, ctx
);
12071 case GIMPLE_OMP_TASKGROUP
:
12072 ctx
= maybe_lookup_ctx (stmt
);
12074 lower_omp_taskgroup (gsi_p
, ctx
);
12076 case GIMPLE_OMP_ORDERED
:
12077 ctx
= maybe_lookup_ctx (stmt
);
12079 lower_omp_ordered (gsi_p
, ctx
);
12081 case GIMPLE_OMP_SCAN
:
12082 ctx
= maybe_lookup_ctx (stmt
);
12084 lower_omp_scan (gsi_p
, ctx
);
12086 case GIMPLE_OMP_CRITICAL
:
12087 ctx
= maybe_lookup_ctx (stmt
);
12089 lower_omp_critical (gsi_p
, ctx
);
12091 case GIMPLE_OMP_ATOMIC_LOAD
:
12092 if ((ctx
|| task_shared_vars
)
12093 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12094 as_a
<gomp_atomic_load
*> (stmt
)),
12095 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
12096 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
12098 case GIMPLE_OMP_TARGET
:
12099 ctx
= maybe_lookup_ctx (stmt
);
12101 lower_omp_target (gsi_p
, ctx
);
12103 case GIMPLE_OMP_TEAMS
:
12104 ctx
= maybe_lookup_ctx (stmt
);
12106 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
12107 lower_omp_taskreg (gsi_p
, ctx
);
12109 lower_omp_teams (gsi_p
, ctx
);
12111 case GIMPLE_OMP_GRID_BODY
:
12112 ctx
= maybe_lookup_ctx (stmt
);
12114 lower_omp_grid_body (gsi_p
, ctx
);
12118 call_stmt
= as_a
<gcall
*> (stmt
);
12119 fndecl
= gimple_call_fndecl (call_stmt
);
12121 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
12122 switch (DECL_FUNCTION_CODE (fndecl
))
12124 case BUILT_IN_GOMP_BARRIER
:
12128 case BUILT_IN_GOMP_CANCEL
:
12129 case BUILT_IN_GOMP_CANCELLATION_POINT
:
12132 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
12133 cctx
= cctx
->outer
;
12134 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
12135 if (!cctx
->cancellable
)
12137 if (DECL_FUNCTION_CODE (fndecl
)
12138 == BUILT_IN_GOMP_CANCELLATION_POINT
)
12140 stmt
= gimple_build_nop ();
12141 gsi_replace (gsi_p
, stmt
, false);
12145 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
12147 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
12148 gimple_call_set_fndecl (call_stmt
, fndecl
);
12149 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
12152 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
12153 gimple_call_set_lhs (call_stmt
, lhs
);
12154 tree fallthru_label
;
12155 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
12157 g
= gimple_build_label (fallthru_label
);
12158 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
12159 g
= gimple_build_cond (NE_EXPR
, lhs
,
12160 fold_convert (TREE_TYPE (lhs
),
12161 boolean_false_node
),
12162 cctx
->cancel_label
, fallthru_label
);
12163 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
12170 case GIMPLE_ASSIGN
:
12171 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
12173 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
12174 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
12175 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
12176 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
12177 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
12178 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
12179 && (gimple_omp_target_kind (up
->stmt
)
12180 == GF_OMP_TARGET_KIND_DATA
)))
12182 else if (!up
->lastprivate_conditional_map
)
12184 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
12185 if (TREE_CODE (lhs
) == MEM_REF
12186 && DECL_P (TREE_OPERAND (lhs
, 0))
12187 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
12188 0))) == REFERENCE_TYPE
)
12189 lhs
= TREE_OPERAND (lhs
, 0);
12191 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
12194 if (up
->combined_into_simd_safelen0
)
12196 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
12197 clauses
= gimple_omp_for_clauses (up
->stmt
);
12199 clauses
= gimple_omp_sections_clauses (up
->stmt
);
12200 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
);
12201 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
12202 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
12203 OMP_CLAUSE__CONDTEMP_
);
12204 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
12205 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
12206 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
12213 if ((ctx
|| task_shared_vars
)
12214 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
12217 /* Just remove clobbers, this should happen only if we have
12218 "privatized" local addressable variables in SIMD regions,
12219 the clobber isn't needed in that case and gimplifying address
12220 of the ARRAY_REF into a pointer and creating MEM_REF based
12221 clobber would create worse code than we get with the clobber
12223 if (gimple_clobber_p (stmt
))
12225 gsi_replace (gsi_p
, gimple_build_nop (), true);
12228 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
12235 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
12237 location_t saved_location
= input_location
;
12238 gimple_stmt_iterator gsi
;
12239 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
12240 lower_omp_1 (&gsi
, ctx
);
12241 /* During gimplification, we haven't folded statments inside offloading
12242 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12243 if (target_nesting_level
|| taskreg_nesting_level
)
12244 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
12246 input_location
= saved_location
;
12249 /* Main entry point. */
12251 static unsigned int
12252 execute_lower_omp (void)
12258 /* This pass always runs, to provide PROP_gimple_lomp.
12259 But often, there is nothing to do. */
12260 if (flag_openacc
== 0 && flag_openmp
== 0
12261 && flag_openmp_simd
== 0)
12264 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
12265 delete_omp_context
);
12267 body
= gimple_body (current_function_decl
);
12269 if (hsa_gen_requested_p ())
12270 omp_grid_gridify_all_targets (&body
);
12272 scan_omp (&body
, NULL
);
12273 gcc_assert (taskreg_nesting_level
== 0);
12274 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
12275 finish_taskreg_scan (ctx
);
12276 taskreg_contexts
.release ();
12278 if (all_contexts
->root
)
12280 if (task_shared_vars
)
12281 push_gimplify_context ();
12282 lower_omp (&body
, NULL
);
12283 if (task_shared_vars
)
12284 pop_gimplify_context (NULL
);
12289 splay_tree_delete (all_contexts
);
12290 all_contexts
= NULL
;
12292 BITMAP_FREE (task_shared_vars
);
12294 /* If current function is a method, remove artificial dummy VAR_DECL created
12295 for non-static data member privatization, they aren't needed for
12296 debuginfo nor anything else, have been already replaced everywhere in the
12297 IL and cause problems with LTO. */
12298 if (DECL_ARGUMENTS (current_function_decl
)
12299 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
12300 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
12302 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
12308 const pass_data pass_data_lower_omp
=
12310 GIMPLE_PASS
, /* type */
12311 "omplower", /* name */
12312 OPTGROUP_OMP
, /* optinfo_flags */
12313 TV_NONE
, /* tv_id */
12314 PROP_gimple_any
, /* properties_required */
12315 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
12316 0, /* properties_destroyed */
12317 0, /* todo_flags_start */
12318 0, /* todo_flags_finish */
12321 class pass_lower_omp
: public gimple_opt_pass
12324 pass_lower_omp (gcc::context
*ctxt
)
12325 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
12328 /* opt_pass methods: */
12329 virtual unsigned int execute (function
*) { return execute_lower_omp (); }
12331 }; // class pass_lower_omp
12333 } // anon namespace
12336 make_pass_lower_omp (gcc::context
*ctxt
)
12338 return new pass_lower_omp (ctxt
);
12341 /* The following is a utility to diagnose structured block violations.
12342 It is not part of the "omplower" pass, as that's invoked too late. It
12343 should be invoked by the respective front ends after gimplification. */
12345 static splay_tree all_labels
;
12347 /* Check for mismatched contexts and generate an error if needed. Return
12348 true if an error is detected. */
12351 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
12352 gimple
*branch_ctx
, gimple
*label_ctx
)
12354 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
12355 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
12357 if (label_ctx
== branch_ctx
)
12360 const char* kind
= NULL
;
12364 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
12365 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
12367 gcc_checking_assert (kind
== NULL
);
12373 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
12377 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
12378 so we could traverse it and issue a correct "exit" or "enter" error
12379 message upon a structured block violation.
12381 We built the context by building a list with tree_cons'ing, but there is
12382 no easy counterpart in gimple tuples. It seems like far too much work
12383 for issuing exit/enter error messages. If someone really misses the
12384 distinct error message... patches welcome. */
12387 /* Try to avoid confusing the user by producing and error message
12388 with correct "exit" or "enter" verbiage. We prefer "exit"
12389 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
12390 if (branch_ctx
== NULL
)
12396 if (TREE_VALUE (label_ctx
) == branch_ctx
)
12401 label_ctx
= TREE_CHAIN (label_ctx
);
12406 error ("invalid exit from %s structured block", kind
);
12408 error ("invalid entry to %s structured block", kind
);
12411 /* If it's obvious we have an invalid entry, be specific about the error. */
12412 if (branch_ctx
== NULL
)
12413 error ("invalid entry to %s structured block", kind
);
12416 /* Otherwise, be vague and lazy, but efficient. */
12417 error ("invalid branch to/from %s structured block", kind
);
12420 gsi_replace (gsi_p
, gimple_build_nop (), false);
12424 /* Pass 1: Create a minimal tree of structured blocks, and record
12425 where each label is found. */
12428 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
12429 struct walk_stmt_info
*wi
)
12431 gimple
*context
= (gimple
*) wi
->info
;
12432 gimple
*inner_context
;
12433 gimple
*stmt
= gsi_stmt (*gsi_p
);
12435 *handled_ops_p
= true;
12437 switch (gimple_code (stmt
))
12441 case GIMPLE_OMP_PARALLEL
:
12442 case GIMPLE_OMP_TASK
:
12443 case GIMPLE_OMP_SECTIONS
:
12444 case GIMPLE_OMP_SINGLE
:
12445 case GIMPLE_OMP_SECTION
:
12446 case GIMPLE_OMP_MASTER
:
12447 case GIMPLE_OMP_ORDERED
:
12448 case GIMPLE_OMP_SCAN
:
12449 case GIMPLE_OMP_CRITICAL
:
12450 case GIMPLE_OMP_TARGET
:
12451 case GIMPLE_OMP_TEAMS
:
12452 case GIMPLE_OMP_TASKGROUP
:
12453 /* The minimal context here is just the current OMP construct. */
12454 inner_context
= stmt
;
12455 wi
->info
= inner_context
;
12456 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
12457 wi
->info
= context
;
12460 case GIMPLE_OMP_FOR
:
12461 inner_context
= stmt
;
12462 wi
->info
= inner_context
;
12463 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
12465 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
12466 diagnose_sb_1
, NULL
, wi
);
12467 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
12468 wi
->info
= context
;
12472 splay_tree_insert (all_labels
,
12473 (splay_tree_key
) gimple_label_label (
12474 as_a
<glabel
*> (stmt
)),
12475 (splay_tree_value
) context
);
12485 /* Pass 2: Check each branch and see if its context differs from that of
12486 the destination label's context. */
12489 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
12490 struct walk_stmt_info
*wi
)
12492 gimple
*context
= (gimple
*) wi
->info
;
12494 gimple
*stmt
= gsi_stmt (*gsi_p
);
12496 *handled_ops_p
= true;
12498 switch (gimple_code (stmt
))
12502 case GIMPLE_OMP_PARALLEL
:
12503 case GIMPLE_OMP_TASK
:
12504 case GIMPLE_OMP_SECTIONS
:
12505 case GIMPLE_OMP_SINGLE
:
12506 case GIMPLE_OMP_SECTION
:
12507 case GIMPLE_OMP_MASTER
:
12508 case GIMPLE_OMP_ORDERED
:
12509 case GIMPLE_OMP_SCAN
:
12510 case GIMPLE_OMP_CRITICAL
:
12511 case GIMPLE_OMP_TARGET
:
12512 case GIMPLE_OMP_TEAMS
:
12513 case GIMPLE_OMP_TASKGROUP
:
12515 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
12516 wi
->info
= context
;
12519 case GIMPLE_OMP_FOR
:
12521 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
12523 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
12524 diagnose_sb_2
, NULL
, wi
);
12525 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
12526 wi
->info
= context
;
12531 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
12532 tree lab
= gimple_cond_true_label (cond_stmt
);
12535 n
= splay_tree_lookup (all_labels
,
12536 (splay_tree_key
) lab
);
12537 diagnose_sb_0 (gsi_p
, context
,
12538 n
? (gimple
*) n
->value
: NULL
);
12540 lab
= gimple_cond_false_label (cond_stmt
);
12543 n
= splay_tree_lookup (all_labels
,
12544 (splay_tree_key
) lab
);
12545 diagnose_sb_0 (gsi_p
, context
,
12546 n
? (gimple
*) n
->value
: NULL
);
12553 tree lab
= gimple_goto_dest (stmt
);
12554 if (TREE_CODE (lab
) != LABEL_DECL
)
12557 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
12558 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
12562 case GIMPLE_SWITCH
:
12564 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
12566 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
12568 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
12569 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
12570 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
12576 case GIMPLE_RETURN
:
12577 diagnose_sb_0 (gsi_p
, context
, NULL
);
12587 static unsigned int
12588 diagnose_omp_structured_block_errors (void)
12590 struct walk_stmt_info wi
;
12591 gimple_seq body
= gimple_body (current_function_decl
);
12593 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
12595 memset (&wi
, 0, sizeof (wi
));
12596 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
12598 memset (&wi
, 0, sizeof (wi
));
12599 wi
.want_locations
= true;
12600 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
12602 gimple_set_body (current_function_decl
, body
);
12604 splay_tree_delete (all_labels
);
12612 const pass_data pass_data_diagnose_omp_blocks
=
12614 GIMPLE_PASS
, /* type */
12615 "*diagnose_omp_blocks", /* name */
12616 OPTGROUP_OMP
, /* optinfo_flags */
12617 TV_NONE
, /* tv_id */
12618 PROP_gimple_any
, /* properties_required */
12619 0, /* properties_provided */
12620 0, /* properties_destroyed */
12621 0, /* todo_flags_start */
12622 0, /* todo_flags_finish */
12625 class pass_diagnose_omp_blocks
: public gimple_opt_pass
12628 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
12629 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
12632 /* opt_pass methods: */
12633 virtual bool gate (function
*)
12635 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
12637 virtual unsigned int execute (function
*)
12639 return diagnose_omp_structured_block_errors ();
12642 }; // class pass_diagnose_omp_blocks
12644 } // anon namespace
12647 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
12649 return new pass_diagnose_omp_blocks (ctxt
);
12653 #include "gt-omp-low.h"