1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec
<tree
> task_reductions
;
122 /* A hash map from the reduction clauses to the registered array
124 hash_map
<tree
, unsigned> *task_reduction_map
;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
130 /* Nesting depth of this context. Used to beautify error messages re
131 invalid gotos. The outermost ctx is depth 1, with depth 0 being
132 reserved for the main body of the function. */
135 /* True if this parallel directive is nested within another. */
138 /* True if this construct can be cancelled. */
141 /* True if lower_omp_1 should look up lastprivate conditional in parent
143 bool combined_into_simd_safelen1
;
145 /* True if there is nested scan context with inclusive clause. */
148 /* True if there is nested scan context with exclusive clause. */
151 /* True in the second simd loop of for simd with inscan reductions. */
152 bool for_simd_scan_phase
;
154 /* True if there is order(concurrent) clause on the construct. */
155 bool order_concurrent
;
157 /* True if there is bind clause on the construct (i.e. a loop construct). */
161 static splay_tree all_contexts
;
162 static int taskreg_nesting_level
;
163 static int target_nesting_level
;
164 static bitmap task_shared_vars
;
165 static bitmap global_nonaddressable_vars
;
166 static vec
<omp_context
*> taskreg_contexts
;
168 static void scan_omp (gimple_seq
*, omp_context
*);
169 static tree
scan_omp_1_op (tree
*, int *, void *);
171 #define WALK_SUBSTMTS \
175 case GIMPLE_EH_FILTER: \
176 case GIMPLE_TRANSACTION: \
177 /* The sub-statements for these should be walked. */ \
178 *handled_ops_p = false; \
181 /* Return true if CTX corresponds to an oacc parallel region. */
184 is_oacc_parallel (omp_context
*ctx
)
186 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
187 return ((outer_type
== GIMPLE_OMP_TARGET
)
188 && (gimple_omp_target_kind (ctx
->stmt
)
189 == GF_OMP_TARGET_KIND_OACC_PARALLEL
));
192 /* Return true if CTX corresponds to an oacc kernels region. */
195 is_oacc_kernels (omp_context
*ctx
)
197 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
198 return ((outer_type
== GIMPLE_OMP_TARGET
)
199 && (gimple_omp_target_kind (ctx
->stmt
)
200 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
203 /* If DECL is the artificial dummy VAR_DECL created for non-static
204 data member privatization, return the underlying "this" parameter,
205 otherwise return NULL. */
208 omp_member_access_dummy_var (tree decl
)
211 || !DECL_ARTIFICIAL (decl
)
212 || !DECL_IGNORED_P (decl
)
213 || !DECL_HAS_VALUE_EXPR_P (decl
)
214 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
217 tree v
= DECL_VALUE_EXPR (decl
);
218 if (TREE_CODE (v
) != COMPONENT_REF
)
222 switch (TREE_CODE (v
))
228 case POINTER_PLUS_EXPR
:
229 v
= TREE_OPERAND (v
, 0);
232 if (DECL_CONTEXT (v
) == current_function_decl
233 && DECL_ARTIFICIAL (v
)
234 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
242 /* Helper for unshare_and_remap, called through walk_tree. */
245 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
247 tree
*pair
= (tree
*) data
;
250 *tp
= unshare_expr (pair
[1]);
253 else if (IS_TYPE_OR_DECL_P (*tp
))
258 /* Return unshare_expr (X) with all occurrences of FROM
262 unshare_and_remap (tree x
, tree from
, tree to
)
264 tree pair
[2] = { from
, to
};
265 x
= unshare_expr (x
);
266 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
270 /* Convenience function for calling scan_omp_1_op on tree operands. */
273 scan_omp_op (tree
*tp
, omp_context
*ctx
)
275 struct walk_stmt_info wi
;
277 memset (&wi
, 0, sizeof (wi
));
279 wi
.want_locations
= true;
281 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
284 static void lower_omp (gimple_seq
*, omp_context
*);
285 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
286 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
288 /* Return true if CTX is for an omp parallel. */
291 is_parallel_ctx (omp_context
*ctx
)
293 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
297 /* Return true if CTX is for an omp task. */
300 is_task_ctx (omp_context
*ctx
)
302 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
306 /* Return true if CTX is for an omp taskloop. */
309 is_taskloop_ctx (omp_context
*ctx
)
311 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
312 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
316 /* Return true if CTX is for a host omp teams. */
319 is_host_teams_ctx (omp_context
*ctx
)
321 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
322 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
325 /* Return true if CTX is for an omp parallel or omp task or host omp teams
326 (the last one is strictly not a task region in OpenMP speak, but we
327 need to treat it similarly). */
330 is_taskreg_ctx (omp_context
*ctx
)
332 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
335 /* Return true if EXPR is variable sized. */
338 is_variable_sized (const_tree expr
)
340 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
343 /* Lookup variables. The "maybe" form
344 allows for the variable form to not have been entered, otherwise we
345 assert that the variable must have been entered. */
348 lookup_decl (tree var
, omp_context
*ctx
)
350 tree
*n
= ctx
->cb
.decl_map
->get (var
);
355 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
357 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
358 return n
? *n
: NULL_TREE
;
362 lookup_field (tree var
, omp_context
*ctx
)
365 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
366 return (tree
) n
->value
;
370 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
373 n
= splay_tree_lookup (ctx
->sfield_map
374 ? ctx
->sfield_map
: ctx
->field_map
, key
);
375 return (tree
) n
->value
;
379 lookup_sfield (tree var
, omp_context
*ctx
)
381 return lookup_sfield ((splay_tree_key
) var
, ctx
);
385 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
388 n
= splay_tree_lookup (ctx
->field_map
, key
);
389 return n
? (tree
) n
->value
: NULL_TREE
;
393 maybe_lookup_field (tree var
, omp_context
*ctx
)
395 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
398 /* Return true if DECL should be copied by pointer. SHARED_CTX is
399 the parallel context if DECL is to be shared. */
402 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
404 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
405 || TYPE_ATOMIC (TREE_TYPE (decl
)))
408 /* We can only use copy-in/copy-out semantics for shared variables
409 when we know the value is not accessible from an outer scope. */
412 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
414 /* ??? Trivially accessible from anywhere. But why would we even
415 be passing an address in this case? Should we simply assert
416 this to be false, or should we have a cleanup pass that removes
417 these from the list of mappings? */
418 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
421 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
422 without analyzing the expression whether or not its location
423 is accessible to anyone else. In the case of nested parallel
424 regions it certainly may be. */
425 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
428 /* Do not use copy-in/copy-out for variables that have their
430 if (is_global_var (decl
))
432 /* For file scope vars, track whether we've seen them as
433 non-addressable initially and in that case, keep the same
434 answer for the duration of the pass, even when they are made
435 addressable later on e.g. through reduction expansion. Global
436 variables which weren't addressable before the pass will not
437 have their privatized copies address taken. See PR91216. */
438 if (!TREE_ADDRESSABLE (decl
))
440 if (!global_nonaddressable_vars
)
441 global_nonaddressable_vars
= BITMAP_ALLOC (NULL
);
442 bitmap_set_bit (global_nonaddressable_vars
, DECL_UID (decl
));
444 else if (!global_nonaddressable_vars
445 || !bitmap_bit_p (global_nonaddressable_vars
,
449 else if (TREE_ADDRESSABLE (decl
))
452 /* lower_send_shared_vars only uses copy-in, but not copy-out
454 if (TREE_READONLY (decl
)
455 || ((TREE_CODE (decl
) == RESULT_DECL
456 || TREE_CODE (decl
) == PARM_DECL
)
457 && DECL_BY_REFERENCE (decl
)))
460 /* Disallow copy-in/out in nested parallel if
461 decl is shared in outer parallel, otherwise
462 each thread could store the shared variable
463 in its own copy-in location, making the
464 variable no longer really shared. */
465 if (shared_ctx
->is_nested
)
469 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
470 if (is_taskreg_ctx (up
) && maybe_lookup_decl (decl
, up
))
477 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
478 c
; c
= OMP_CLAUSE_CHAIN (c
))
479 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
480 && OMP_CLAUSE_DECL (c
) == decl
)
484 goto maybe_mark_addressable_and_ret
;
488 /* For tasks avoid using copy-in/out. As tasks can be
489 deferred or executed in different thread, when GOMP_task
490 returns, the task hasn't necessarily terminated. */
491 if (is_task_ctx (shared_ctx
))
494 maybe_mark_addressable_and_ret
:
495 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
496 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
498 /* Taking address of OUTER in lower_send_shared_vars
499 might need regimplification of everything that uses the
501 if (!task_shared_vars
)
502 task_shared_vars
= BITMAP_ALLOC (NULL
);
503 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
504 TREE_ADDRESSABLE (outer
) = 1;
513 /* Construct a new automatic decl similar to VAR. */
516 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
518 tree copy
= copy_var_decl (var
, name
, type
);
520 DECL_CONTEXT (copy
) = current_function_decl
;
521 DECL_CHAIN (copy
) = ctx
->block_vars
;
522 /* If VAR is listed in task_shared_vars, it means it wasn't
523 originally addressable and is just because task needs to take
524 it's address. But we don't need to take address of privatizations
526 if (TREE_ADDRESSABLE (var
)
527 && ((task_shared_vars
528 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
529 || (global_nonaddressable_vars
530 && bitmap_bit_p (global_nonaddressable_vars
, DECL_UID (var
)))))
531 TREE_ADDRESSABLE (copy
) = 0;
532 ctx
->block_vars
= copy
;
538 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
540 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
543 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
546 omp_build_component_ref (tree obj
, tree field
)
548 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
549 if (TREE_THIS_VOLATILE (field
))
550 TREE_THIS_VOLATILE (ret
) |= 1;
551 if (TREE_READONLY (field
))
552 TREE_READONLY (ret
) |= 1;
556 /* Build tree nodes to access the field for VAR on the receiver side. */
559 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
561 tree x
, field
= lookup_field (var
, ctx
);
563 /* If the receiver record type was remapped in the child function,
564 remap the field into the new record type. */
565 x
= maybe_lookup_field (field
, ctx
);
569 x
= build_simple_mem_ref (ctx
->receiver_decl
);
570 TREE_THIS_NOTRAP (x
) = 1;
571 x
= omp_build_component_ref (x
, field
);
574 x
= build_simple_mem_ref (x
);
575 TREE_THIS_NOTRAP (x
) = 1;
581 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
582 of a parallel, this is a component reference; for workshare constructs
583 this is some variable. */
586 build_outer_var_ref (tree var
, omp_context
*ctx
,
587 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
590 omp_context
*outer
= ctx
->outer
;
591 while (outer
&& gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
592 outer
= outer
->outer
;
594 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
596 else if (is_variable_sized (var
))
598 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
599 x
= build_outer_var_ref (x
, ctx
, code
);
600 x
= build_simple_mem_ref (x
);
602 else if (is_taskreg_ctx (ctx
))
604 bool by_ref
= use_pointer_for_field (var
, NULL
);
605 x
= build_receiver_ref (var
, by_ref
, ctx
);
607 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
608 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
610 || (code
== OMP_CLAUSE_PRIVATE
611 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
612 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
613 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
615 /* #pragma omp simd isn't a worksharing construct, and can reference
616 even private vars in its linear etc. clauses.
617 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
618 to private vars in all worksharing constructs. */
620 if (outer
&& is_taskreg_ctx (outer
))
621 x
= lookup_decl (var
, outer
);
623 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
627 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
631 = splay_tree_lookup (outer
->field_map
,
632 (splay_tree_key
) &DECL_UID (var
));
635 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
638 x
= lookup_decl (var
, outer
);
642 tree field
= (tree
) n
->value
;
643 /* If the receiver record type was remapped in the child function,
644 remap the field into the new record type. */
645 x
= maybe_lookup_field (field
, outer
);
649 x
= build_simple_mem_ref (outer
->receiver_decl
);
650 x
= omp_build_component_ref (x
, field
);
651 if (use_pointer_for_field (var
, outer
))
652 x
= build_simple_mem_ref (x
);
657 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_GRID_BODY
)
659 outer
= outer
->outer
;
661 && gimple_code (outer
->stmt
) != GIMPLE_OMP_GRID_BODY
);
663 x
= lookup_decl (var
, outer
);
665 else if (omp_is_reference (var
))
666 /* This can happen with orphaned constructs. If var is reference, it is
667 possible it is shared and as such valid. */
669 else if (omp_member_access_dummy_var (var
))
676 tree t
= omp_member_access_dummy_var (var
);
679 x
= DECL_VALUE_EXPR (var
);
680 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
682 x
= unshare_and_remap (x
, t
, o
);
684 x
= unshare_expr (x
);
688 if (omp_is_reference (var
))
689 x
= build_simple_mem_ref (x
);
694 /* Build tree nodes to access the field for VAR on the sender side. */
697 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
699 tree field
= lookup_sfield (key
, ctx
);
700 return omp_build_component_ref (ctx
->sender_decl
, field
);
704 build_sender_ref (tree var
, omp_context
*ctx
)
706 return build_sender_ref ((splay_tree_key
) var
, ctx
);
709 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
710 BASE_POINTERS_RESTRICT, declare the field with restrict. */
713 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
715 tree field
, type
, sfield
= NULL_TREE
;
716 splay_tree_key key
= (splay_tree_key
) var
;
718 if ((mask
& 16) != 0)
720 key
= (splay_tree_key
) &DECL_NAME (var
);
721 gcc_checking_assert (key
!= (splay_tree_key
) var
);
725 key
= (splay_tree_key
) &DECL_UID (var
);
726 gcc_checking_assert (key
!= (splay_tree_key
) var
);
728 gcc_assert ((mask
& 1) == 0
729 || !splay_tree_lookup (ctx
->field_map
, key
));
730 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
731 || !splay_tree_lookup (ctx
->sfield_map
, key
));
732 gcc_assert ((mask
& 3) == 3
733 || !is_gimple_omp_oacc (ctx
->stmt
));
735 type
= TREE_TYPE (var
);
736 if ((mask
& 16) != 0)
737 type
= lang_hooks
.decls
.omp_array_data (var
, true);
739 /* Prevent redeclaring the var in the split-off function with a restrict
740 pointer type. Note that we only clear type itself, restrict qualifiers in
741 the pointed-to type will be ignored by points-to analysis. */
742 if (POINTER_TYPE_P (type
)
743 && TYPE_RESTRICT (type
))
744 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
748 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
749 type
= build_pointer_type (build_pointer_type (type
));
752 type
= build_pointer_type (type
);
753 else if ((mask
& 3) == 1 && omp_is_reference (var
))
754 type
= TREE_TYPE (type
);
756 field
= build_decl (DECL_SOURCE_LOCATION (var
),
757 FIELD_DECL
, DECL_NAME (var
), type
);
759 /* Remember what variable this field was created for. This does have a
760 side effect of making dwarf2out ignore this member, so for helpful
761 debugging we clear it later in delete_omp_context. */
762 DECL_ABSTRACT_ORIGIN (field
) = var
;
763 if ((mask
& 16) == 0 && type
== TREE_TYPE (var
))
765 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
766 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
767 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
770 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
774 insert_field_into_struct (ctx
->record_type
, field
);
775 if (ctx
->srecord_type
)
777 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
778 FIELD_DECL
, DECL_NAME (var
), type
);
779 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
780 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
781 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
782 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
783 insert_field_into_struct (ctx
->srecord_type
, sfield
);
788 if (ctx
->srecord_type
== NULL_TREE
)
792 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
793 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
794 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
796 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
797 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
798 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
799 insert_field_into_struct (ctx
->srecord_type
, sfield
);
800 splay_tree_insert (ctx
->sfield_map
,
801 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
802 (splay_tree_value
) sfield
);
806 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
807 : ctx
->srecord_type
, field
);
811 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
812 if ((mask
& 2) && ctx
->sfield_map
)
813 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
817 install_var_local (tree var
, omp_context
*ctx
)
819 tree new_var
= omp_copy_decl_1 (var
, ctx
);
820 insert_decl_map (&ctx
->cb
, var
, new_var
);
824 /* Adjust the replacement for DECL in CTX for the new context. This means
825 copying the DECL_VALUE_EXPR, and fixing up the type. */
828 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
832 new_decl
= lookup_decl (decl
, ctx
);
834 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
836 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
837 && DECL_HAS_VALUE_EXPR_P (decl
))
839 tree ve
= DECL_VALUE_EXPR (decl
);
840 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
841 SET_DECL_VALUE_EXPR (new_decl
, ve
);
842 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
845 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
847 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
848 if (size
== error_mark_node
)
849 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
850 DECL_SIZE (new_decl
) = size
;
852 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
853 if (size
== error_mark_node
)
854 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
855 DECL_SIZE_UNIT (new_decl
) = size
;
859 /* The callback for remap_decl. Search all containing contexts for a
860 mapping of the variable; this avoids having to duplicate the splay
861 tree ahead of time. We know a mapping doesn't already exist in the
862 given context. Create new mappings to implement default semantics. */
865 omp_copy_decl (tree var
, copy_body_data
*cb
)
867 omp_context
*ctx
= (omp_context
*) cb
;
870 if (TREE_CODE (var
) == LABEL_DECL
)
872 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
874 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
875 DECL_CONTEXT (new_var
) = current_function_decl
;
876 insert_decl_map (&ctx
->cb
, var
, new_var
);
880 while (!is_taskreg_ctx (ctx
))
885 new_var
= maybe_lookup_decl (var
, ctx
);
890 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
893 return error_mark_node
;
896 /* Create a new context, with OUTER_CTX being the surrounding context. */
899 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
901 omp_context
*ctx
= XCNEW (omp_context
);
903 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
904 (splay_tree_value
) ctx
);
909 ctx
->outer
= outer_ctx
;
910 ctx
->cb
= outer_ctx
->cb
;
911 ctx
->cb
.block
= NULL
;
912 ctx
->depth
= outer_ctx
->depth
+ 1;
916 ctx
->cb
.src_fn
= current_function_decl
;
917 ctx
->cb
.dst_fn
= current_function_decl
;
918 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
919 gcc_checking_assert (ctx
->cb
.src_node
);
920 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
921 ctx
->cb
.src_cfun
= cfun
;
922 ctx
->cb
.copy_decl
= omp_copy_decl
;
923 ctx
->cb
.eh_lp_nr
= 0;
924 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
925 ctx
->cb
.adjust_array_error_bounds
= true;
926 ctx
->cb
.dont_remap_vla_if_no_change
= true;
930 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
935 static gimple_seq
maybe_catch_exception (gimple_seq
);
937 /* Finalize task copyfn. */
940 finalize_task_copyfn (gomp_task
*task_stmt
)
942 struct function
*child_cfun
;
944 gimple_seq seq
= NULL
, new_seq
;
947 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
948 if (child_fn
== NULL_TREE
)
951 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
952 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
954 push_cfun (child_cfun
);
955 bind
= gimplify_body (child_fn
, false);
956 gimple_seq_add_stmt (&seq
, bind
);
957 new_seq
= maybe_catch_exception (seq
);
960 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
962 gimple_seq_add_stmt (&seq
, bind
);
964 gimple_set_body (child_fn
, seq
);
967 /* Inform the callgraph about the new function. */
968 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
969 node
->parallelized_function
= 1;
970 cgraph_node::add_new_function (child_fn
, false);
973 /* Destroy a omp_context data structures. Called through the splay tree
974 value delete callback. */
977 delete_omp_context (splay_tree_value value
)
979 omp_context
*ctx
= (omp_context
*) value
;
981 delete ctx
->cb
.decl_map
;
984 splay_tree_delete (ctx
->field_map
);
986 splay_tree_delete (ctx
->sfield_map
);
988 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
989 it produces corrupt debug information. */
990 if (ctx
->record_type
)
993 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
994 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
996 if (ctx
->srecord_type
)
999 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
1000 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1003 if (is_task_ctx (ctx
))
1004 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
1006 if (ctx
->task_reduction_map
)
1008 ctx
->task_reductions
.release ();
1009 delete ctx
->task_reduction_map
;
1012 delete ctx
->lastprivate_conditional_map
;
1017 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1021 fixup_child_record_type (omp_context
*ctx
)
1023 tree f
, type
= ctx
->record_type
;
1025 if (!ctx
->receiver_decl
)
1027 /* ??? It isn't sufficient to just call remap_type here, because
1028 variably_modified_type_p doesn't work the way we expect for
1029 record types. Testing each field for whether it needs remapping
1030 and creating a new record by hand works, however. */
1031 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
1032 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
1036 tree name
, new_fields
= NULL
;
1038 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1039 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1040 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1041 TYPE_DECL
, name
, type
);
1042 TYPE_NAME (type
) = name
;
1044 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1046 tree new_f
= copy_node (f
);
1047 DECL_CONTEXT (new_f
) = type
;
1048 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1049 DECL_CHAIN (new_f
) = new_fields
;
1050 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1051 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1053 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1057 /* Arrange to be able to look up the receiver field
1058 given the sender field. */
1059 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1060 (splay_tree_value
) new_f
);
1062 TYPE_FIELDS (type
) = nreverse (new_fields
);
1066 /* In a target region we never modify any of the pointers in *.omp_data_i,
1067 so attempt to help the optimizers. */
1068 if (is_gimple_omp_offloaded (ctx
->stmt
))
1069 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1071 TREE_TYPE (ctx
->receiver_decl
)
1072 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1075 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1076 specified by CLAUSES. */
1079 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1082 bool scan_array_reductions
= false;
1084 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1088 switch (OMP_CLAUSE_CODE (c
))
1090 case OMP_CLAUSE_PRIVATE
:
1091 decl
= OMP_CLAUSE_DECL (c
);
1092 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1094 else if (!is_variable_sized (decl
))
1095 install_var_local (decl
, ctx
);
1098 case OMP_CLAUSE_SHARED
:
1099 decl
= OMP_CLAUSE_DECL (c
);
1100 /* Ignore shared directives in teams construct inside of
1101 target construct. */
1102 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1103 && !is_host_teams_ctx (ctx
))
1105 /* Global variables don't need to be copied,
1106 the receiver side will use them directly. */
1107 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1108 if (is_global_var (odecl
))
1110 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1113 gcc_assert (is_taskreg_ctx (ctx
));
1114 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1115 || !is_variable_sized (decl
));
1116 /* Global variables don't need to be copied,
1117 the receiver side will use them directly. */
1118 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1120 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1122 use_pointer_for_field (decl
, ctx
);
1125 by_ref
= use_pointer_for_field (decl
, NULL
);
1126 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1127 || TREE_ADDRESSABLE (decl
)
1129 || omp_is_reference (decl
))
1131 by_ref
= use_pointer_for_field (decl
, ctx
);
1132 install_var_field (decl
, by_ref
, 3, ctx
);
1133 install_var_local (decl
, ctx
);
1136 /* We don't need to copy const scalar vars back. */
1137 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1140 case OMP_CLAUSE_REDUCTION
:
1141 case OMP_CLAUSE_IN_REDUCTION
:
1142 decl
= OMP_CLAUSE_DECL (c
);
1143 if (TREE_CODE (decl
) == MEM_REF
)
1145 tree t
= TREE_OPERAND (decl
, 0);
1146 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1147 t
= TREE_OPERAND (t
, 0);
1148 if (TREE_CODE (t
) == INDIRECT_REF
1149 || TREE_CODE (t
) == ADDR_EXPR
)
1150 t
= TREE_OPERAND (t
, 0);
1151 install_var_local (t
, ctx
);
1152 if (is_taskreg_ctx (ctx
)
1153 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1154 || (is_task_ctx (ctx
)
1155 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1156 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1157 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1158 == POINTER_TYPE
)))))
1159 && !is_variable_sized (t
)
1160 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1161 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1162 && !is_task_ctx (ctx
))))
1164 by_ref
= use_pointer_for_field (t
, NULL
);
1165 if (is_task_ctx (ctx
)
1166 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1167 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1169 install_var_field (t
, false, 1, ctx
);
1170 install_var_field (t
, by_ref
, 2, ctx
);
1173 install_var_field (t
, by_ref
, 3, ctx
);
1177 if (is_task_ctx (ctx
)
1178 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1179 && OMP_CLAUSE_REDUCTION_TASK (c
)
1180 && is_parallel_ctx (ctx
)))
1182 /* Global variables don't need to be copied,
1183 the receiver side will use them directly. */
1184 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1186 by_ref
= use_pointer_for_field (decl
, ctx
);
1187 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1188 install_var_field (decl
, by_ref
, 3, ctx
);
1190 install_var_local (decl
, ctx
);
1193 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1194 && OMP_CLAUSE_REDUCTION_TASK (c
))
1196 install_var_local (decl
, ctx
);
1201 case OMP_CLAUSE_LASTPRIVATE
:
1202 /* Let the corresponding firstprivate clause create
1204 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1208 case OMP_CLAUSE_FIRSTPRIVATE
:
1209 case OMP_CLAUSE_LINEAR
:
1210 decl
= OMP_CLAUSE_DECL (c
);
1212 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1213 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1214 && is_gimple_omp_offloaded (ctx
->stmt
))
1216 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1217 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1218 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1219 install_var_field (decl
, true, 3, ctx
);
1221 install_var_field (decl
, false, 3, ctx
);
1223 if (is_variable_sized (decl
))
1225 if (is_task_ctx (ctx
))
1226 install_var_field (decl
, false, 1, ctx
);
1229 else if (is_taskreg_ctx (ctx
))
1232 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1233 by_ref
= use_pointer_for_field (decl
, NULL
);
1235 if (is_task_ctx (ctx
)
1236 && (global
|| by_ref
|| omp_is_reference (decl
)))
1238 install_var_field (decl
, false, 1, ctx
);
1240 install_var_field (decl
, by_ref
, 2, ctx
);
1243 install_var_field (decl
, by_ref
, 3, ctx
);
1245 install_var_local (decl
, ctx
);
1248 case OMP_CLAUSE_USE_DEVICE_PTR
:
1249 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1250 decl
= OMP_CLAUSE_DECL (c
);
1252 /* Fortran array descriptors. */
1253 if (lang_hooks
.decls
.omp_array_data (decl
, true))
1254 install_var_field (decl
, false, 19, ctx
);
1255 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
1256 && !omp_is_reference (decl
)
1257 && !omp_is_allocatable_or_ptr (decl
))
1258 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1259 install_var_field (decl
, true, 11, ctx
);
1261 install_var_field (decl
, false, 11, ctx
);
1262 if (DECL_SIZE (decl
)
1263 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1265 tree decl2
= DECL_VALUE_EXPR (decl
);
1266 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1267 decl2
= TREE_OPERAND (decl2
, 0);
1268 gcc_assert (DECL_P (decl2
));
1269 install_var_local (decl2
, ctx
);
1271 install_var_local (decl
, ctx
);
1274 case OMP_CLAUSE_IS_DEVICE_PTR
:
1275 decl
= OMP_CLAUSE_DECL (c
);
1278 case OMP_CLAUSE__LOOPTEMP_
:
1279 case OMP_CLAUSE__REDUCTEMP_
:
1280 gcc_assert (is_taskreg_ctx (ctx
));
1281 decl
= OMP_CLAUSE_DECL (c
);
1282 install_var_field (decl
, false, 3, ctx
);
1283 install_var_local (decl
, ctx
);
1286 case OMP_CLAUSE_COPYPRIVATE
:
1287 case OMP_CLAUSE_COPYIN
:
1288 decl
= OMP_CLAUSE_DECL (c
);
1289 by_ref
= use_pointer_for_field (decl
, NULL
);
1290 install_var_field (decl
, by_ref
, 3, ctx
);
1293 case OMP_CLAUSE_FINAL
:
1295 case OMP_CLAUSE_NUM_THREADS
:
1296 case OMP_CLAUSE_NUM_TEAMS
:
1297 case OMP_CLAUSE_THREAD_LIMIT
:
1298 case OMP_CLAUSE_DEVICE
:
1299 case OMP_CLAUSE_SCHEDULE
:
1300 case OMP_CLAUSE_DIST_SCHEDULE
:
1301 case OMP_CLAUSE_DEPEND
:
1302 case OMP_CLAUSE_PRIORITY
:
1303 case OMP_CLAUSE_GRAINSIZE
:
1304 case OMP_CLAUSE_NUM_TASKS
:
1305 case OMP_CLAUSE_NUM_GANGS
:
1306 case OMP_CLAUSE_NUM_WORKERS
:
1307 case OMP_CLAUSE_VECTOR_LENGTH
:
1309 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1313 case OMP_CLAUSE_FROM
:
1314 case OMP_CLAUSE_MAP
:
1316 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1317 decl
= OMP_CLAUSE_DECL (c
);
1318 /* Global variables with "omp declare target" attribute
1319 don't need to be copied, the receiver side will use them
1320 directly. However, global variables with "omp declare target link"
1321 attribute need to be copied. Or when ALWAYS modifier is used. */
1322 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1324 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1325 && (OMP_CLAUSE_MAP_KIND (c
)
1326 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1327 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1328 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1329 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1330 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1331 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1332 && varpool_node::get_create (decl
)->offloadable
1333 && !lookup_attribute ("omp declare target link",
1334 DECL_ATTRIBUTES (decl
)))
1336 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1337 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1339 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1340 not offloaded; there is nothing to map for those. */
1341 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1342 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1343 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1346 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1347 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1348 || (OMP_CLAUSE_MAP_KIND (c
)
1349 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1351 if (TREE_CODE (decl
) == COMPONENT_REF
1352 || (TREE_CODE (decl
) == INDIRECT_REF
1353 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1354 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1355 == REFERENCE_TYPE
)))
1357 if (DECL_SIZE (decl
)
1358 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1360 tree decl2
= DECL_VALUE_EXPR (decl
);
1361 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1362 decl2
= TREE_OPERAND (decl2
, 0);
1363 gcc_assert (DECL_P (decl2
));
1364 install_var_local (decl2
, ctx
);
1366 install_var_local (decl
, ctx
);
1371 if (DECL_SIZE (decl
)
1372 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1374 tree decl2
= DECL_VALUE_EXPR (decl
);
1375 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1376 decl2
= TREE_OPERAND (decl2
, 0);
1377 gcc_assert (DECL_P (decl2
));
1378 install_var_field (decl2
, true, 3, ctx
);
1379 install_var_local (decl2
, ctx
);
1380 install_var_local (decl
, ctx
);
1384 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1385 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1386 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1387 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1388 install_var_field (decl
, true, 7, ctx
);
1390 install_var_field (decl
, true, 3, ctx
);
1391 if (is_gimple_omp_offloaded (ctx
->stmt
)
1392 && !OMP_CLAUSE_MAP_IN_REDUCTION (c
))
1393 install_var_local (decl
, ctx
);
1398 tree base
= get_base_address (decl
);
1399 tree nc
= OMP_CLAUSE_CHAIN (c
);
1402 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1403 && OMP_CLAUSE_DECL (nc
) == base
1404 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1405 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1407 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1408 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1414 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1415 decl
= OMP_CLAUSE_DECL (c
);
1417 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1418 (splay_tree_key
) decl
));
1420 = build_decl (OMP_CLAUSE_LOCATION (c
),
1421 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1422 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1423 insert_field_into_struct (ctx
->record_type
, field
);
1424 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1425 (splay_tree_value
) field
);
1430 case OMP_CLAUSE__GRIDDIM_
:
1433 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c
), ctx
->outer
);
1434 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c
), ctx
->outer
);
1438 case OMP_CLAUSE_ORDER
:
1439 ctx
->order_concurrent
= true;
1442 case OMP_CLAUSE_BIND
:
1446 case OMP_CLAUSE_NOWAIT
:
1447 case OMP_CLAUSE_ORDERED
:
1448 case OMP_CLAUSE_COLLAPSE
:
1449 case OMP_CLAUSE_UNTIED
:
1450 case OMP_CLAUSE_MERGEABLE
:
1451 case OMP_CLAUSE_PROC_BIND
:
1452 case OMP_CLAUSE_SAFELEN
:
1453 case OMP_CLAUSE_SIMDLEN
:
1454 case OMP_CLAUSE_THREADS
:
1455 case OMP_CLAUSE_SIMD
:
1456 case OMP_CLAUSE_NOGROUP
:
1457 case OMP_CLAUSE_DEFAULTMAP
:
1458 case OMP_CLAUSE_ASYNC
:
1459 case OMP_CLAUSE_WAIT
:
1460 case OMP_CLAUSE_GANG
:
1461 case OMP_CLAUSE_WORKER
:
1462 case OMP_CLAUSE_VECTOR
:
1463 case OMP_CLAUSE_INDEPENDENT
:
1464 case OMP_CLAUSE_AUTO
:
1465 case OMP_CLAUSE_SEQ
:
1466 case OMP_CLAUSE_TILE
:
1467 case OMP_CLAUSE__SIMT_
:
1468 case OMP_CLAUSE_DEFAULT
:
1469 case OMP_CLAUSE_NONTEMPORAL
:
1470 case OMP_CLAUSE_IF_PRESENT
:
1471 case OMP_CLAUSE_FINALIZE
:
1472 case OMP_CLAUSE_TASK_REDUCTION
:
1475 case OMP_CLAUSE_ALIGNED
:
1476 decl
= OMP_CLAUSE_DECL (c
);
1477 if (is_global_var (decl
)
1478 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1479 install_var_local (decl
, ctx
);
1482 case OMP_CLAUSE__CONDTEMP_
:
1483 decl
= OMP_CLAUSE_DECL (c
);
1484 if (is_parallel_ctx (ctx
))
1486 install_var_field (decl
, false, 3, ctx
);
1487 install_var_local (decl
, ctx
);
1489 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1490 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1491 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1492 install_var_local (decl
, ctx
);
1495 case OMP_CLAUSE__CACHE_
:
1501 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1503 switch (OMP_CLAUSE_CODE (c
))
1505 case OMP_CLAUSE_LASTPRIVATE
:
1506 /* Let the corresponding firstprivate clause create
1508 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1509 scan_array_reductions
= true;
1510 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1514 case OMP_CLAUSE_FIRSTPRIVATE
:
1515 case OMP_CLAUSE_PRIVATE
:
1516 case OMP_CLAUSE_LINEAR
:
1517 case OMP_CLAUSE_IS_DEVICE_PTR
:
1518 decl
= OMP_CLAUSE_DECL (c
);
1519 if (is_variable_sized (decl
))
1521 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1522 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1523 && is_gimple_omp_offloaded (ctx
->stmt
))
1525 tree decl2
= DECL_VALUE_EXPR (decl
);
1526 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1527 decl2
= TREE_OPERAND (decl2
, 0);
1528 gcc_assert (DECL_P (decl2
));
1529 install_var_local (decl2
, ctx
);
1530 fixup_remapped_decl (decl2
, ctx
, false);
1532 install_var_local (decl
, ctx
);
1534 fixup_remapped_decl (decl
, ctx
,
1535 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1536 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1537 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1538 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1539 scan_array_reductions
= true;
1542 case OMP_CLAUSE_REDUCTION
:
1543 case OMP_CLAUSE_IN_REDUCTION
:
1544 decl
= OMP_CLAUSE_DECL (c
);
1545 if (TREE_CODE (decl
) != MEM_REF
)
1547 if (is_variable_sized (decl
))
1548 install_var_local (decl
, ctx
);
1549 fixup_remapped_decl (decl
, ctx
, false);
1551 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1552 scan_array_reductions
= true;
1555 case OMP_CLAUSE_TASK_REDUCTION
:
1556 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1557 scan_array_reductions
= true;
1560 case OMP_CLAUSE_SHARED
:
1561 /* Ignore shared directives in teams construct inside of
1562 target construct. */
1563 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1564 && !is_host_teams_ctx (ctx
))
1566 decl
= OMP_CLAUSE_DECL (c
);
1567 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1569 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1571 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1574 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1575 install_var_field (decl
, by_ref
, 11, ctx
);
1578 fixup_remapped_decl (decl
, ctx
, false);
1581 case OMP_CLAUSE_MAP
:
1582 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1584 decl
= OMP_CLAUSE_DECL (c
);
1586 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1587 && (OMP_CLAUSE_MAP_KIND (c
)
1588 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1589 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1590 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1591 && varpool_node::get_create (decl
)->offloadable
)
1595 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1596 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1597 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1598 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1600 tree new_decl
= lookup_decl (decl
, ctx
);
1601 TREE_TYPE (new_decl
)
1602 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1604 else if (DECL_SIZE (decl
)
1605 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1607 tree decl2
= DECL_VALUE_EXPR (decl
);
1608 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1609 decl2
= TREE_OPERAND (decl2
, 0);
1610 gcc_assert (DECL_P (decl2
));
1611 fixup_remapped_decl (decl2
, ctx
, false);
1612 fixup_remapped_decl (decl
, ctx
, true);
1615 fixup_remapped_decl (decl
, ctx
, false);
1619 case OMP_CLAUSE_COPYPRIVATE
:
1620 case OMP_CLAUSE_COPYIN
:
1621 case OMP_CLAUSE_DEFAULT
:
1623 case OMP_CLAUSE_NUM_THREADS
:
1624 case OMP_CLAUSE_NUM_TEAMS
:
1625 case OMP_CLAUSE_THREAD_LIMIT
:
1626 case OMP_CLAUSE_DEVICE
:
1627 case OMP_CLAUSE_SCHEDULE
:
1628 case OMP_CLAUSE_DIST_SCHEDULE
:
1629 case OMP_CLAUSE_NOWAIT
:
1630 case OMP_CLAUSE_ORDERED
:
1631 case OMP_CLAUSE_COLLAPSE
:
1632 case OMP_CLAUSE_UNTIED
:
1633 case OMP_CLAUSE_FINAL
:
1634 case OMP_CLAUSE_MERGEABLE
:
1635 case OMP_CLAUSE_PROC_BIND
:
1636 case OMP_CLAUSE_SAFELEN
:
1637 case OMP_CLAUSE_SIMDLEN
:
1638 case OMP_CLAUSE_ALIGNED
:
1639 case OMP_CLAUSE_DEPEND
:
1640 case OMP_CLAUSE__LOOPTEMP_
:
1641 case OMP_CLAUSE__REDUCTEMP_
:
1643 case OMP_CLAUSE_FROM
:
1644 case OMP_CLAUSE_PRIORITY
:
1645 case OMP_CLAUSE_GRAINSIZE
:
1646 case OMP_CLAUSE_NUM_TASKS
:
1647 case OMP_CLAUSE_THREADS
:
1648 case OMP_CLAUSE_SIMD
:
1649 case OMP_CLAUSE_NOGROUP
:
1650 case OMP_CLAUSE_DEFAULTMAP
:
1651 case OMP_CLAUSE_ORDER
:
1652 case OMP_CLAUSE_BIND
:
1653 case OMP_CLAUSE_USE_DEVICE_PTR
:
1654 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1655 case OMP_CLAUSE_NONTEMPORAL
:
1656 case OMP_CLAUSE_ASYNC
:
1657 case OMP_CLAUSE_WAIT
:
1658 case OMP_CLAUSE_NUM_GANGS
:
1659 case OMP_CLAUSE_NUM_WORKERS
:
1660 case OMP_CLAUSE_VECTOR_LENGTH
:
1661 case OMP_CLAUSE_GANG
:
1662 case OMP_CLAUSE_WORKER
:
1663 case OMP_CLAUSE_VECTOR
:
1664 case OMP_CLAUSE_INDEPENDENT
:
1665 case OMP_CLAUSE_AUTO
:
1666 case OMP_CLAUSE_SEQ
:
1667 case OMP_CLAUSE_TILE
:
1668 case OMP_CLAUSE__GRIDDIM_
:
1669 case OMP_CLAUSE__SIMT_
:
1670 case OMP_CLAUSE_IF_PRESENT
:
1671 case OMP_CLAUSE_FINALIZE
:
1672 case OMP_CLAUSE__CONDTEMP_
:
1675 case OMP_CLAUSE__CACHE_
:
1681 gcc_checking_assert (!scan_array_reductions
1682 || !is_gimple_omp_oacc (ctx
->stmt
));
1683 if (scan_array_reductions
)
1685 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1686 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1687 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1688 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1689 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1691 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), ctx
);
1692 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
1694 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1695 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1696 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1697 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1698 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1699 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1703 /* Create a new name for omp child function. Returns an identifier. */
1706 create_omp_child_function_name (bool task_copy
)
1708 return clone_function_name_numbered (current_function_decl
,
1709 task_copy
? "_omp_cpyfn" : "_omp_fn");
1712 /* Return true if CTX may belong to offloaded code: either if current function
1713 is offloaded, or any enclosing context corresponds to a target region. */
1716 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1718 if (cgraph_node::get (current_function_decl
)->offloadable
)
1720 for (; ctx
; ctx
= ctx
->outer
)
1721 if (is_gimple_omp_offloaded (ctx
->stmt
))
1726 /* Build a decl for the omp child function. It'll not contain a body
1727 yet, just the bare decl. */
1730 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1732 tree decl
, type
, name
, t
;
1734 name
= create_omp_child_function_name (task_copy
);
1736 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1737 ptr_type_node
, NULL_TREE
);
1739 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1741 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1743 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1746 ctx
->cb
.dst_fn
= decl
;
1748 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1750 TREE_STATIC (decl
) = 1;
1751 TREE_USED (decl
) = 1;
1752 DECL_ARTIFICIAL (decl
) = 1;
1753 DECL_IGNORED_P (decl
) = 0;
1754 TREE_PUBLIC (decl
) = 0;
1755 DECL_UNINLINABLE (decl
) = 1;
1756 DECL_EXTERNAL (decl
) = 0;
1757 DECL_CONTEXT (decl
) = NULL_TREE
;
1758 DECL_INITIAL (decl
) = make_node (BLOCK
);
1759 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1760 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1761 /* Remove omp declare simd attribute from the new attributes. */
1762 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1764 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1767 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1768 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1769 *p
= TREE_CHAIN (*p
);
1772 tree chain
= TREE_CHAIN (*p
);
1773 *p
= copy_node (*p
);
1774 p
= &TREE_CHAIN (*p
);
1778 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1779 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1780 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
1781 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
1782 DECL_FUNCTION_VERSIONED (decl
)
1783 = DECL_FUNCTION_VERSIONED (current_function_decl
);
1785 if (omp_maybe_offloaded_ctx (ctx
))
1787 cgraph_node::get_create (decl
)->offloadable
= 1;
1788 if (ENABLE_OFFLOADING
)
1789 g
->have_offload
= true;
1792 if (cgraph_node::get_create (decl
)->offloadable
1793 && !lookup_attribute ("omp declare target",
1794 DECL_ATTRIBUTES (current_function_decl
)))
1796 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1797 ? "omp target entrypoint"
1798 : "omp declare target");
1799 DECL_ATTRIBUTES (decl
)
1800 = tree_cons (get_identifier (target_attr
),
1801 NULL_TREE
, DECL_ATTRIBUTES (decl
));
1804 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1805 RESULT_DECL
, NULL_TREE
, void_type_node
);
1806 DECL_ARTIFICIAL (t
) = 1;
1807 DECL_IGNORED_P (t
) = 1;
1808 DECL_CONTEXT (t
) = decl
;
1809 DECL_RESULT (decl
) = t
;
1811 tree data_name
= get_identifier (".omp_data_i");
1812 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
1814 DECL_ARTIFICIAL (t
) = 1;
1815 DECL_NAMELESS (t
) = 1;
1816 DECL_ARG_TYPE (t
) = ptr_type_node
;
1817 DECL_CONTEXT (t
) = current_function_decl
;
1819 TREE_READONLY (t
) = 1;
1820 DECL_ARGUMENTS (decl
) = t
;
1822 ctx
->receiver_decl
= t
;
1825 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1826 PARM_DECL
, get_identifier (".omp_data_o"),
1828 DECL_ARTIFICIAL (t
) = 1;
1829 DECL_NAMELESS (t
) = 1;
1830 DECL_ARG_TYPE (t
) = ptr_type_node
;
1831 DECL_CONTEXT (t
) = current_function_decl
;
1833 TREE_ADDRESSABLE (t
) = 1;
1834 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1835 DECL_ARGUMENTS (decl
) = t
;
1838 /* Allocate memory for the function structure. The call to
1839 allocate_struct_function clobbers CFUN, so we need to restore
1841 push_struct_function (decl
);
1842 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
1843 init_tree_ssa (cfun
);
1847 /* Callback for walk_gimple_seq. Check if combined parallel
1848 contains gimple_omp_for_combined_into_p OMP_FOR. */
1851 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
1852 bool *handled_ops_p
,
1853 struct walk_stmt_info
*wi
)
1855 gimple
*stmt
= gsi_stmt (*gsi_p
);
1857 *handled_ops_p
= true;
1858 switch (gimple_code (stmt
))
1862 case GIMPLE_OMP_FOR
:
1863 if (gimple_omp_for_combined_into_p (stmt
)
1864 && gimple_omp_for_kind (stmt
)
1865 == *(const enum gf_mask
*) (wi
->info
))
1868 return integer_zero_node
;
1877 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1880 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
1881 omp_context
*outer_ctx
)
1883 struct walk_stmt_info wi
;
1885 memset (&wi
, 0, sizeof (wi
));
1887 wi
.info
= (void *) &msk
;
1888 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
1889 if (wi
.info
!= (void *) &msk
)
1891 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
1892 struct omp_for_data fd
;
1893 omp_extract_for_data (for_stmt
, &fd
, NULL
);
1894 /* We need two temporaries with fd.loop.v type (istart/iend)
1895 and then (fd.collapse - 1) temporaries with the same
1896 type for count2 ... countN-1 vars if not constant. */
1897 size_t count
= 2, i
;
1898 tree type
= fd
.iter_type
;
1900 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
1902 count
+= fd
.collapse
- 1;
1903 /* If there are lastprivate clauses on the inner
1904 GIMPLE_OMP_FOR, add one more temporaries for the total number
1905 of iterations (product of count1 ... countN-1). */
1906 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
1907 OMP_CLAUSE_LASTPRIVATE
))
1909 else if (msk
== GF_OMP_FOR_KIND_FOR
1910 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1911 OMP_CLAUSE_LASTPRIVATE
))
1914 for (i
= 0; i
< count
; i
++)
1916 tree temp
= create_tmp_var (type
);
1917 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
1918 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1919 OMP_CLAUSE_DECL (c
) = temp
;
1920 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1921 gimple_omp_taskreg_set_clauses (stmt
, c
);
1924 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
1925 && omp_find_clause (gimple_omp_task_clauses (stmt
),
1926 OMP_CLAUSE_REDUCTION
))
1928 tree type
= build_pointer_type (pointer_sized_int_node
);
1929 tree temp
= create_tmp_var (type
);
1930 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1931 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1932 OMP_CLAUSE_DECL (c
) = temp
;
1933 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
1934 gimple_omp_task_set_clauses (stmt
, c
);
1938 /* Scan an OpenMP parallel directive. */
1941 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1945 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
1947 /* Ignore parallel directives with empty bodies, unless there
1948 are copyin clauses. */
1950 && empty_body_p (gimple_omp_body (stmt
))
1951 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1952 OMP_CLAUSE_COPYIN
) == NULL
)
1954 gsi_replace (gsi
, gimple_build_nop (), false);
1958 if (gimple_omp_parallel_combined_p (stmt
))
1959 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
1960 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1961 OMP_CLAUSE_REDUCTION
);
1962 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
1963 if (OMP_CLAUSE_REDUCTION_TASK (c
))
1965 tree type
= build_pointer_type (pointer_sized_int_node
);
1966 tree temp
= create_tmp_var (type
);
1967 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1969 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1970 OMP_CLAUSE_DECL (c
) = temp
;
1971 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
1972 gimple_omp_parallel_set_clauses (stmt
, c
);
1975 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
1978 ctx
= new_omp_context (stmt
, outer_ctx
);
1979 taskreg_contexts
.safe_push (ctx
);
1980 if (taskreg_nesting_level
> 1)
1981 ctx
->is_nested
= true;
1982 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1983 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1984 name
= create_tmp_var_name (".omp_data_s");
1985 name
= build_decl (gimple_location (stmt
),
1986 TYPE_DECL
, name
, ctx
->record_type
);
1987 DECL_ARTIFICIAL (name
) = 1;
1988 DECL_NAMELESS (name
) = 1;
1989 TYPE_NAME (ctx
->record_type
) = name
;
1990 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1991 if (!gimple_omp_parallel_grid_phony (stmt
))
1993 create_omp_child_function (ctx
, false);
1994 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1997 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
1998 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2000 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2001 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2004 /* Scan an OpenMP task directive. */
2007 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2011 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
2013 /* Ignore task directives with empty bodies, unless they have depend
2016 && gimple_omp_body (stmt
)
2017 && empty_body_p (gimple_omp_body (stmt
))
2018 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
2020 gsi_replace (gsi
, gimple_build_nop (), false);
2024 if (gimple_omp_task_taskloop_p (stmt
))
2025 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
2027 ctx
= new_omp_context (stmt
, outer_ctx
);
2029 if (gimple_omp_task_taskwait_p (stmt
))
2031 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2035 taskreg_contexts
.safe_push (ctx
);
2036 if (taskreg_nesting_level
> 1)
2037 ctx
->is_nested
= true;
2038 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2039 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2040 name
= create_tmp_var_name (".omp_data_s");
2041 name
= build_decl (gimple_location (stmt
),
2042 TYPE_DECL
, name
, ctx
->record_type
);
2043 DECL_ARTIFICIAL (name
) = 1;
2044 DECL_NAMELESS (name
) = 1;
2045 TYPE_NAME (ctx
->record_type
) = name
;
2046 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2047 create_omp_child_function (ctx
, false);
2048 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2050 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2052 if (ctx
->srecord_type
)
2054 name
= create_tmp_var_name (".omp_data_a");
2055 name
= build_decl (gimple_location (stmt
),
2056 TYPE_DECL
, name
, ctx
->srecord_type
);
2057 DECL_ARTIFICIAL (name
) = 1;
2058 DECL_NAMELESS (name
) = 1;
2059 TYPE_NAME (ctx
->srecord_type
) = name
;
2060 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2061 create_omp_child_function (ctx
, true);
2064 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2066 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2068 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2069 t
= build_int_cst (long_integer_type_node
, 0);
2070 gimple_omp_task_set_arg_size (stmt
, t
);
2071 t
= build_int_cst (long_integer_type_node
, 1);
2072 gimple_omp_task_set_arg_align (stmt
, t
);
2076 /* Helper function for finish_taskreg_scan, called through walk_tree.
2077 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2078 tree, replace it in the expression. */
2081 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2085 omp_context
*ctx
= (omp_context
*) data
;
2086 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
2089 if (DECL_HAS_VALUE_EXPR_P (t
))
2090 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2095 else if (IS_TYPE_OR_DECL_P (*tp
))
2100 /* If any decls have been made addressable during scan_omp,
2101 adjust their fields if needed, and layout record types
2102 of parallel/task constructs. */
2105 finish_taskreg_scan (omp_context
*ctx
)
2107 if (ctx
->record_type
== NULL_TREE
)
2110 /* If any task_shared_vars were needed, verify all
2111 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2112 statements if use_pointer_for_field hasn't changed
2113 because of that. If it did, update field types now. */
2114 if (task_shared_vars
)
2118 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2119 c
; c
= OMP_CLAUSE_CHAIN (c
))
2120 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2121 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2123 tree decl
= OMP_CLAUSE_DECL (c
);
2125 /* Global variables don't need to be copied,
2126 the receiver side will use them directly. */
2127 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2129 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
2130 || !use_pointer_for_field (decl
, ctx
))
2132 tree field
= lookup_field (decl
, ctx
);
2133 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2134 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2136 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2137 TREE_THIS_VOLATILE (field
) = 0;
2138 DECL_USER_ALIGN (field
) = 0;
2139 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2140 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2141 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
2142 if (ctx
->srecord_type
)
2144 tree sfield
= lookup_sfield (decl
, ctx
);
2145 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2146 TREE_THIS_VOLATILE (sfield
) = 0;
2147 DECL_USER_ALIGN (sfield
) = 0;
2148 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2149 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2150 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
2155 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2157 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2158 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2161 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2162 expects to find it at the start of data. */
2163 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2164 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2168 *p
= DECL_CHAIN (*p
);
2172 p
= &DECL_CHAIN (*p
);
2173 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2174 TYPE_FIELDS (ctx
->record_type
) = f
;
2176 layout_type (ctx
->record_type
);
2177 fixup_child_record_type (ctx
);
2179 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2181 layout_type (ctx
->record_type
);
2182 fixup_child_record_type (ctx
);
2186 location_t loc
= gimple_location (ctx
->stmt
);
2187 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2188 /* Move VLA fields to the end. */
2189 p
= &TYPE_FIELDS (ctx
->record_type
);
2191 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2192 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2195 *p
= TREE_CHAIN (*p
);
2196 TREE_CHAIN (*q
) = NULL_TREE
;
2197 q
= &TREE_CHAIN (*q
);
2200 p
= &DECL_CHAIN (*p
);
2202 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2204 /* Move fields corresponding to first and second _looptemp_
2205 clause first. There are filled by GOMP_taskloop
2206 and thus need to be in specific positions. */
2207 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2208 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2209 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2210 OMP_CLAUSE__LOOPTEMP_
);
2211 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2212 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2213 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2214 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
2215 p
= &TYPE_FIELDS (ctx
->record_type
);
2217 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2218 *p
= DECL_CHAIN (*p
);
2220 p
= &DECL_CHAIN (*p
);
2221 DECL_CHAIN (f1
) = f2
;
2224 DECL_CHAIN (f2
) = f3
;
2225 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2228 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2229 TYPE_FIELDS (ctx
->record_type
) = f1
;
2230 if (ctx
->srecord_type
)
2232 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2233 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2235 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2236 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2238 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2239 *p
= DECL_CHAIN (*p
);
2241 p
= &DECL_CHAIN (*p
);
2242 DECL_CHAIN (f1
) = f2
;
2243 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2246 DECL_CHAIN (f2
) = f3
;
2247 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2250 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2251 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2254 layout_type (ctx
->record_type
);
2255 fixup_child_record_type (ctx
);
2256 if (ctx
->srecord_type
)
2257 layout_type (ctx
->srecord_type
);
2258 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2259 TYPE_SIZE_UNIT (ctx
->record_type
));
2260 if (TREE_CODE (t
) != INTEGER_CST
)
2262 t
= unshare_expr (t
);
2263 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2265 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2266 t
= build_int_cst (long_integer_type_node
,
2267 TYPE_ALIGN_UNIT (ctx
->record_type
));
2268 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2272 /* Find the enclosing offload context. */
2274 static omp_context
*
2275 enclosing_target_ctx (omp_context
*ctx
)
2277 for (; ctx
; ctx
= ctx
->outer
)
2278 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2284 /* Return true if ctx is part of an oacc kernels region. */
2287 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2289 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2291 gimple
*stmt
= ctx
->stmt
;
2292 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2293 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2300 /* Check the parallelism clauses inside a kernels regions.
2301 Until kernels handling moves to use the same loop indirection
2302 scheme as parallel, we need to do this checking early. */
2305 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2307 bool checking
= true;
2308 unsigned outer_mask
= 0;
2309 unsigned this_mask
= 0;
2310 bool has_seq
= false, has_auto
= false;
2313 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2317 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2319 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2322 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2324 switch (OMP_CLAUSE_CODE (c
))
2326 case OMP_CLAUSE_GANG
:
2327 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2329 case OMP_CLAUSE_WORKER
:
2330 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2332 case OMP_CLAUSE_VECTOR
:
2333 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2335 case OMP_CLAUSE_SEQ
:
2338 case OMP_CLAUSE_AUTO
:
2348 if (has_seq
&& (this_mask
|| has_auto
))
2349 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2350 " OpenACC loop specifiers");
2351 else if (has_auto
&& this_mask
)
2352 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2353 " OpenACC loop specifiers");
2355 if (this_mask
& outer_mask
)
2356 error_at (gimple_location (stmt
), "inner loop uses same"
2357 " OpenACC parallelism as containing loop");
2360 return outer_mask
| this_mask
;
2363 /* Scan a GIMPLE_OMP_FOR. */
2365 static omp_context
*
2366 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2370 tree clauses
= gimple_omp_for_clauses (stmt
);
2372 ctx
= new_omp_context (stmt
, outer_ctx
);
2374 if (is_gimple_omp_oacc (stmt
))
2376 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2378 if (!tgt
|| is_oacc_parallel (tgt
))
2379 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2381 char const *check
= NULL
;
2383 switch (OMP_CLAUSE_CODE (c
))
2385 case OMP_CLAUSE_GANG
:
2389 case OMP_CLAUSE_WORKER
:
2393 case OMP_CLAUSE_VECTOR
:
2401 if (check
&& OMP_CLAUSE_OPERAND (c
, 0))
2402 error_at (gimple_location (stmt
),
2403 "argument not permitted on %qs clause in"
2404 " OpenACC %<parallel%>", check
);
2407 if (tgt
&& is_oacc_kernels (tgt
))
2409 /* Strip out reductions, as they are not handled yet. */
2410 tree
*prev_ptr
= &clauses
;
2412 while (tree probe
= *prev_ptr
)
2414 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2416 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2417 *prev_ptr
= *next_ptr
;
2419 prev_ptr
= next_ptr
;
2422 gimple_omp_for_set_clauses (stmt
, clauses
);
2423 check_oacc_kernel_gwv (stmt
, ctx
);
2427 scan_sharing_clauses (clauses
, ctx
);
2429 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2430 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2432 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2433 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2434 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2435 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2437 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2441 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2444 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2445 omp_context
*outer_ctx
)
2447 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2448 gsi_replace (gsi
, bind
, false);
2449 gimple_seq seq
= NULL
;
2450 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2451 tree cond
= create_tmp_var_raw (integer_type_node
);
2452 DECL_CONTEXT (cond
) = current_function_decl
;
2453 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2454 gimple_bind_set_vars (bind
, cond
);
2455 gimple_call_set_lhs (g
, cond
);
2456 gimple_seq_add_stmt (&seq
, g
);
2457 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2458 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2459 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2460 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2461 gimple_seq_add_stmt (&seq
, g
);
2462 g
= gimple_build_label (lab1
);
2463 gimple_seq_add_stmt (&seq
, g
);
2464 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2465 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2466 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2467 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2468 gimple_omp_for_set_clauses (new_stmt
, clause
);
2469 gimple_seq_add_stmt (&seq
, new_stmt
);
2470 g
= gimple_build_goto (lab3
);
2471 gimple_seq_add_stmt (&seq
, g
);
2472 g
= gimple_build_label (lab2
);
2473 gimple_seq_add_stmt (&seq
, g
);
2474 gimple_seq_add_stmt (&seq
, stmt
);
2475 g
= gimple_build_label (lab3
);
2476 gimple_seq_add_stmt (&seq
, g
);
2477 gimple_bind_set_body (bind
, seq
);
2479 scan_omp_for (new_stmt
, outer_ctx
);
2480 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2483 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2484 struct walk_stmt_info
*);
2485 static omp_context
*maybe_lookup_ctx (gimple
*);
2487 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2488 for scan phase loop. */
2491 scan_omp_simd_scan (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2492 omp_context
*outer_ctx
)
2494 /* The only change between inclusive and exclusive scan will be
2495 within the first simd loop, so just use inclusive in the
2496 worksharing loop. */
2497 outer_ctx
->scan_inclusive
= true;
2498 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_INCLUSIVE
);
2499 OMP_CLAUSE_DECL (c
) = integer_zero_node
;
2501 gomp_scan
*input_stmt
= gimple_build_omp_scan (NULL
, NULL_TREE
);
2502 gomp_scan
*scan_stmt
= gimple_build_omp_scan (NULL
, c
);
2503 gsi_replace (gsi
, input_stmt
, false);
2504 gimple_seq input_body
= NULL
;
2505 gimple_seq_add_stmt (&input_body
, stmt
);
2506 gsi_insert_after (gsi
, scan_stmt
, GSI_NEW_STMT
);
2508 gimple_stmt_iterator input1_gsi
= gsi_none ();
2509 struct walk_stmt_info wi
;
2510 memset (&wi
, 0, sizeof (wi
));
2512 wi
.info
= (void *) &input1_gsi
;
2513 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), omp_find_scan
, NULL
, &wi
);
2514 gcc_assert (!gsi_end_p (input1_gsi
));
2516 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
2517 gsi_next (&input1_gsi
);
2518 gimple
*scan_stmt1
= gsi_stmt (input1_gsi
);
2519 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
2520 c
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (scan_stmt1
));
2521 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2522 std::swap (input_stmt1
, scan_stmt1
);
2524 gimple_seq input_body1
= gimple_omp_body (input_stmt1
);
2525 gimple_omp_set_body (input_stmt1
, NULL
);
2527 gimple_seq scan_body
= copy_gimple_seq_and_replace_locals (stmt
);
2528 gomp_for
*new_stmt
= as_a
<gomp_for
*> (scan_body
);
2530 gimple_omp_set_body (input_stmt1
, input_body1
);
2531 gimple_omp_set_body (scan_stmt1
, NULL
);
2533 gimple_stmt_iterator input2_gsi
= gsi_none ();
2534 memset (&wi
, 0, sizeof (wi
));
2536 wi
.info
= (void *) &input2_gsi
;
2537 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt
), omp_find_scan
,
2539 gcc_assert (!gsi_end_p (input2_gsi
));
2541 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
2542 gsi_next (&input2_gsi
);
2543 gimple
*scan_stmt2
= gsi_stmt (input2_gsi
);
2544 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
2545 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2546 std::swap (input_stmt2
, scan_stmt2
);
2548 gimple_omp_set_body (input_stmt2
, NULL
);
2550 gimple_omp_set_body (input_stmt
, input_body
);
2551 gimple_omp_set_body (scan_stmt
, scan_body
);
2553 omp_context
*ctx
= new_omp_context (input_stmt
, outer_ctx
);
2554 scan_omp (gimple_omp_body_ptr (input_stmt
), ctx
);
2556 ctx
= new_omp_context (scan_stmt
, outer_ctx
);
2557 scan_omp (gimple_omp_body_ptr (scan_stmt
), ctx
);
2559 maybe_lookup_ctx (new_stmt
)->for_simd_scan_phase
= true;
2562 /* Scan an OpenMP sections directive. */
2565 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2569 ctx
= new_omp_context (stmt
, outer_ctx
);
2570 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2571 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2574 /* Scan an OpenMP single directive. */
2577 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2582 ctx
= new_omp_context (stmt
, outer_ctx
);
2583 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2584 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2585 name
= create_tmp_var_name (".omp_copy_s");
2586 name
= build_decl (gimple_location (stmt
),
2587 TYPE_DECL
, name
, ctx
->record_type
);
2588 TYPE_NAME (ctx
->record_type
) = name
;
2590 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2591 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2593 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2594 ctx
->record_type
= NULL
;
2596 layout_type (ctx
->record_type
);
2599 /* Scan a GIMPLE_OMP_TARGET. */
2602 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
2606 bool offloaded
= is_gimple_omp_offloaded (stmt
);
2607 tree clauses
= gimple_omp_target_clauses (stmt
);
2609 ctx
= new_omp_context (stmt
, outer_ctx
);
2610 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2611 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2612 name
= create_tmp_var_name (".omp_data_t");
2613 name
= build_decl (gimple_location (stmt
),
2614 TYPE_DECL
, name
, ctx
->record_type
);
2615 DECL_ARTIFICIAL (name
) = 1;
2616 DECL_NAMELESS (name
) = 1;
2617 TYPE_NAME (ctx
->record_type
) = name
;
2618 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2622 create_omp_child_function (ctx
, false);
2623 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2626 scan_sharing_clauses (clauses
, ctx
);
2627 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2629 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2630 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2633 TYPE_FIELDS (ctx
->record_type
)
2634 = nreverse (TYPE_FIELDS (ctx
->record_type
));
2637 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
2638 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
2640 field
= DECL_CHAIN (field
))
2641 gcc_assert (DECL_ALIGN (field
) == align
);
2643 layout_type (ctx
->record_type
);
2645 fixup_child_record_type (ctx
);
2649 /* Scan an OpenMP teams directive. */
2652 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
2654 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
2656 if (!gimple_omp_teams_host (stmt
))
2658 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2659 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2662 taskreg_contexts
.safe_push (ctx
);
2663 gcc_assert (taskreg_nesting_level
== 1);
2664 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2665 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2666 tree name
= create_tmp_var_name (".omp_data_s");
2667 name
= build_decl (gimple_location (stmt
),
2668 TYPE_DECL
, name
, ctx
->record_type
);
2669 DECL_ARTIFICIAL (name
) = 1;
2670 DECL_NAMELESS (name
) = 1;
2671 TYPE_NAME (ctx
->record_type
) = name
;
2672 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2673 create_omp_child_function (ctx
, false);
2674 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2676 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2677 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2679 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2680 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2683 /* Check nesting restrictions. */
2685 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
2689 if (ctx
&& gimple_code (ctx
->stmt
) == GIMPLE_OMP_GRID_BODY
)
2690 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2691 the original copy of its contents. */
2694 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2695 inside an OpenACC CTX. */
2696 if (!(is_gimple_omp (stmt
)
2697 && is_gimple_omp_oacc (stmt
))
2698 /* Except for atomic codes that we share with OpenMP. */
2699 && !(gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2700 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2702 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2704 error_at (gimple_location (stmt
),
2705 "non-OpenACC construct inside of OpenACC routine");
2709 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
2710 if (is_gimple_omp (octx
->stmt
)
2711 && is_gimple_omp_oacc (octx
->stmt
))
2713 error_at (gimple_location (stmt
),
2714 "non-OpenACC construct inside of OpenACC region");
2721 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
2723 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
2725 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
2726 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
2730 if (ctx
->order_concurrent
2731 && (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
2732 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2733 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2735 error_at (gimple_location (stmt
),
2736 "OpenMP constructs other than %<parallel%>, %<loop%>"
2737 " or %<simd%> may not be nested inside a region with"
2738 " the %<order(concurrent)%> clause");
2741 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
2743 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2744 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2746 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
2747 && (ctx
->outer
== NULL
2748 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
2749 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
2750 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
2751 != GF_OMP_FOR_KIND_FOR
)
2752 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
2754 error_at (gimple_location (stmt
),
2755 "%<ordered simd threads%> must be closely "
2756 "nested inside of %<for simd%> region");
2762 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2763 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
2764 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
2766 else if (gimple_code (stmt
) == GIMPLE_OMP_FOR
2767 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
2769 error_at (gimple_location (stmt
),
2770 "OpenMP constructs other than "
2771 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2772 "not be nested inside %<simd%> region");
2775 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2777 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
2778 || (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
2779 && gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
2780 && omp_find_clause (gimple_omp_for_clauses (stmt
),
2781 OMP_CLAUSE_BIND
) == NULL_TREE
))
2782 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
2784 error_at (gimple_location (stmt
),
2785 "only %<distribute%>, %<parallel%> or %<loop%> "
2786 "regions are allowed to be strictly nested inside "
2787 "%<teams%> region");
2791 else if (ctx
->order_concurrent
2792 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
2793 && (gimple_code (stmt
) != GIMPLE_OMP_FOR
2794 || gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_SIMD
)
2795 && gimple_code (stmt
) != GIMPLE_OMP_SCAN
)
2798 error_at (gimple_location (stmt
),
2799 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2800 "%<simd%> may not be nested inside a %<loop%> region");
2802 error_at (gimple_location (stmt
),
2803 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2804 "%<simd%> may not be nested inside a region with "
2805 "the %<order(concurrent)%> clause");
2809 switch (gimple_code (stmt
))
2811 case GIMPLE_OMP_FOR
:
2812 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
)
2814 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
2816 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
2818 error_at (gimple_location (stmt
),
2819 "%<distribute%> region must be strictly nested "
2820 "inside %<teams%> construct");
2825 /* We split taskloop into task and nested taskloop in it. */
2826 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
2828 /* For now, hope this will change and loop bind(parallel) will not
2829 be allowed in lots of contexts. */
2830 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
2831 && omp_find_clause (gimple_omp_for_clauses (stmt
), OMP_CLAUSE_BIND
))
2833 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
2838 switch (gimple_code (ctx
->stmt
))
2840 case GIMPLE_OMP_FOR
:
2841 ok
= (gimple_omp_for_kind (ctx
->stmt
)
2842 == GF_OMP_FOR_KIND_OACC_LOOP
);
2845 case GIMPLE_OMP_TARGET
:
2846 switch (gimple_omp_target_kind (ctx
->stmt
))
2848 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
2849 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
2860 else if (oacc_get_fn_attrib (current_function_decl
))
2864 error_at (gimple_location (stmt
),
2865 "OpenACC loop directive must be associated with"
2866 " an OpenACC compute region");
2872 if (is_gimple_call (stmt
)
2873 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2874 == BUILT_IN_GOMP_CANCEL
2875 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2876 == BUILT_IN_GOMP_CANCELLATION_POINT
))
2878 const char *bad
= NULL
;
2879 const char *kind
= NULL
;
2880 const char *construct
2881 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2882 == BUILT_IN_GOMP_CANCEL
)
2884 : "cancellation point";
2887 error_at (gimple_location (stmt
), "orphaned %qs construct",
2891 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
2892 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
2896 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
2898 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2899 == BUILT_IN_GOMP_CANCEL
2900 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2901 ctx
->cancellable
= true;
2905 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2906 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
2908 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2909 == BUILT_IN_GOMP_CANCEL
2910 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2912 ctx
->cancellable
= true;
2913 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2915 warning_at (gimple_location (stmt
), 0,
2916 "%<cancel for%> inside "
2917 "%<nowait%> for construct");
2918 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2919 OMP_CLAUSE_ORDERED
))
2920 warning_at (gimple_location (stmt
), 0,
2921 "%<cancel for%> inside "
2922 "%<ordered%> for construct");
2927 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
2928 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
2930 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2931 == BUILT_IN_GOMP_CANCEL
2932 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2934 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
2936 ctx
->cancellable
= true;
2937 if (omp_find_clause (gimple_omp_sections_clauses
2940 warning_at (gimple_location (stmt
), 0,
2941 "%<cancel sections%> inside "
2942 "%<nowait%> sections construct");
2946 gcc_assert (ctx
->outer
2947 && gimple_code (ctx
->outer
->stmt
)
2948 == GIMPLE_OMP_SECTIONS
);
2949 ctx
->outer
->cancellable
= true;
2950 if (omp_find_clause (gimple_omp_sections_clauses
2953 warning_at (gimple_location (stmt
), 0,
2954 "%<cancel sections%> inside "
2955 "%<nowait%> sections construct");
2961 if (!is_task_ctx (ctx
)
2962 && (!is_taskloop_ctx (ctx
)
2963 || ctx
->outer
== NULL
2964 || !is_task_ctx (ctx
->outer
)))
2968 for (omp_context
*octx
= ctx
->outer
;
2969 octx
; octx
= octx
->outer
)
2971 switch (gimple_code (octx
->stmt
))
2973 case GIMPLE_OMP_TASKGROUP
:
2975 case GIMPLE_OMP_TARGET
:
2976 if (gimple_omp_target_kind (octx
->stmt
)
2977 != GF_OMP_TARGET_KIND_REGION
)
2980 case GIMPLE_OMP_PARALLEL
:
2981 case GIMPLE_OMP_TEAMS
:
2982 error_at (gimple_location (stmt
),
2983 "%<%s taskgroup%> construct not closely "
2984 "nested inside of %<taskgroup%> region",
2987 case GIMPLE_OMP_TASK
:
2988 if (gimple_omp_task_taskloop_p (octx
->stmt
)
2990 && is_taskloop_ctx (octx
->outer
))
2993 = gimple_omp_for_clauses (octx
->outer
->stmt
);
2994 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
3003 ctx
->cancellable
= true;
3008 error_at (gimple_location (stmt
), "invalid arguments");
3013 error_at (gimple_location (stmt
),
3014 "%<%s %s%> construct not closely nested inside of %qs",
3015 construct
, kind
, bad
);
3020 case GIMPLE_OMP_SECTIONS
:
3021 case GIMPLE_OMP_SINGLE
:
3022 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3023 switch (gimple_code (ctx
->stmt
))
3025 case GIMPLE_OMP_FOR
:
3026 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3027 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3030 case GIMPLE_OMP_SECTIONS
:
3031 case GIMPLE_OMP_SINGLE
:
3032 case GIMPLE_OMP_ORDERED
:
3033 case GIMPLE_OMP_MASTER
:
3034 case GIMPLE_OMP_TASK
:
3035 case GIMPLE_OMP_CRITICAL
:
3036 if (is_gimple_call (stmt
))
3038 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3039 != BUILT_IN_GOMP_BARRIER
)
3041 error_at (gimple_location (stmt
),
3042 "barrier region may not be closely nested inside "
3043 "of work-sharing, %<loop%>, %<critical%>, "
3044 "%<ordered%>, %<master%>, explicit %<task%> or "
3045 "%<taskloop%> region");
3048 error_at (gimple_location (stmt
),
3049 "work-sharing region may not be closely nested inside "
3050 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3051 "%<master%>, explicit %<task%> or %<taskloop%> region");
3053 case GIMPLE_OMP_PARALLEL
:
3054 case GIMPLE_OMP_TEAMS
:
3056 case GIMPLE_OMP_TARGET
:
3057 if (gimple_omp_target_kind (ctx
->stmt
)
3058 == GF_OMP_TARGET_KIND_REGION
)
3065 case GIMPLE_OMP_MASTER
:
3066 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3067 switch (gimple_code (ctx
->stmt
))
3069 case GIMPLE_OMP_FOR
:
3070 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3071 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3074 case GIMPLE_OMP_SECTIONS
:
3075 case GIMPLE_OMP_SINGLE
:
3076 case GIMPLE_OMP_TASK
:
3077 error_at (gimple_location (stmt
),
3078 "%<master%> region may not be closely nested inside "
3079 "of work-sharing, %<loop%>, explicit %<task%> or "
3080 "%<taskloop%> region");
3082 case GIMPLE_OMP_PARALLEL
:
3083 case GIMPLE_OMP_TEAMS
:
3085 case GIMPLE_OMP_TARGET
:
3086 if (gimple_omp_target_kind (ctx
->stmt
)
3087 == GF_OMP_TARGET_KIND_REGION
)
3094 case GIMPLE_OMP_TASK
:
3095 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3096 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3097 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3098 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3100 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3101 error_at (OMP_CLAUSE_LOCATION (c
),
3102 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3103 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3107 case GIMPLE_OMP_ORDERED
:
3108 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3109 c
; c
= OMP_CLAUSE_CHAIN (c
))
3111 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
3113 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3114 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3117 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3118 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
3119 || kind
== OMP_CLAUSE_DEPEND_SINK
)
3122 /* Look for containing ordered(N) loop. */
3124 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3126 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3127 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3129 error_at (OMP_CLAUSE_LOCATION (c
),
3130 "%<ordered%> construct with %<depend%> clause "
3131 "must be closely nested inside an %<ordered%> "
3135 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
3137 error_at (OMP_CLAUSE_LOCATION (c
),
3138 "%<ordered%> construct with %<depend%> clause "
3139 "must be closely nested inside a loop with "
3140 "%<ordered%> clause with a parameter");
3146 error_at (OMP_CLAUSE_LOCATION (c
),
3147 "invalid depend kind in omp %<ordered%> %<depend%>");
3151 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3152 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3154 /* ordered simd must be closely nested inside of simd region,
3155 and simd region must not encounter constructs other than
3156 ordered simd, therefore ordered simd may be either orphaned,
3157 or ctx->stmt must be simd. The latter case is handled already
3161 error_at (gimple_location (stmt
),
3162 "%<ordered%> %<simd%> must be closely nested inside "
3167 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3168 switch (gimple_code (ctx
->stmt
))
3170 case GIMPLE_OMP_CRITICAL
:
3171 case GIMPLE_OMP_TASK
:
3172 case GIMPLE_OMP_ORDERED
:
3173 ordered_in_taskloop
:
3174 error_at (gimple_location (stmt
),
3175 "%<ordered%> region may not be closely nested inside "
3176 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3177 "%<taskloop%> region");
3179 case GIMPLE_OMP_FOR
:
3180 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3181 goto ordered_in_taskloop
;
3183 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3184 OMP_CLAUSE_ORDERED
);
3187 error_at (gimple_location (stmt
),
3188 "%<ordered%> region must be closely nested inside "
3189 "a loop region with an %<ordered%> clause");
3192 if (OMP_CLAUSE_ORDERED_EXPR (o
) != NULL_TREE
3193 && omp_find_clause (c
, OMP_CLAUSE_DEPEND
) == NULL_TREE
)
3195 error_at (gimple_location (stmt
),
3196 "%<ordered%> region without %<depend%> clause may "
3197 "not be closely nested inside a loop region with "
3198 "an %<ordered%> clause with a parameter");
3202 case GIMPLE_OMP_TARGET
:
3203 if (gimple_omp_target_kind (ctx
->stmt
)
3204 != GF_OMP_TARGET_KIND_REGION
)
3207 case GIMPLE_OMP_PARALLEL
:
3208 case GIMPLE_OMP_TEAMS
:
3209 error_at (gimple_location (stmt
),
3210 "%<ordered%> region must be closely nested inside "
3211 "a loop region with an %<ordered%> clause");
3217 case GIMPLE_OMP_CRITICAL
:
3220 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3221 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3222 if (gomp_critical
*other_crit
3223 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3224 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3226 error_at (gimple_location (stmt
),
3227 "%<critical%> region may not be nested inside "
3228 "a %<critical%> region with the same name");
3233 case GIMPLE_OMP_TEAMS
:
3236 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3237 || (gimple_omp_target_kind (ctx
->stmt
)
3238 != GF_OMP_TARGET_KIND_REGION
))
3240 /* Teams construct can appear either strictly nested inside of
3241 target construct with no intervening stmts, or can be encountered
3242 only by initial task (so must not appear inside any OpenMP
3244 error_at (gimple_location (stmt
),
3245 "%<teams%> construct must be closely nested inside of "
3246 "%<target%> construct or not nested in any OpenMP "
3251 case GIMPLE_OMP_TARGET
:
3252 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3253 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3254 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3255 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3257 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3258 error_at (OMP_CLAUSE_LOCATION (c
),
3259 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3260 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3263 if (is_gimple_omp_offloaded (stmt
)
3264 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3266 error_at (gimple_location (stmt
),
3267 "OpenACC region inside of OpenACC routine, nested "
3268 "parallelism not supported yet");
3271 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3273 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3275 if (is_gimple_omp (stmt
)
3276 && is_gimple_omp_oacc (stmt
)
3277 && is_gimple_omp (ctx
->stmt
))
3279 error_at (gimple_location (stmt
),
3280 "OpenACC construct inside of non-OpenACC region");
3286 const char *stmt_name
, *ctx_stmt_name
;
3287 switch (gimple_omp_target_kind (stmt
))
3289 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3290 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3291 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3292 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3293 stmt_name
= "target enter data"; break;
3294 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3295 stmt_name
= "target exit data"; break;
3296 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3297 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3298 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3299 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3300 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
3301 stmt_name
= "enter/exit data"; break;
3302 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3303 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3305 default: gcc_unreachable ();
3307 switch (gimple_omp_target_kind (ctx
->stmt
))
3309 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3310 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3311 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3312 ctx_stmt_name
= "parallel"; break;
3313 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3314 ctx_stmt_name
= "kernels"; break;
3315 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3316 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3317 ctx_stmt_name
= "host_data"; break;
3318 default: gcc_unreachable ();
3321 /* OpenACC/OpenMP mismatch? */
3322 if (is_gimple_omp_oacc (stmt
)
3323 != is_gimple_omp_oacc (ctx
->stmt
))
3325 error_at (gimple_location (stmt
),
3326 "%s %qs construct inside of %s %qs region",
3327 (is_gimple_omp_oacc (stmt
)
3328 ? "OpenACC" : "OpenMP"), stmt_name
,
3329 (is_gimple_omp_oacc (ctx
->stmt
)
3330 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3333 if (is_gimple_omp_offloaded (ctx
->stmt
))
3335 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3336 if (is_gimple_omp_oacc (ctx
->stmt
))
3338 error_at (gimple_location (stmt
),
3339 "%qs construct inside of %qs region",
3340 stmt_name
, ctx_stmt_name
);
3345 warning_at (gimple_location (stmt
), 0,
3346 "%qs construct inside of %qs region",
3347 stmt_name
, ctx_stmt_name
);
3359 /* Helper function scan_omp.
3361 Callback for walk_tree or operators in walk_gimple_stmt used to
3362 scan for OMP directives in TP. */
3365 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3367 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3368 omp_context
*ctx
= (omp_context
*) wi
->info
;
3371 switch (TREE_CODE (t
))
3379 tree repl
= remap_decl (t
, &ctx
->cb
);
3380 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3386 if (ctx
&& TYPE_P (t
))
3387 *tp
= remap_type (t
, &ctx
->cb
);
3388 else if (!DECL_P (t
))
3393 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3394 if (tem
!= TREE_TYPE (t
))
3396 if (TREE_CODE (t
) == INTEGER_CST
)
3397 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3399 TREE_TYPE (t
) = tem
;
3409 /* Return true if FNDECL is a setjmp or a longjmp. */
3412 setjmp_or_longjmp_p (const_tree fndecl
)
3414 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3415 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
3418 tree declname
= DECL_NAME (fndecl
);
3420 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3421 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3422 || !TREE_PUBLIC (fndecl
))
3425 const char *name
= IDENTIFIER_POINTER (declname
);
3426 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
3429 /* Return true if FNDECL is an omp_* runtime API call. */
/* NOTE(review): this region is a lossy extraction -- original source line
   numbers are embedded in the text and several lines (function header,
   most table entries, braces, returns) are missing.  Kept byte-for-byte;
   restore from the upstream file before compiling.  */
3432 omp_runtime_api_call (const_tree fndecl
)
3434 tree declname
= DECL_NAME (fndecl
);
/* Only named, public declarations at translation-unit scope qualify.  */
3436 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3437 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3438 || !TREE_PUBLIC (fndecl
))
3441 const char *name
= IDENTIFIER_POINTER (declname
);
/* Every OpenMP runtime entry point starts with "omp_".  */
3442 if (strncmp (name
, "omp_", 4) != 0)
3445 static const char *omp_runtime_apis
[] =
3447 /* This array has 3 sections. First omp_* calls that don't
3448 have any suffixes. */
3450 "target_associate_ptr",
3451 "target_disassociate_ptr",
3453 "target_is_present",
3455 "target_memcpy_rect",
3457 /* Now omp_* calls that are available as omp_* and omp_*_. */
3460 "destroy_nest_lock",
3463 "get_affinity_format",
3465 "get_default_device",
3467 "get_initial_device",
3469 "get_max_active_levels",
3470 "get_max_task_priority",
3478 "get_partition_num_places",
3490 "is_initial_device",
3492 "pause_resource_all",
3493 "set_affinity_format",
3501 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3502 "get_ancestor_thread_num",
3503 "get_partition_place_nums",
3504 "get_place_num_procs",
3505 "get_place_proc_ids",
3508 "set_default_device",
3510 "set_max_active_levels",
/* Match NAME (past the "omp_" prefix) against the table, accepting the
   "", "_" and "_8_" suffix forms checked below.  */
3517 for (unsigned i
= 0; i
< ARRAY_SIZE (omp_runtime_apis
); i
++)
3519 if (omp_runtime_apis
[i
] == NULL
)
3524 size_t len
= strlen (omp_runtime_apis
[i
]);
3525 if (strncmp (name
+ 4, omp_runtime_apis
[i
], len
) == 0
3526 && (name
[4 + len
] == '\0'
3528 && name
[4 + len
] == '_'
3529 && (name
[4 + len
+ 1] == '\0'
3531 && strcmp (name
+ 4 + len
+ 1, "8_") == 0)))))
3537 /* Helper function for scan_omp.
3539 Callback for walk_gimple_stmt used to scan for OMP directives in
3540 the current statement in GSI. */
/* NOTE(review): this region is a lossy extraction -- original source line
   numbers are embedded in the text and several structural lines (braces,
   breaks, some conditions) are missing.  Kept byte-for-byte; restore from
   the upstream file before compiling.  */
3543 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3544 struct walk_stmt_info
*wi
)
3546 gimple
*stmt
= gsi_stmt (*gsi
);
3547 omp_context
*ctx
= (omp_context
*) wi
->info
;
3549 if (gimple_has_location (stmt
))
3550 input_location
= gimple_location (stmt
);
3552 /* Check the nesting restrictions. */
3553 bool remove
= false;
3554 if (is_gimple_omp (stmt
))
3555 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3556 else if (is_gimple_call (stmt
))
3558 tree fndecl
= gimple_call_fndecl (stmt
);
/* setjmp/longjmp are rejected inside a simd-kind GIMPLE_OMP_FOR.  */
3562 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3563 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3564 && setjmp_or_longjmp_p (fndecl
)
3568 error_at (gimple_location (stmt
),
3569 "setjmp/longjmp inside %<simd%> construct");
/* GOMP_* builtins are subject to the same nesting checks as explicit
   OMP statements.  */
3571 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3572 switch (DECL_FUNCTION_CODE (fndecl
))
3574 case BUILT_IN_GOMP_BARRIER
:
3575 case BUILT_IN_GOMP_CANCEL
:
3576 case BUILT_IN_GOMP_CANCELLATION_POINT
:
3577 case BUILT_IN_GOMP_TASKYIELD
:
3578 case BUILT_IN_GOMP_TASKWAIT
:
3579 case BUILT_IN_GOMP_TASKGROUP_START
:
3580 case BUILT_IN_GOMP_TASKGROUP_END
:
3581 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
/* For a GIMPLE_OMP_SCAN context, the order(concurrent) property is
   checked on the enclosing context instead.  */
3588 omp_context
*octx
= ctx
;
3589 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
&& ctx
->outer
)
3591 if (octx
->order_concurrent
&& omp_runtime_api_call (fndecl
))
3594 error_at (gimple_location (stmt
),
3595 "OpenMP runtime API call %qD in a region with "
3596 "%<order(concurrent)%> clause", fndecl
);
/* Statements that violate nesting restrictions are replaced by a no-op.  */
3603 stmt
= gimple_build_nop ();
3604 gsi_replace (gsi
, stmt
, false);
3607 *handled_ops_p
= true;
/* Dispatch on the statement code to the construct-specific scanners.  */
3609 switch (gimple_code (stmt
))
3611 case GIMPLE_OMP_PARALLEL
:
3612 taskreg_nesting_level
++;
3613 scan_omp_parallel (gsi
, ctx
);
3614 taskreg_nesting_level
--;
3617 case GIMPLE_OMP_TASK
:
3618 taskreg_nesting_level
++;
3619 scan_omp_task (gsi
, ctx
);
3620 taskreg_nesting_level
--;
3623 case GIMPLE_OMP_FOR
:
/* A combined simd with an inscan reduction is scanned specially.  */
3624 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3625 == GF_OMP_FOR_KIND_SIMD
)
3626 && gimple_omp_for_combined_into_p (stmt
)
3627 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SCAN
)
3629 tree clauses
= gimple_omp_for_clauses (as_a
<gomp_for
*> (stmt
));
3630 tree c
= omp_find_clause (clauses
, OMP_CLAUSE_REDUCTION
)
3631 if (c
&& OMP_CLAUSE_REDUCTION_INSCAN (c
) && !seen_error ())
3633 scan_omp_simd_scan (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
/* A simd loop in a maybe-offloaded context with SIMT support gets the
   SIMT scanning path.  */
3637 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3638 == GF_OMP_FOR_KIND_SIMD
)
3639 && omp_maybe_offloaded_ctx (ctx
)
3640 && omp_max_simt_vf ())
3641 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3643 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
3646 case GIMPLE_OMP_SECTIONS
:
3647 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
3650 case GIMPLE_OMP_SINGLE
:
3651 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
3654 case GIMPLE_OMP_SCAN
:
/* Record on the context whether the scan is inclusive or exclusive.  */
3655 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
3657 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
3658 ctx
->scan_inclusive
= true;
3659 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
3660 ctx
->scan_exclusive
= true;
3663 case GIMPLE_OMP_SECTION
:
3664 case GIMPLE_OMP_MASTER
:
3665 case GIMPLE_OMP_ORDERED
:
3666 case GIMPLE_OMP_CRITICAL
:
3667 case GIMPLE_OMP_GRID_BODY
:
3668 ctx
= new_omp_context (stmt
, ctx
);
3669 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3672 case GIMPLE_OMP_TASKGROUP
:
3673 ctx
= new_omp_context (stmt
, ctx
);
3674 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
3675 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3678 case GIMPLE_OMP_TARGET
:
3679 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3682 case GIMPLE_OMP_TEAMS
:
/* Host teams count as a taskreg nesting level; offloaded teams don't.  */
3683 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
3685 taskreg_nesting_level
++;
3686 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3687 taskreg_nesting_level
--;
3690 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3697 *handled_ops_p
= false;
/* Variables bound by a gbind are entered into the copy table mapped to
   themselves.  */
3699 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
3701 var
= DECL_CHAIN (var
))
3702 insert_decl_map (&ctx
->cb
, var
, var
);
3706 *handled_ops_p
= false;
3714 /* Scan all the statements starting at the current statement. CTX
3715 contains context information about the OMP directives and
3716 clauses found during the scan. */
3719 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
3721 location_t saved_location
;
3722 struct walk_stmt_info wi
;
3724 memset (&wi
, 0, sizeof (wi
));
3726 wi
.want_locations
= true;
3728 saved_location
= input_location
;
3729 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
3730 input_location
= saved_location
;
/* Re-gimplification and code generation routines.  */
3735 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3736 of BIND if in a method. */
3739 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
3741 if (DECL_ARGUMENTS (current_function_decl
)
3742 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
3743 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
3746 tree vars
= gimple_bind_vars (bind
);
3747 for (tree
*pvar
= &vars
; *pvar
; )
3748 if (omp_member_access_dummy_var (*pvar
))
3749 *pvar
= DECL_CHAIN (*pvar
);
3751 pvar
= &DECL_CHAIN (*pvar
);
3752 gimple_bind_set_vars (bind
, vars
);
3756 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3757 block and its subblocks. */
3760 remove_member_access_dummy_vars (tree block
)
3762 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
3763 if (omp_member_access_dummy_var (*pvar
))
3764 *pvar
= DECL_CHAIN (*pvar
);
3766 pvar
= &DECL_CHAIN (*pvar
);
3768 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
3769 remove_member_access_dummy_vars (block
);
3772 /* If a context was created for STMT when it was scanned, return it. */
3774 static omp_context
*
3775 maybe_lookup_ctx (gimple
*stmt
)
3778 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
3779 return n
? (omp_context
*) n
->value
: NULL
;
3783 /* Find the mapping for DECL in CTX or the immediately enclosing
3784 context that has a mapping for DECL.
3786 If CTX is a nested parallel directive, we may have to use the decl
3787 mappings created in CTX's parent context. Suppose that we have the
3788 following parallel nesting (variable UIDs showed for clarity):
3791 #omp parallel shared(iD.1562) -> outer parallel
3792 iD.1562 = iD.1562 + 1;
3794 #omp parallel shared (iD.1562) -> inner parallel
3795 iD.1562 = iD.1562 - 1;
3797 Each parallel structure will create a distinct .omp_data_s structure
3798 for copying iD.1562 in/out of the directive:
3800 outer parallel .omp_data_s.1.i -> iD.1562
3801 inner parallel .omp_data_s.2.i -> iD.1562
3803 A shared variable mapping will produce a copy-out operation before
3804 the parallel directive and a copy-in operation after it. So, in
3805 this case we would have:
3808 .omp_data_o.1.i = iD.1562;
3809 #omp parallel shared(iD.1562) -> outer parallel
3810 .omp_data_i.1 = &.omp_data_o.1
3811 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3813 .omp_data_o.2.i = iD.1562; -> **
3814 #omp parallel shared(iD.1562) -> inner parallel
3815 .omp_data_i.2 = &.omp_data_o.2
3816 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3819 ** This is a problem. The symbol iD.1562 cannot be referenced
3820 inside the body of the outer parallel region. But since we are
3821 emitting this copy operation while expanding the inner parallel
3822 directive, we need to access the CTX structure of the outer
3823 parallel directive to get the correct mapping:
3825 .omp_data_o.2.i = .omp_data_i.1->i
3827 Since there may be other workshare or parallel directives enclosing
3828 the parallel directive, it may be necessary to walk up the context
3829 parent chain. This is not a problem in general because nested
3830 parallelism happens only rarely. */
3833 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3838 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3839 t
= maybe_lookup_decl (decl
, up
);
3841 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
3843 return t
? t
: decl
;
3847 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3848 in outer contexts. */
3851 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3856 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3857 t
= maybe_lookup_decl (decl
, up
);
3859 return t
? t
: decl
;
3863 /* Construct the initialization value for reduction operation OP. */
3866 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
3875 case TRUTH_ORIF_EXPR
:
3876 case TRUTH_XOR_EXPR
:
3878 return build_zero_cst (type
);
3881 case TRUTH_AND_EXPR
:
3882 case TRUTH_ANDIF_EXPR
:
3884 return fold_convert_loc (loc
, type
, integer_one_node
);
3887 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
3890 if (SCALAR_FLOAT_TYPE_P (type
))
3892 REAL_VALUE_TYPE max
, min
;
3893 if (HONOR_INFINITIES (type
))
3896 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
3899 real_maxval (&min
, 1, TYPE_MODE (type
));
3900 return build_real (type
, min
);
3902 else if (POINTER_TYPE_P (type
))
3905 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3906 return wide_int_to_tree (type
, min
);
3910 gcc_assert (INTEGRAL_TYPE_P (type
));
3911 return TYPE_MIN_VALUE (type
);
3915 if (SCALAR_FLOAT_TYPE_P (type
))
3917 REAL_VALUE_TYPE max
;
3918 if (HONOR_INFINITIES (type
))
3921 real_maxval (&max
, 0, TYPE_MODE (type
));
3922 return build_real (type
, max
);
3924 else if (POINTER_TYPE_P (type
))
3927 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3928 return wide_int_to_tree (type
, max
);
3932 gcc_assert (INTEGRAL_TYPE_P (type
));
3933 return TYPE_MAX_VALUE (type
);
3941 /* Construct the initialization value for reduction CLAUSE. */
3944 omp_reduction_init (tree clause
, tree type
)
3946 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
3947 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
3950 /* Return alignment to be assumed for var in CLAUSE, which should be
3951 OMP_CLAUSE_ALIGNED. */
3954 omp_clause_aligned_alignment (tree clause
)
3956 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
3957 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
3959 /* Otherwise return implementation defined alignment. */
3960 unsigned int al
= 1;
3961 opt_scalar_mode mode_iter
;
3962 auto_vector_sizes sizes
;
3963 targetm
.vectorize
.autovectorize_vector_sizes (&sizes
, true);
3965 for (unsigned int i
= 0; i
< sizes
.length (); ++i
)
3966 vs
= ordered_max (vs
, sizes
[i
]);
3967 static enum mode_class classes
[]
3968 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
3969 for (int i
= 0; i
< 4; i
+= 2)
3970 /* The for loop above dictates that we only walk through scalar classes. */
3971 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
3973 scalar_mode mode
= mode_iter
.require ();
3974 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
3975 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
3977 while (maybe_ne (vs
, 0U)
3978 && known_lt (GET_MODE_SIZE (vmode
), vs
)
3979 && GET_MODE_2XWIDER_MODE (vmode
).exists ())
3980 vmode
= GET_MODE_2XWIDER_MODE (vmode
).require ();
3982 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
3983 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
3985 poly_uint64 nelts
= exact_div (GET_MODE_SIZE (vmode
),
3986 GET_MODE_SIZE (mode
));
3987 type
= build_vector_type (type
, nelts
);
3988 if (TYPE_MODE (type
) != vmode
)
3990 if (TYPE_ALIGN_UNIT (type
) > al
)
3991 al
= TYPE_ALIGN_UNIT (type
);
3993 return build_int_cst (integer_type_node
, al
);
3997 /* This structure is part of the interface between lower_rec_simd_input_clauses
3998 and lower_rec_input_clauses. */
4000 class omplow_simd_context
{
4002 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4006 vec
<tree
, va_heap
> simt_eargs
;
4007 gimple_seq simt_dlist
;
4008 poly_uint64_pod max_vf
;
4012 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
/* NOTE(review): this region is a lossy extraction -- original source line
   numbers are embedded in the text and several structural lines (braces,
   declarations, returns) are missing.  Kept byte-for-byte; restore from
   the upstream file before compiling.  */
4016 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
4017 omplow_simd_context
*sctx
, tree
&ivar
,
4018 tree
&lvar
, tree
*rvar
= NULL
,
/* Lazily compute the maximum vectorization factor: SIMT vs. plain simd,
   clamped below by a constant SAFELEN clause when present.  */
4021 if (known_eq (sctx
->max_vf
, 0U))
4023 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
4024 if (maybe_gt (sctx
->max_vf
, 1U))
4026 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4027 OMP_CLAUSE_SAFELEN
);
4030 poly_uint64 safe_len
;
4031 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4032 || maybe_lt (safe_len
, 1U))
4035 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
4038 if (maybe_gt (sctx
->max_vf
, 1U))
4040 sctx
->idx
= create_tmp_var (unsigned_type_node
);
4041 sctx
->lane
= create_tmp_var (unsigned_type_node
);
4044 if (known_eq (sctx
->max_vf
, 1U))
4049 if (is_gimple_reg (new_var
))
4051 ivar
= lvar
= new_var
;
/* SIMT path: a single addressable temporary marked "omp simt private",
   passed by address via simt_eargs and clobbered in simt_dlist.  */
4054 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
4055 ivar
= lvar
= create_tmp_var (type
);
4056 TREE_ADDRESSABLE (ivar
) = 1;
4057 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
4058 NULL
, DECL_ATTRIBUTES (ivar
));
4059 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
4060 tree clobber
= build_clobber (type
);
4061 gimple
*g
= gimple_build_assign (ivar
, clobber
);
4062 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
/* Non-SIMT path: privatize NEW_VAR into an "omp simd array" temporary
   with max_vf elements, one per SIMD lane.  */
4066 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
4067 tree avar
= create_tmp_var_raw (atype
);
4068 if (TREE_ADDRESSABLE (new_var
))
4069 TREE_ADDRESSABLE (avar
) = 1;
4070 DECL_ATTRIBUTES (avar
)
4071 = tree_cons (get_identifier ("omp simd array"), NULL
,
4072 DECL_ATTRIBUTES (avar
));
4073 gimple_add_tmp_var (avar
);
4075 if (rvar
&& !ctx
->for_simd_scan_phase
)
4077 /* For inscan reductions, create another array temporary,
4078 which will hold the reduced value. */
4079 iavar
= create_tmp_var_raw (atype
);
4080 if (TREE_ADDRESSABLE (new_var
))
4081 TREE_ADDRESSABLE (iavar
) = 1;
4082 DECL_ATTRIBUTES (iavar
)
4083 = tree_cons (get_identifier ("omp simd array"), NULL
,
4084 tree_cons (get_identifier ("omp simd inscan"), NULL
,
4085 DECL_ATTRIBUTES (iavar
)));
4086 gimple_add_tmp_var (iavar
);
4087 ctx
->cb
.decl_map
->put (avar
, iavar
);
4088 if (sctx
->lastlane
== NULL_TREE
)
4089 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
4090 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
4091 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
4092 TREE_THIS_NOTRAP (*rvar
) = 1;
4094 if (ctx
->scan_exclusive
)
4096 /* And for exclusive scan yet another one, which will
4097 hold the value during the scan phase. */
4098 tree savar
= create_tmp_var_raw (atype
);
4099 if (TREE_ADDRESSABLE (new_var
))
4100 TREE_ADDRESSABLE (savar
) = 1;
4101 DECL_ATTRIBUTES (savar
)
4102 = tree_cons (get_identifier ("omp simd array"), NULL
,
4103 tree_cons (get_identifier ("omp simd inscan "
4105 DECL_ATTRIBUTES (savar
)));
4106 gimple_add_tmp_var (savar
);
4107 ctx
->cb
.decl_map
->put (iavar
, savar
);
4108 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
4109 sctx
->idx
, NULL_TREE
, NULL_TREE
);
4110 TREE_THIS_NOTRAP (*rvar2
) = 1;
4113 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
4114 NULL_TREE
, NULL_TREE
);
4115 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
4116 NULL_TREE
, NULL_TREE
);
4117 TREE_THIS_NOTRAP (ivar
) = 1;
4118 TREE_THIS_NOTRAP (lvar
) = 1;
/* Redirect references to NEW_VAR to its per-lane array element.  */
4120 if (DECL_P (new_var
))
4122 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4123 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4128 /* Helper function of lower_rec_input_clauses. For a reference
4129 in simd reduction, add an underlying variable it will reference. */
4132 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
4134 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
4135 if (TREE_CONSTANT (z
))
4137 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
4138 get_name (new_vard
));
4139 gimple_add_tmp_var (z
);
4140 TREE_ADDRESSABLE (z
) = 1;
4141 z
= build_fold_addr_expr_loc (loc
, z
);
4142 gimplify_assign (new_vard
, z
, ilist
);
4146 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4147 code to emit (type) (tskred_temp[idx]). */
4150 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
4153 unsigned HOST_WIDE_INT sz
4154 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
4155 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
4156 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
4158 tree v
= create_tmp_var (pointer_sized_int_node
);
4159 gimple
*g
= gimple_build_assign (v
, r
);
4160 gimple_seq_add_stmt (ilist
, g
);
4161 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
4163 v
= create_tmp_var (type
);
4164 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
4165 gimple_seq_add_stmt (ilist
, g
);
/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  */
4176 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
4177 omp_context
*ctx
, struct omp_for_data
*fd
)
4179 tree c
, copyin_seq
, x
, ptr
;
4180 bool copyin_by_ref
= false;
4181 bool lastprivate_firstprivate
= false;
4182 bool reduction_omp_orig_ref
= false;
4184 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4185 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
4186 omplow_simd_context sctx
= omplow_simd_context ();
4187 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
4188 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
4189 gimple_seq llist
[4] = { };
4190 tree nonconst_simd_if
= NULL_TREE
;
4193 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
4195 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4196 with data sharing clauses referencing variable sized vars. That
4197 is unnecessarily hard to support and very unlikely to result in
4198 vectorized code anyway. */
4200 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4201 switch (OMP_CLAUSE_CODE (c
))
4203 case OMP_CLAUSE_LINEAR
:
4204 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4207 case OMP_CLAUSE_PRIVATE
:
4208 case OMP_CLAUSE_FIRSTPRIVATE
:
4209 case OMP_CLAUSE_LASTPRIVATE
:
4210 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
4212 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4214 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4215 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4219 case OMP_CLAUSE_REDUCTION
:
4220 case OMP_CLAUSE_IN_REDUCTION
:
4221 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4222 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4224 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4226 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4227 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4232 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4234 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4235 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4237 case OMP_CLAUSE_SIMDLEN
:
4238 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4241 case OMP_CLAUSE__CONDTEMP_
:
4242 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4250 /* Add a placeholder for simduid. */
4251 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
4252 sctx
.simt_eargs
.safe_push (NULL_TREE
);
4254 unsigned task_reduction_cnt
= 0;
4255 unsigned task_reduction_cntorig
= 0;
4256 unsigned task_reduction_cnt_full
= 0;
4257 unsigned task_reduction_cntorig_full
= 0;
4258 unsigned task_reduction_other_cnt
= 0;
4259 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
4260 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
4261 /* Do all the fixed sized types in the first pass, and the variable sized
4262 types in the second pass. This makes sure that the scalar arguments to
4263 the variable sized types are processed before we use them in the
4264 variable sized operations. For task reductions we use 4 passes, in the
4265 first two we ignore them, in the third one gather arguments for
4266 GOMP_task_reduction_remap call and in the last pass actually handle
4267 the task reductions. */
4268 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
4271 if (pass
== 2 && task_reduction_cnt
)
4274 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
4275 + task_reduction_cntorig
);
4276 tskred_avar
= create_tmp_var_raw (tskred_atype
);
4277 gimple_add_tmp_var (tskred_avar
);
4278 TREE_ADDRESSABLE (tskred_avar
) = 1;
4279 task_reduction_cnt_full
= task_reduction_cnt
;
4280 task_reduction_cntorig_full
= task_reduction_cntorig
;
4282 else if (pass
== 3 && task_reduction_cnt
)
4284 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
4286 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
4287 size_int (task_reduction_cntorig
),
4288 build_fold_addr_expr (tskred_avar
));
4289 gimple_seq_add_stmt (ilist
, g
);
4291 if (pass
== 3 && task_reduction_other_cnt
)
4293 /* For reduction clauses, build
4294 tskred_base = (void *) tskred_temp[2]
4295 + omp_get_thread_num () * tskred_temp[1]
4296 or if tskred_temp[1] is known to be constant, that constant
4297 directly. This is the start of the private reduction copy block
4298 for the current thread. */
4299 tree v
= create_tmp_var (integer_type_node
);
4300 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
4301 gimple
*g
= gimple_build_call (x
, 0);
4302 gimple_call_set_lhs (g
, v
);
4303 gimple_seq_add_stmt (ilist
, g
);
4304 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
4305 tskred_temp
= OMP_CLAUSE_DECL (c
);
4306 if (is_taskreg_ctx (ctx
))
4307 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
4308 tree v2
= create_tmp_var (sizetype
);
4309 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
4310 gimple_seq_add_stmt (ilist
, g
);
4311 if (ctx
->task_reductions
[0])
4312 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
4314 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
4315 tree v3
= create_tmp_var (sizetype
);
4316 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
4317 gimple_seq_add_stmt (ilist
, g
);
4318 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
4319 tskred_base
= create_tmp_var (ptr_type_node
);
4320 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
4321 gimple_seq_add_stmt (ilist
, g
);
4323 task_reduction_cnt
= 0;
4324 task_reduction_cntorig
= 0;
4325 task_reduction_other_cnt
= 0;
4326 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4328 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
4331 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4332 bool task_reduction_p
= false;
4333 bool task_reduction_needs_orig_p
= false;
4334 tree cond
= NULL_TREE
;
4338 case OMP_CLAUSE_PRIVATE
:
4339 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
4342 case OMP_CLAUSE_SHARED
:
4343 /* Ignore shared directives in teams construct inside
4344 of target construct. */
4345 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4346 && !is_host_teams_ctx (ctx
))
4348 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
4350 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
4351 || is_global_var (OMP_CLAUSE_DECL (c
)));
4354 case OMP_CLAUSE_FIRSTPRIVATE
:
4355 case OMP_CLAUSE_COPYIN
:
4357 case OMP_CLAUSE_LINEAR
:
4358 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
4359 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4360 lastprivate_firstprivate
= true;
4362 case OMP_CLAUSE_REDUCTION
:
4363 case OMP_CLAUSE_IN_REDUCTION
:
4364 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
4366 task_reduction_p
= true;
4367 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
4369 task_reduction_other_cnt
++;
4374 task_reduction_cnt
++;
4375 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4377 var
= OMP_CLAUSE_DECL (c
);
4378 /* If var is a global variable that isn't privatized
4379 in outer contexts, we don't need to look up the
4380 original address, it is always the address of the
4381 global variable itself. */
4383 || omp_is_reference (var
)
4385 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4387 task_reduction_needs_orig_p
= true;
4388 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4389 task_reduction_cntorig
++;
4393 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4394 reduction_omp_orig_ref
= true;
4396 case OMP_CLAUSE__REDUCTEMP_
:
4397 if (!is_taskreg_ctx (ctx
))
4400 case OMP_CLAUSE__LOOPTEMP_
:
4401 /* Handle _looptemp_/_reductemp_ clauses only on
4406 case OMP_CLAUSE_LASTPRIVATE
:
4407 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4409 lastprivate_firstprivate
= true;
4410 if (pass
!= 0 || is_taskloop_ctx (ctx
))
4413 /* Even without corresponding firstprivate, if
4414 decl is Fortran allocatable, it needs outer var
4417 && lang_hooks
.decls
.omp_private_outer_ref
4418 (OMP_CLAUSE_DECL (c
)))
4419 lastprivate_firstprivate
= true;
4421 case OMP_CLAUSE_ALIGNED
:
4424 var
= OMP_CLAUSE_DECL (c
);
4425 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
4426 && !is_global_var (var
))
4428 new_var
= maybe_lookup_decl (var
, ctx
);
4429 if (new_var
== NULL_TREE
)
4430 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4431 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4432 tree alarg
= omp_clause_aligned_alignment (c
);
4433 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4434 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
4435 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4436 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4437 gimplify_and_add (x
, ilist
);
4439 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
4440 && is_global_var (var
))
4442 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
4443 new_var
= lookup_decl (var
, ctx
);
4444 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4445 t
= build_fold_addr_expr_loc (clause_loc
, t
);
4446 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4447 tree alarg
= omp_clause_aligned_alignment (c
);
4448 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4449 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
4450 t
= fold_convert_loc (clause_loc
, ptype
, t
);
4451 x
= create_tmp_var (ptype
);
4452 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
4453 gimplify_and_add (t
, ilist
);
4454 t
= build_simple_mem_ref_loc (clause_loc
, x
);
4455 SET_DECL_VALUE_EXPR (new_var
, t
);
4456 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4459 case OMP_CLAUSE__CONDTEMP_
:
4460 if (is_parallel_ctx (ctx
)
4461 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
4468 if (task_reduction_p
!= (pass
>= 2))
4471 new_var
= var
= OMP_CLAUSE_DECL (c
);
4472 if ((c_kind
== OMP_CLAUSE_REDUCTION
4473 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4474 && TREE_CODE (var
) == MEM_REF
)
4476 var
= TREE_OPERAND (var
, 0);
4477 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
4478 var
= TREE_OPERAND (var
, 0);
4479 if (TREE_CODE (var
) == INDIRECT_REF
4480 || TREE_CODE (var
) == ADDR_EXPR
)
4481 var
= TREE_OPERAND (var
, 0);
4482 if (is_variable_sized (var
))
4484 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
4485 var
= DECL_VALUE_EXPR (var
);
4486 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
4487 var
= TREE_OPERAND (var
, 0);
4488 gcc_assert (DECL_P (var
));
4492 if (c_kind
!= OMP_CLAUSE_COPYIN
)
4493 new_var
= lookup_decl (var
, ctx
);
4495 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
4500 /* C/C++ array section reductions. */
4501 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4502 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4503 && var
!= OMP_CLAUSE_DECL (c
))
4508 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
4509 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
4511 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
4513 tree b
= TREE_OPERAND (orig_var
, 1);
4514 b
= maybe_lookup_decl (b
, ctx
);
4517 b
= TREE_OPERAND (orig_var
, 1);
4518 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
4520 if (integer_zerop (bias
))
4524 bias
= fold_convert_loc (clause_loc
,
4525 TREE_TYPE (b
), bias
);
4526 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4527 TREE_TYPE (b
), b
, bias
);
4529 orig_var
= TREE_OPERAND (orig_var
, 0);
4533 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4534 if (is_global_var (out
)
4535 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
4536 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
4537 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
4542 bool by_ref
= use_pointer_for_field (var
, NULL
);
4543 x
= build_receiver_ref (var
, by_ref
, ctx
);
4544 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
4545 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
4547 x
= build_fold_addr_expr (x
);
4549 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
4550 x
= build_simple_mem_ref (x
);
4551 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
4553 if (var
== TREE_OPERAND (orig_var
, 0))
4554 x
= build_fold_addr_expr (x
);
4556 bias
= fold_convert (sizetype
, bias
);
4557 x
= fold_convert (ptr_type_node
, x
);
4558 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
4559 TREE_TYPE (x
), x
, bias
);
4560 unsigned cnt
= task_reduction_cnt
- 1;
4561 if (!task_reduction_needs_orig_p
)
4562 cnt
+= (task_reduction_cntorig_full
4563 - task_reduction_cntorig
);
4565 cnt
= task_reduction_cntorig
- 1;
4566 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4567 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4568 gimplify_assign (r
, x
, ilist
);
4572 if (TREE_CODE (orig_var
) == INDIRECT_REF
4573 || TREE_CODE (orig_var
) == ADDR_EXPR
)
4574 orig_var
= TREE_OPERAND (orig_var
, 0);
4575 tree d
= OMP_CLAUSE_DECL (c
);
4576 tree type
= TREE_TYPE (d
);
4577 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
4578 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
4579 const char *name
= get_name (orig_var
);
4582 tree xv
= create_tmp_var (ptr_type_node
);
4583 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4585 unsigned cnt
= task_reduction_cnt
- 1;
4586 if (!task_reduction_needs_orig_p
)
4587 cnt
+= (task_reduction_cntorig_full
4588 - task_reduction_cntorig
);
4590 cnt
= task_reduction_cntorig
- 1;
4591 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4592 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4594 gimple
*g
= gimple_build_assign (xv
, x
);
4595 gimple_seq_add_stmt (ilist
, g
);
4599 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4601 if (ctx
->task_reductions
[1 + idx
])
4602 off
= fold_convert (sizetype
,
4603 ctx
->task_reductions
[1 + idx
]);
4605 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4607 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
4609 gimple_seq_add_stmt (ilist
, g
);
4611 x
= fold_convert (build_pointer_type (boolean_type_node
),
4613 if (TREE_CONSTANT (v
))
4614 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
4615 TYPE_SIZE_UNIT (type
));
4618 tree t
= maybe_lookup_decl (v
, ctx
);
4622 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4623 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
4625 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4627 build_int_cst (TREE_TYPE (v
), 1));
4628 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4630 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4631 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4633 cond
= create_tmp_var (TREE_TYPE (x
));
4634 gimplify_assign (cond
, x
, ilist
);
4637 else if (TREE_CONSTANT (v
))
4639 x
= create_tmp_var_raw (type
, name
);
4640 gimple_add_tmp_var (x
);
4641 TREE_ADDRESSABLE (x
) = 1;
4642 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4647 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4648 tree t
= maybe_lookup_decl (v
, ctx
);
4652 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4653 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
4654 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4656 build_int_cst (TREE_TYPE (v
), 1));
4657 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4659 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4660 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
4661 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
4664 tree ptype
= build_pointer_type (TREE_TYPE (type
));
4665 x
= fold_convert_loc (clause_loc
, ptype
, x
);
4666 tree y
= create_tmp_var (ptype
, name
);
4667 gimplify_assign (y
, x
, ilist
);
4671 if (!integer_zerop (bias
))
4673 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4675 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4677 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
4678 pointer_sized_int_node
, yb
, bias
);
4679 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
4680 yb
= create_tmp_var (ptype
, name
);
4681 gimplify_assign (yb
, x
, ilist
);
4685 d
= TREE_OPERAND (d
, 0);
4686 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
4687 d
= TREE_OPERAND (d
, 0);
4688 if (TREE_CODE (d
) == ADDR_EXPR
)
4690 if (orig_var
!= var
)
4692 gcc_assert (is_variable_sized (orig_var
));
4693 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
4695 gimplify_assign (new_var
, x
, ilist
);
4696 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
4697 tree t
= build_fold_indirect_ref (new_var
);
4698 DECL_IGNORED_P (new_var
) = 0;
4699 TREE_THIS_NOTRAP (t
) = 1;
4700 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
4701 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
4705 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
4706 build_int_cst (ptype
, 0));
4707 SET_DECL_VALUE_EXPR (new_var
, x
);
4708 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4713 gcc_assert (orig_var
== var
);
4714 if (TREE_CODE (d
) == INDIRECT_REF
)
4716 x
= create_tmp_var (ptype
, name
);
4717 TREE_ADDRESSABLE (x
) = 1;
4718 gimplify_assign (x
, yb
, ilist
);
4719 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4721 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4722 gimplify_assign (new_var
, x
, ilist
);
4724 /* GOMP_taskgroup_reduction_register memsets the whole
4725 array to zero. If the initializer is zero, we don't
4726 need to initialize it again, just mark it as ever
4727 used unconditionally, i.e. cond = true. */
4729 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
4730 && initializer_zerop (omp_reduction_init (c
,
4733 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
4735 gimple_seq_add_stmt (ilist
, g
);
4738 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4742 if (!is_parallel_ctx (ctx
))
4744 tree condv
= create_tmp_var (boolean_type_node
);
4745 g
= gimple_build_assign (condv
,
4746 build_simple_mem_ref (cond
));
4747 gimple_seq_add_stmt (ilist
, g
);
4748 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
4749 g
= gimple_build_cond (NE_EXPR
, condv
,
4750 boolean_false_node
, end
, lab1
);
4751 gimple_seq_add_stmt (ilist
, g
);
4752 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
4754 g
= gimple_build_assign (build_simple_mem_ref (cond
),
4756 gimple_seq_add_stmt (ilist
, g
);
4759 tree y1
= create_tmp_var (ptype
);
4760 gimplify_assign (y1
, y
, ilist
);
4761 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
4762 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
4763 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
4764 if (task_reduction_needs_orig_p
)
4766 y3
= create_tmp_var (ptype
);
4768 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4769 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4770 size_int (task_reduction_cnt_full
4771 + task_reduction_cntorig
- 1),
4772 NULL_TREE
, NULL_TREE
);
4775 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4776 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
4779 gimplify_assign (y3
, ref
, ilist
);
4781 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
4785 y2
= create_tmp_var (ptype
);
4786 gimplify_assign (y2
, y
, ilist
);
4788 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4790 tree ref
= build_outer_var_ref (var
, ctx
);
4791 /* For ref build_outer_var_ref already performs this. */
4792 if (TREE_CODE (d
) == INDIRECT_REF
)
4793 gcc_assert (omp_is_reference (var
));
4794 else if (TREE_CODE (d
) == ADDR_EXPR
)
4795 ref
= build_fold_addr_expr (ref
);
4796 else if (omp_is_reference (var
))
4797 ref
= build_fold_addr_expr (ref
);
4798 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
4799 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
4800 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4802 y3
= create_tmp_var (ptype
);
4803 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
4807 y4
= create_tmp_var (ptype
);
4808 gimplify_assign (y4
, ref
, dlist
);
4812 tree i
= create_tmp_var (TREE_TYPE (v
));
4813 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
4814 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4815 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
4818 i2
= create_tmp_var (TREE_TYPE (v
));
4819 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
4820 body2
= create_artificial_label (UNKNOWN_LOCATION
);
4821 end2
= create_artificial_label (UNKNOWN_LOCATION
);
4822 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
4824 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4826 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
4827 tree decl_placeholder
4828 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
4829 SET_DECL_VALUE_EXPR (decl_placeholder
,
4830 build_simple_mem_ref (y1
));
4831 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
4832 SET_DECL_VALUE_EXPR (placeholder
,
4833 y3
? build_simple_mem_ref (y3
)
4835 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
4836 x
= lang_hooks
.decls
.omp_clause_default_ctor
4837 (c
, build_simple_mem_ref (y1
),
4838 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
4840 gimplify_and_add (x
, ilist
);
4841 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4843 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4844 lower_omp (&tseq
, ctx
);
4845 gimple_seq_add_seq (ilist
, tseq
);
4847 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4850 SET_DECL_VALUE_EXPR (decl_placeholder
,
4851 build_simple_mem_ref (y2
));
4852 SET_DECL_VALUE_EXPR (placeholder
,
4853 build_simple_mem_ref (y4
));
4854 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4855 lower_omp (&tseq
, ctx
);
4856 gimple_seq_add_seq (dlist
, tseq
);
4857 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4859 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4860 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
4863 x
= lang_hooks
.decls
.omp_clause_dtor
4864 (c
, build_simple_mem_ref (y2
));
4866 gimplify_and_add (x
, dlist
);
4871 x
= omp_reduction_init (c
, TREE_TYPE (type
));
4872 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
4874 /* reduction(-:var) sums up the partial results, so it
4875 acts identically to reduction(+:var). */
4876 if (code
== MINUS_EXPR
)
4879 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
4882 x
= build2 (code
, TREE_TYPE (type
),
4883 build_simple_mem_ref (y4
),
4884 build_simple_mem_ref (y2
));
4885 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
4889 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
4890 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4891 gimple_seq_add_stmt (ilist
, g
);
4894 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
4895 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4896 gimple_seq_add_stmt (ilist
, g
);
4898 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
4899 build_int_cst (TREE_TYPE (i
), 1));
4900 gimple_seq_add_stmt (ilist
, g
);
4901 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
4902 gimple_seq_add_stmt (ilist
, g
);
4903 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
4906 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
4907 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4908 gimple_seq_add_stmt (dlist
, g
);
4911 g
= gimple_build_assign
4912 (y4
, POINTER_PLUS_EXPR
, y4
,
4913 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4914 gimple_seq_add_stmt (dlist
, g
);
4916 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
4917 build_int_cst (TREE_TYPE (i2
), 1));
4918 gimple_seq_add_stmt (dlist
, g
);
4919 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
4920 gimple_seq_add_stmt (dlist
, g
);
4921 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
4927 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4931 bool by_ref
= use_pointer_for_field (var
, ctx
);
4932 x
= build_receiver_ref (var
, by_ref
, ctx
);
4934 if (!omp_is_reference (var
))
4935 x
= build_fold_addr_expr (x
);
4936 x
= fold_convert (ptr_type_node
, x
);
4937 unsigned cnt
= task_reduction_cnt
- 1;
4938 if (!task_reduction_needs_orig_p
)
4939 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
4941 cnt
= task_reduction_cntorig
- 1;
4942 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4943 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4944 gimplify_assign (r
, x
, ilist
);
4949 tree type
= TREE_TYPE (new_var
);
4950 if (!omp_is_reference (var
))
4951 type
= build_pointer_type (type
);
4952 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4954 unsigned cnt
= task_reduction_cnt
- 1;
4955 if (!task_reduction_needs_orig_p
)
4956 cnt
+= (task_reduction_cntorig_full
4957 - task_reduction_cntorig
);
4959 cnt
= task_reduction_cntorig
- 1;
4960 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4961 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4965 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4967 if (ctx
->task_reductions
[1 + idx
])
4968 off
= fold_convert (sizetype
,
4969 ctx
->task_reductions
[1 + idx
]);
4971 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4973 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
4976 x
= fold_convert (type
, x
);
4978 if (omp_is_reference (var
))
4980 gimplify_assign (new_var
, x
, ilist
);
4982 new_var
= build_simple_mem_ref (new_var
);
4986 t
= create_tmp_var (type
);
4987 gimplify_assign (t
, x
, ilist
);
4988 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
4989 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4991 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
4992 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
4993 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4994 cond
= create_tmp_var (TREE_TYPE (t
));
4995 gimplify_assign (cond
, t
, ilist
);
4997 else if (is_variable_sized (var
))
4999 /* For variable sized types, we need to allocate the
5000 actual storage here. Call alloca and store the
5001 result in the pointer decl that we created elsewhere. */
5005 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
5010 ptr
= DECL_VALUE_EXPR (new_var
);
5011 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
5012 ptr
= TREE_OPERAND (ptr
, 0);
5013 gcc_assert (DECL_P (ptr
));
5014 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
5016 /* void *tmp = __builtin_alloca */
5017 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5018 stmt
= gimple_build_call (atmp
, 2, x
,
5019 size_int (DECL_ALIGN (var
)));
5020 tmp
= create_tmp_var_raw (ptr_type_node
);
5021 gimple_add_tmp_var (tmp
);
5022 gimple_call_set_lhs (stmt
, tmp
);
5024 gimple_seq_add_stmt (ilist
, stmt
);
5026 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
5027 gimplify_assign (ptr
, x
, ilist
);
5030 else if (omp_is_reference (var
)
5031 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
5032 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
5034 /* For references that are being privatized for Fortran,
5035 allocate new backing storage for the new pointer
5036 variable. This allows us to avoid changing all the
5037 code that expects a pointer to something that expects
5038 a direct variable. */
5042 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
5043 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
5045 x
= build_receiver_ref (var
, false, ctx
);
5046 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5048 else if (TREE_CONSTANT (x
))
5050 /* For reduction in SIMD loop, defer adding the
5051 initialization of the reference, because if we decide
5052 to use SIMD array for it, the initilization could cause
5053 expansion ICE. Ditto for other privatization clauses. */
5058 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
5060 gimple_add_tmp_var (x
);
5061 TREE_ADDRESSABLE (x
) = 1;
5062 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5068 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5069 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
5070 tree al
= size_int (TYPE_ALIGN (rtype
));
5071 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
5076 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5077 gimplify_assign (new_var
, x
, ilist
);
5080 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5082 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5083 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5084 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5092 switch (OMP_CLAUSE_CODE (c
))
5094 case OMP_CLAUSE_SHARED
:
5095 /* Ignore shared directives in teams construct inside
5096 target construct. */
5097 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5098 && !is_host_teams_ctx (ctx
))
5100 /* Shared global vars are just accessed directly. */
5101 if (is_global_var (new_var
))
5103 /* For taskloop firstprivate/lastprivate, represented
5104 as firstprivate and shared clause on the task, new_var
5105 is the firstprivate var. */
5106 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5108 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5109 needs to be delayed until after fixup_child_record_type so
5110 that we get the correct type during the dereference. */
5111 by_ref
= use_pointer_for_field (var
, ctx
);
5112 x
= build_receiver_ref (var
, by_ref
, ctx
);
5113 SET_DECL_VALUE_EXPR (new_var
, x
);
5114 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5116 /* ??? If VAR is not passed by reference, and the variable
5117 hasn't been initialized yet, then we'll get a warning for
5118 the store into the omp_data_s structure. Ideally, we'd be
5119 able to notice this and not store anything at all, but
5120 we're generating code too early. Suppress the warning. */
5122 TREE_NO_WARNING (var
) = 1;
5125 case OMP_CLAUSE__CONDTEMP_
:
5126 if (is_parallel_ctx (ctx
))
5128 x
= build_receiver_ref (var
, false, ctx
);
5129 SET_DECL_VALUE_EXPR (new_var
, x
);
5130 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5132 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5134 x
= build_zero_cst (TREE_TYPE (var
));
5139 case OMP_CLAUSE_LASTPRIVATE
:
5140 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5144 case OMP_CLAUSE_PRIVATE
:
5145 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5146 x
= build_outer_var_ref (var
, ctx
);
5147 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5149 if (is_task_ctx (ctx
))
5150 x
= build_receiver_ref (var
, false, ctx
);
5152 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5160 nx
= unshare_expr (new_var
);
5162 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5163 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
5166 nx
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, nx
, x
);
5168 nx
= lang_hooks
.decls
.omp_clause_default_ctor (c
, nx
, x
);
5171 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5172 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5173 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5174 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5175 || (gimple_omp_for_index (ctx
->stmt
, 0)
5177 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5178 || omp_is_reference (var
))
5179 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5182 if (omp_is_reference (var
))
5184 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5185 tree new_vard
= TREE_OPERAND (new_var
, 0);
5186 gcc_assert (DECL_P (new_vard
));
5187 SET_DECL_VALUE_EXPR (new_vard
,
5188 build_fold_addr_expr (lvar
));
5189 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5194 tree iv
= unshare_expr (ivar
);
5196 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
,
5199 x
= lang_hooks
.decls
.omp_clause_default_ctor (c
,
5203 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
5205 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
5206 unshare_expr (ivar
), x
);
5210 gimplify_and_add (x
, &llist
[0]);
5211 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5212 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5217 gcc_assert (TREE_CODE (v
) == MEM_REF
);
5218 v
= TREE_OPERAND (v
, 0);
5219 gcc_assert (DECL_P (v
));
5221 v
= *ctx
->lastprivate_conditional_map
->get (v
);
5222 tree t
= create_tmp_var (TREE_TYPE (v
));
5223 tree z
= build_zero_cst (TREE_TYPE (v
));
5225 = build_outer_var_ref (var
, ctx
,
5226 OMP_CLAUSE_LASTPRIVATE
);
5227 gimple_seq_add_stmt (dlist
,
5228 gimple_build_assign (t
, z
));
5229 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
5230 tree civar
= DECL_VALUE_EXPR (v
);
5231 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
5232 civar
= unshare_expr (civar
);
5233 TREE_OPERAND (civar
, 1) = sctx
.idx
;
5234 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
5235 unshare_expr (civar
));
5236 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
5237 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
5238 orig_v
, unshare_expr (ivar
)));
5239 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
5241 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
5243 gimple_seq tseq
= NULL
;
5244 gimplify_and_add (x
, &tseq
);
5246 lower_omp (&tseq
, ctx
->outer
);
5247 gimple_seq_add_seq (&llist
[1], tseq
);
5249 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5250 && ctx
->for_simd_scan_phase
)
5252 x
= unshare_expr (ivar
);
5254 = build_outer_var_ref (var
, ctx
,
5255 OMP_CLAUSE_LASTPRIVATE
);
5256 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5258 gimplify_and_add (x
, &llist
[0]);
5262 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5264 gimplify_and_add (y
, &llist
[1]);
5268 if (omp_is_reference (var
))
5270 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5271 tree new_vard
= TREE_OPERAND (new_var
, 0);
5272 gcc_assert (DECL_P (new_vard
));
5273 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5274 x
= TYPE_SIZE_UNIT (type
);
5275 if (TREE_CONSTANT (x
))
5277 x
= create_tmp_var_raw (type
, get_name (var
));
5278 gimple_add_tmp_var (x
);
5279 TREE_ADDRESSABLE (x
) = 1;
5280 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5281 x
= fold_convert_loc (clause_loc
,
5282 TREE_TYPE (new_vard
), x
);
5283 gimplify_assign (new_vard
, x
, ilist
);
5288 gimplify_and_add (nx
, ilist
);
5289 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5291 && ctx
->for_simd_scan_phase
)
5293 tree orig_v
= build_outer_var_ref (var
, ctx
,
5294 OMP_CLAUSE_LASTPRIVATE
);
5295 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
5297 gimplify_and_add (x
, ilist
);
5302 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5304 gimplify_and_add (x
, dlist
);
5307 case OMP_CLAUSE_LINEAR
:
5308 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
5309 goto do_firstprivate
;
5310 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
5313 x
= build_outer_var_ref (var
, ctx
);
5316 case OMP_CLAUSE_FIRSTPRIVATE
:
5317 if (is_task_ctx (ctx
))
5319 if ((omp_is_reference (var
)
5320 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
5321 || is_variable_sized (var
))
5323 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
5325 || use_pointer_for_field (var
, NULL
))
5327 x
= build_receiver_ref (var
, false, ctx
);
5328 SET_DECL_VALUE_EXPR (new_var
, x
);
5329 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5333 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
5334 && omp_is_reference (var
))
5336 x
= build_outer_var_ref (var
, ctx
);
5337 gcc_assert (TREE_CODE (x
) == MEM_REF
5338 && integer_zerop (TREE_OPERAND (x
, 1)));
5339 x
= TREE_OPERAND (x
, 0);
5340 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5341 (c
, unshare_expr (new_var
), x
);
5342 gimplify_and_add (x
, ilist
);
5346 x
= build_outer_var_ref (var
, ctx
);
5349 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5350 && gimple_omp_for_combined_into_p (ctx
->stmt
))
5352 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5353 tree stept
= TREE_TYPE (t
);
5354 tree ct
= omp_find_clause (clauses
,
5355 OMP_CLAUSE__LOOPTEMP_
);
5357 tree l
= OMP_CLAUSE_DECL (ct
);
5358 tree n1
= fd
->loop
.n1
;
5359 tree step
= fd
->loop
.step
;
5360 tree itype
= TREE_TYPE (l
);
5361 if (POINTER_TYPE_P (itype
))
5362 itype
= signed_type_for (itype
);
5363 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
5364 if (TYPE_UNSIGNED (itype
)
5365 && fd
->loop
.cond_code
== GT_EXPR
)
5366 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
5367 fold_build1 (NEGATE_EXPR
, itype
, l
),
5368 fold_build1 (NEGATE_EXPR
,
5371 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
5372 t
= fold_build2 (MULT_EXPR
, stept
,
5373 fold_convert (stept
, l
), t
);
5375 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
5377 if (omp_is_reference (var
))
5379 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5380 tree new_vard
= TREE_OPERAND (new_var
, 0);
5381 gcc_assert (DECL_P (new_vard
));
5382 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5383 nx
= TYPE_SIZE_UNIT (type
);
5384 if (TREE_CONSTANT (nx
))
5386 nx
= create_tmp_var_raw (type
,
5388 gimple_add_tmp_var (nx
);
5389 TREE_ADDRESSABLE (nx
) = 1;
5390 nx
= build_fold_addr_expr_loc (clause_loc
,
5392 nx
= fold_convert_loc (clause_loc
,
5393 TREE_TYPE (new_vard
),
5395 gimplify_assign (new_vard
, nx
, ilist
);
5399 x
= lang_hooks
.decls
.omp_clause_linear_ctor
5401 gimplify_and_add (x
, ilist
);
5405 if (POINTER_TYPE_P (TREE_TYPE (x
)))
5406 x
= fold_build2 (POINTER_PLUS_EXPR
,
5407 TREE_TYPE (x
), x
, t
);
5409 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5412 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
5413 || TREE_ADDRESSABLE (new_var
)
5414 || omp_is_reference (var
))
5415 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5418 if (omp_is_reference (var
))
5420 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5421 tree new_vard
= TREE_OPERAND (new_var
, 0);
5422 gcc_assert (DECL_P (new_vard
));
5423 SET_DECL_VALUE_EXPR (new_vard
,
5424 build_fold_addr_expr (lvar
));
5425 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5427 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
5429 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
5430 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
5431 gimplify_and_add (x
, ilist
);
5432 gimple_stmt_iterator gsi
5433 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
5435 = gimple_build_assign (unshare_expr (lvar
), iv
);
5436 gsi_insert_before_without_update (&gsi
, g
,
5438 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5439 enum tree_code code
= PLUS_EXPR
;
5440 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
5441 code
= POINTER_PLUS_EXPR
;
5442 g
= gimple_build_assign (iv
, code
, iv
, t
);
5443 gsi_insert_before_without_update (&gsi
, g
,
5447 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5448 (c
, unshare_expr (ivar
), x
);
5449 gimplify_and_add (x
, &llist
[0]);
5450 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5452 gimplify_and_add (x
, &llist
[1]);
5455 if (omp_is_reference (var
))
5457 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5458 tree new_vard
= TREE_OPERAND (new_var
, 0);
5459 gcc_assert (DECL_P (new_vard
));
5460 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5461 nx
= TYPE_SIZE_UNIT (type
);
5462 if (TREE_CONSTANT (nx
))
5464 nx
= create_tmp_var_raw (type
, get_name (var
));
5465 gimple_add_tmp_var (nx
);
5466 TREE_ADDRESSABLE (nx
) = 1;
5467 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
5468 nx
= fold_convert_loc (clause_loc
,
5469 TREE_TYPE (new_vard
), nx
);
5470 gimplify_assign (new_vard
, nx
, ilist
);
5474 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5475 (c
, unshare_expr (new_var
), x
);
5476 gimplify_and_add (x
, ilist
);
5479 case OMP_CLAUSE__LOOPTEMP_
:
5480 case OMP_CLAUSE__REDUCTEMP_
:
5481 gcc_assert (is_taskreg_ctx (ctx
));
5482 x
= build_outer_var_ref (var
, ctx
);
5483 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5484 gimplify_and_add (x
, ilist
);
5487 case OMP_CLAUSE_COPYIN
:
5488 by_ref
= use_pointer_for_field (var
, NULL
);
5489 x
= build_receiver_ref (var
, by_ref
, ctx
);
5490 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
5491 append_to_statement_list (x
, ©in_seq
);
5492 copyin_by_ref
|= by_ref
;
5495 case OMP_CLAUSE_REDUCTION
:
5496 case OMP_CLAUSE_IN_REDUCTION
:
5497 /* OpenACC reductions are initialized using the
5498 GOACC_REDUCTION internal function. */
5499 if (is_gimple_omp_oacc (ctx
->stmt
))
5501 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5503 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5505 tree ptype
= TREE_TYPE (placeholder
);
5508 x
= error_mark_node
;
5509 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
5510 && !task_reduction_needs_orig_p
)
5512 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5514 tree pptype
= build_pointer_type (ptype
);
5515 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5516 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5517 size_int (task_reduction_cnt_full
5518 + task_reduction_cntorig
- 1),
5519 NULL_TREE
, NULL_TREE
);
5523 = *ctx
->task_reduction_map
->get (c
);
5524 x
= task_reduction_read (ilist
, tskred_temp
,
5525 pptype
, 7 + 3 * idx
);
5527 x
= fold_convert (pptype
, x
);
5528 x
= build_simple_mem_ref (x
);
5533 x
= build_outer_var_ref (var
, ctx
);
5535 if (omp_is_reference (var
)
5536 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
5537 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5539 SET_DECL_VALUE_EXPR (placeholder
, x
);
5540 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5541 tree new_vard
= new_var
;
5542 if (omp_is_reference (var
))
5544 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5545 new_vard
= TREE_OPERAND (new_var
, 0);
5546 gcc_assert (DECL_P (new_vard
));
5548 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5550 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5551 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5554 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5558 if (new_vard
== new_var
)
5560 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
5561 SET_DECL_VALUE_EXPR (new_var
, ivar
);
5565 SET_DECL_VALUE_EXPR (new_vard
,
5566 build_fold_addr_expr (ivar
));
5567 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5569 x
= lang_hooks
.decls
.omp_clause_default_ctor
5570 (c
, unshare_expr (ivar
),
5571 build_outer_var_ref (var
, ctx
));
5572 if (rvarp
&& ctx
->for_simd_scan_phase
)
5575 gimplify_and_add (x
, &llist
[0]);
5576 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5578 gimplify_and_add (x
, &llist
[1]);
5585 gimplify_and_add (x
, &llist
[0]);
5587 tree ivar2
= unshare_expr (lvar
);
5588 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5589 x
= lang_hooks
.decls
.omp_clause_default_ctor
5590 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
5591 gimplify_and_add (x
, &llist
[0]);
5595 x
= lang_hooks
.decls
.omp_clause_default_ctor
5596 (c
, unshare_expr (rvar2
),
5597 build_outer_var_ref (var
, ctx
));
5598 gimplify_and_add (x
, &llist
[0]);
5601 /* For types that need construction, add another
5602 private var which will be default constructed
5603 and optionally initialized with
5604 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5605 loop we want to assign this value instead of
5606 constructing and destructing it in each
5608 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
5609 gimple_add_tmp_var (nv
);
5610 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
5614 x
= lang_hooks
.decls
.omp_clause_default_ctor
5615 (c
, nv
, build_outer_var_ref (var
, ctx
));
5616 gimplify_and_add (x
, ilist
);
5618 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5620 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5621 x
= DECL_VALUE_EXPR (new_vard
);
5623 if (new_vard
!= new_var
)
5624 vexpr
= build_fold_addr_expr (nv
);
5625 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5626 lower_omp (&tseq
, ctx
);
5627 SET_DECL_VALUE_EXPR (new_vard
, x
);
5628 gimple_seq_add_seq (ilist
, tseq
);
5629 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5632 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5634 gimplify_and_add (x
, dlist
);
5637 tree ref
= build_outer_var_ref (var
, ctx
);
5638 x
= unshare_expr (ivar
);
5639 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5641 gimplify_and_add (x
, &llist
[0]);
5643 ref
= build_outer_var_ref (var
, ctx
);
5644 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
5646 gimplify_and_add (x
, &llist
[3]);
5648 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5649 if (new_vard
== new_var
)
5650 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5652 SET_DECL_VALUE_EXPR (new_vard
,
5653 build_fold_addr_expr (lvar
));
5655 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5657 gimplify_and_add (x
, &llist
[1]);
5659 tree ivar2
= unshare_expr (lvar
);
5660 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5661 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
5663 gimplify_and_add (x
, &llist
[1]);
5667 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
5669 gimplify_and_add (x
, &llist
[1]);
5674 gimplify_and_add (x
, &llist
[0]);
5675 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5677 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5678 lower_omp (&tseq
, ctx
);
5679 gimple_seq_add_seq (&llist
[0], tseq
);
5681 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5682 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5683 lower_omp (&tseq
, ctx
);
5684 gimple_seq_add_seq (&llist
[1], tseq
);
5685 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5686 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5687 if (new_vard
== new_var
)
5688 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5690 SET_DECL_VALUE_EXPR (new_vard
,
5691 build_fold_addr_expr (lvar
));
5692 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5694 gimplify_and_add (x
, &llist
[1]);
5697 /* If this is a reference to constant size reduction var
5698 with placeholder, we haven't emitted the initializer
5699 for it because it is undesirable if SIMD arrays are used.
5700 But if they aren't used, we need to emit the deferred
5701 initialization now. */
5702 else if (omp_is_reference (var
) && is_simd
)
5703 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5705 tree lab2
= NULL_TREE
;
5709 if (!is_parallel_ctx (ctx
))
5711 tree condv
= create_tmp_var (boolean_type_node
);
5712 tree m
= build_simple_mem_ref (cond
);
5713 g
= gimple_build_assign (condv
, m
);
5714 gimple_seq_add_stmt (ilist
, g
);
5716 = create_artificial_label (UNKNOWN_LOCATION
);
5717 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5718 g
= gimple_build_cond (NE_EXPR
, condv
,
5721 gimple_seq_add_stmt (ilist
, g
);
5722 gimple_seq_add_stmt (ilist
,
5723 gimple_build_label (lab1
));
5725 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5727 gimple_seq_add_stmt (ilist
, g
);
5729 x
= lang_hooks
.decls
.omp_clause_default_ctor
5730 (c
, unshare_expr (new_var
),
5732 : build_outer_var_ref (var
, ctx
));
5734 gimplify_and_add (x
, ilist
);
5736 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5737 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5739 if (ctx
->for_simd_scan_phase
)
5742 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
5744 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
5745 gimple_add_tmp_var (nv
);
5746 ctx
->cb
.decl_map
->put (new_vard
, nv
);
5747 x
= lang_hooks
.decls
.omp_clause_default_ctor
5748 (c
, nv
, build_outer_var_ref (var
, ctx
));
5750 gimplify_and_add (x
, ilist
);
5751 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5753 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5755 if (new_vard
!= new_var
)
5756 vexpr
= build_fold_addr_expr (nv
);
5757 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5758 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5759 lower_omp (&tseq
, ctx
);
5760 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
5761 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
5762 gimple_seq_add_seq (ilist
, tseq
);
5764 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5765 if (is_simd
&& ctx
->scan_exclusive
)
5768 = create_tmp_var_raw (TREE_TYPE (new_var
));
5769 gimple_add_tmp_var (nv2
);
5770 ctx
->cb
.decl_map
->put (nv
, nv2
);
5771 x
= lang_hooks
.decls
.omp_clause_default_ctor
5772 (c
, nv2
, build_outer_var_ref (var
, ctx
));
5773 gimplify_and_add (x
, ilist
);
5774 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5776 gimplify_and_add (x
, dlist
);
5778 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5780 gimplify_and_add (x
, dlist
);
5783 && ctx
->scan_exclusive
5784 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
5786 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
5787 gimple_add_tmp_var (nv2
);
5788 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
5789 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5791 gimplify_and_add (x
, dlist
);
5793 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5797 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5799 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5800 lower_omp (&tseq
, ctx
);
5801 gimple_seq_add_seq (ilist
, tseq
);
5803 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5806 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5807 lower_omp (&tseq
, ctx
);
5808 gimple_seq_add_seq (dlist
, tseq
);
5809 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5811 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5815 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5822 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
5823 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
5824 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5829 tree lab2
= NULL_TREE
;
5830 /* GOMP_taskgroup_reduction_register memsets the whole
5831 array to zero. If the initializer is zero, we don't
5832 need to initialize it again, just mark it as ever
5833 used unconditionally, i.e. cond = true. */
5834 if (initializer_zerop (x
))
5836 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5838 gimple_seq_add_stmt (ilist
, g
);
5843 if (!cond) { cond = true; new_var = x; } */
5844 if (!is_parallel_ctx (ctx
))
5846 tree condv
= create_tmp_var (boolean_type_node
);
5847 tree m
= build_simple_mem_ref (cond
);
5848 g
= gimple_build_assign (condv
, m
);
5849 gimple_seq_add_stmt (ilist
, g
);
5851 = create_artificial_label (UNKNOWN_LOCATION
);
5852 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5853 g
= gimple_build_cond (NE_EXPR
, condv
,
5856 gimple_seq_add_stmt (ilist
, g
);
5857 gimple_seq_add_stmt (ilist
,
5858 gimple_build_label (lab1
));
5860 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5862 gimple_seq_add_stmt (ilist
, g
);
5863 gimplify_assign (new_var
, x
, ilist
);
5865 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5869 /* reduction(-:var) sums up the partial results, so it
5870 acts identically to reduction(+:var). */
5871 if (code
== MINUS_EXPR
)
5874 tree new_vard
= new_var
;
5875 if (is_simd
&& omp_is_reference (var
))
5877 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5878 new_vard
= TREE_OPERAND (new_var
, 0);
5879 gcc_assert (DECL_P (new_vard
));
5881 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5883 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5884 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5887 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5891 if (new_vard
!= new_var
)
5893 SET_DECL_VALUE_EXPR (new_vard
,
5894 build_fold_addr_expr (lvar
));
5895 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5898 tree ref
= build_outer_var_ref (var
, ctx
);
5902 if (ctx
->for_simd_scan_phase
)
5904 gimplify_assign (ivar
, ref
, &llist
[0]);
5905 ref
= build_outer_var_ref (var
, ctx
);
5906 gimplify_assign (ref
, rvar
, &llist
[3]);
5910 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
5915 simt_lane
= create_tmp_var (unsigned_type_node
);
5916 x
= build_call_expr_internal_loc
5917 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
5918 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
5919 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
5920 gimplify_assign (ivar
, x
, &llist
[2]);
5922 x
= build2 (code
, TREE_TYPE (ref
), ref
, ivar
);
5923 ref
= build_outer_var_ref (var
, ctx
);
5924 gimplify_assign (ref
, x
, &llist
[1]);
5929 if (omp_is_reference (var
) && is_simd
)
5930 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5931 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5932 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5934 gimplify_assign (new_var
, x
, ilist
);
5937 tree ref
= build_outer_var_ref (var
, ctx
);
5939 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
5940 ref
= build_outer_var_ref (var
, ctx
);
5941 gimplify_assign (ref
, x
, dlist
);
5954 tree clobber
= build_clobber (TREE_TYPE (tskred_avar
));
5955 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
5958 if (known_eq (sctx
.max_vf
, 1U))
5960 sctx
.is_simt
= false;
5961 if (ctx
->lastprivate_conditional_map
)
5963 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
5965 /* Signal to lower_omp_1 that it should use parent context. */
5966 ctx
->combined_into_simd_safelen1
= true;
5967 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5968 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5969 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5971 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
5972 omp_context
*outer
= ctx
->outer
;
5973 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
5974 outer
= outer
->outer
;
5975 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
5976 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
5977 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
5983 /* When not vectorized, treat lastprivate(conditional:) like
5984 normal lastprivate, as there will be just one simd lane
5985 writing the privatized variable. */
5986 delete ctx
->lastprivate_conditional_map
;
5987 ctx
->lastprivate_conditional_map
= NULL
;
5992 if (nonconst_simd_if
)
5994 if (sctx
.lane
== NULL_TREE
)
5996 sctx
.idx
= create_tmp_var (unsigned_type_node
);
5997 sctx
.lane
= create_tmp_var (unsigned_type_node
);
5999 /* FIXME: For now. */
6000 sctx
.is_simt
= false;
6003 if (sctx
.lane
|| sctx
.is_simt
)
6005 uid
= create_tmp_var (ptr_type_node
, "simduid");
6006 /* Don't want uninit warnings on simduid, it is always uninitialized,
6007 but we use it not for the value, but for the DECL_UID only. */
6008 TREE_NO_WARNING (uid
) = 1;
6009 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
6010 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
6011 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6012 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6014 /* Emit calls denoting privatized variables and initializing a pointer to
6015 structure that holds private variables as fields after ompdevlow pass. */
6018 sctx
.simt_eargs
[0] = uid
;
6020 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
6021 gimple_call_set_lhs (g
, uid
);
6022 gimple_seq_add_stmt (ilist
, g
);
6023 sctx
.simt_eargs
.release ();
6025 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
6026 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
6027 gimple_call_set_lhs (g
, simtrec
);
6028 gimple_seq_add_stmt (ilist
, g
);
6032 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6033 2 + (nonconst_simd_if
!= NULL
),
6034 uid
, integer_zero_node
,
6036 gimple_call_set_lhs (g
, sctx
.lane
);
6037 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
6038 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6039 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6040 build_int_cst (unsigned_type_node
, 0));
6041 gimple_seq_add_stmt (ilist
, g
);
6044 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6046 gimple_call_set_lhs (g
, sctx
.lastlane
);
6047 gimple_seq_add_stmt (dlist
, g
);
6048 gimple_seq_add_seq (dlist
, llist
[3]);
6050 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6053 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6054 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6055 gimple_call_set_lhs (g
, simt_vf
);
6056 gimple_seq_add_stmt (dlist
, g
);
6058 tree t
= build_int_cst (unsigned_type_node
, 1);
6059 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6060 gimple_seq_add_stmt (dlist
, g
);
6062 t
= build_int_cst (unsigned_type_node
, 0);
6063 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6064 gimple_seq_add_stmt (dlist
, g
);
6066 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6067 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6068 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6069 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6070 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6072 gimple_seq_add_seq (dlist
, llist
[2]);
6074 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6075 gimple_seq_add_stmt (dlist
, g
);
6077 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6078 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6079 gimple_seq_add_stmt (dlist
, g
);
6081 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
6083 for (int i
= 0; i
< 2; i
++)
6086 tree vf
= create_tmp_var (unsigned_type_node
);
6087 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
6088 gimple_call_set_lhs (g
, vf
);
6089 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
6090 gimple_seq_add_stmt (seq
, g
);
6091 tree t
= build_int_cst (unsigned_type_node
, 0);
6092 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6093 gimple_seq_add_stmt (seq
, g
);
6094 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6095 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6096 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6097 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
6098 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
6099 gimple_seq_add_seq (seq
, llist
[i
]);
6100 t
= build_int_cst (unsigned_type_node
, 1);
6101 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
6102 gimple_seq_add_stmt (seq
, g
);
6103 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
6104 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
6105 gimple_seq_add_stmt (seq
, g
);
6106 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
6111 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
6113 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
6114 gimple_seq_add_stmt (dlist
, g
);
6117 /* The copyin sequence is not to be executed by the main thread, since
6118 that would result in self-copies. Perhaps not visible to scalars,
6119 but it certainly is to C++ operator=. */
6122 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
6124 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
6125 build_int_cst (TREE_TYPE (x
), 0));
6126 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
6127 gimplify_and_add (x
, ilist
);
6130 /* If any copyin variable is passed by reference, we must ensure the
6131 master thread doesn't modify it before it is copied over in all
6132 threads. Similarly for variables in both firstprivate and
6133 lastprivate clauses we need to ensure the lastprivate copying
6134 happens after firstprivate copying in all threads. And similarly
6135 for UDRs if initializer expression refers to omp_orig. */
6136 if (copyin_by_ref
|| lastprivate_firstprivate
6137 || (reduction_omp_orig_ref
6138 && !ctx
->scan_inclusive
6139 && !ctx
->scan_exclusive
))
6141 /* Don't add any barrier for #pragma omp simd or
6142 #pragma omp distribute. */
6143 if (!is_task_ctx (ctx
)
6144 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
6145 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
6146 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
6149 /* If max_vf is non-zero, then we can use only a vectorization factor
6150 up to the max_vf we chose. So stick it into the safelen clause. */
6151 if (maybe_ne (sctx
.max_vf
, 0U))
6153 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
6154 OMP_CLAUSE_SAFELEN
);
6155 poly_uint64 safe_len
;
6157 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
6158 && maybe_gt (safe_len
, sctx
.max_vf
)))
6160 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
6161 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
6163 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6164 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6169 /* Create temporary variables for lastprivate(conditional:) implementation
6170 in context CTX with CLAUSES. */
6173 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
6175 tree iter_type
= NULL_TREE
;
6176 tree cond_ptr
= NULL_TREE
;
6177 tree iter_var
= NULL_TREE
;
6178 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6179 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
6180 tree next
= *clauses
;
6181 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6182 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6183 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6187 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
6189 if (iter_type
== NULL_TREE
)
6191 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
6192 iter_var
= create_tmp_var_raw (iter_type
);
6193 DECL_CONTEXT (iter_var
) = current_function_decl
;
6194 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6195 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6196 ctx
->block_vars
= iter_var
;
6198 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6199 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6200 OMP_CLAUSE_DECL (c3
) = iter_var
;
6201 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
6203 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6205 next
= OMP_CLAUSE_CHAIN (cc
);
6206 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6207 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
6208 ctx
->lastprivate_conditional_map
->put (o
, v
);
6211 if (iter_type
== NULL
)
6213 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
6215 struct omp_for_data fd
;
6216 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
6218 iter_type
= unsigned_type_for (fd
.iter_type
);
6220 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
6221 iter_type
= unsigned_type_node
;
6222 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
6226 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
6227 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6231 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
6232 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
6233 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
6234 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
6235 ctx
->block_vars
= cond_ptr
;
6236 c2
= build_omp_clause (UNKNOWN_LOCATION
,
6237 OMP_CLAUSE__CONDTEMP_
);
6238 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6239 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
6242 iter_var
= create_tmp_var_raw (iter_type
);
6243 DECL_CONTEXT (iter_var
) = current_function_decl
;
6244 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6245 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6246 ctx
->block_vars
= iter_var
;
6248 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6249 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6250 OMP_CLAUSE_DECL (c3
) = iter_var
;
6251 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
6252 OMP_CLAUSE_CHAIN (c2
) = c3
;
6253 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6255 tree v
= create_tmp_var_raw (iter_type
);
6256 DECL_CONTEXT (v
) = current_function_decl
;
6257 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
6258 DECL_CHAIN (v
) = ctx
->block_vars
;
6259 ctx
->block_vars
= v
;
6260 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6261 ctx
->lastprivate_conditional_map
->put (o
, v
);
6266 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6267 both parallel and workshare constructs. PREDICATE may be NULL if it's
6268 always true. BODY_P is the sequence to insert early initialization
6269 if needed, STMT_LIST is where the non-conditional lastprivate handling
6270 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6274 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
6275 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
6278 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
6279 bool par_clauses
= false;
6280 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
6281 unsigned HOST_WIDE_INT conditional_off
= 0;
6282 gimple_seq post_stmt_list
= NULL
;
6284 /* Early exit if there are no lastprivate or linear clauses. */
6285 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
6286 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
6287 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
6288 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
6290 if (clauses
== NULL
)
6292 /* If this was a workshare clause, see if it had been combined
6293 with its parallel. In that case, look for the clauses on the
6294 parallel statement itself. */
6295 if (is_parallel_ctx (ctx
))
6299 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6302 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6303 OMP_CLAUSE_LASTPRIVATE
);
6304 if (clauses
== NULL
)
6309 bool maybe_simt
= false;
6310 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6311 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
6313 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
6314 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
6316 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
6322 tree label_true
, arm1
, arm2
;
6323 enum tree_code pred_code
= TREE_CODE (predicate
);
6325 label
= create_artificial_label (UNKNOWN_LOCATION
);
6326 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
6327 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
6329 arm1
= TREE_OPERAND (predicate
, 0);
6330 arm2
= TREE_OPERAND (predicate
, 1);
6331 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6332 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6337 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6338 arm2
= boolean_false_node
;
6339 pred_code
= NE_EXPR
;
6343 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
6344 c
= fold_convert (integer_type_node
, c
);
6345 simtcond
= create_tmp_var (integer_type_node
);
6346 gimplify_assign (simtcond
, c
, stmt_list
);
6347 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
6349 c
= create_tmp_var (integer_type_node
);
6350 gimple_call_set_lhs (g
, c
);
6351 gimple_seq_add_stmt (stmt_list
, g
);
6352 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
6356 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
6357 gimple_seq_add_stmt (stmt_list
, stmt
);
6358 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
6361 tree cond_ptr
= NULL_TREE
;
6362 for (c
= clauses
; c
;)
6365 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6366 gimple_seq
*this_stmt_list
= stmt_list
;
6367 tree lab2
= NULL_TREE
;
6369 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6370 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6371 && ctx
->lastprivate_conditional_map
6372 && !ctx
->combined_into_simd_safelen1
)
6374 gcc_assert (body_p
);
6377 if (cond_ptr
== NULL_TREE
)
6379 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
6380 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
6382 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
6383 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6384 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
6385 gimplify_assign (v
, build_zero_cst (type
), body_p
);
6386 this_stmt_list
= cstmt_list
;
6388 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
6390 mem
= build2 (MEM_REF
, type
, cond_ptr
,
6391 build_int_cst (TREE_TYPE (cond_ptr
),
6393 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
6396 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
6397 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
6398 tree mem2
= copy_node (mem
);
6399 gimple_seq seq
= NULL
;
6400 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
6401 gimple_seq_add_seq (this_stmt_list
, seq
);
6402 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
6403 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6404 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
6405 gimple_seq_add_stmt (this_stmt_list
, g
);
6406 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
6407 gimplify_assign (mem2
, v
, this_stmt_list
);
6410 && ctx
->combined_into_simd_safelen1
6411 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6412 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6413 && ctx
->lastprivate_conditional_map
)
6414 this_stmt_list
= &post_stmt_list
;
6416 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6417 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6418 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
6420 var
= OMP_CLAUSE_DECL (c
);
6421 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6422 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
6423 && is_taskloop_ctx (ctx
))
6425 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
6426 new_var
= lookup_decl (var
, ctx
->outer
);
6430 new_var
= lookup_decl (var
, ctx
);
6431 /* Avoid uninitialized warnings for lastprivate and
6432 for linear iterators. */
6434 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6435 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
6436 TREE_NO_WARNING (new_var
) = 1;
6439 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
6441 tree val
= DECL_VALUE_EXPR (new_var
);
6442 if (TREE_CODE (val
) == ARRAY_REF
6443 && VAR_P (TREE_OPERAND (val
, 0))
6444 && lookup_attribute ("omp simd array",
6445 DECL_ATTRIBUTES (TREE_OPERAND (val
,
6448 if (lastlane
== NULL
)
6450 lastlane
= create_tmp_var (unsigned_type_node
);
6452 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6454 TREE_OPERAND (val
, 1));
6455 gimple_call_set_lhs (g
, lastlane
);
6456 gimple_seq_add_stmt (this_stmt_list
, g
);
6458 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
6459 TREE_OPERAND (val
, 0), lastlane
,
6460 NULL_TREE
, NULL_TREE
);
6461 TREE_THIS_NOTRAP (new_var
) = 1;
6464 else if (maybe_simt
)
6466 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
6467 ? DECL_VALUE_EXPR (new_var
)
6469 if (simtlast
== NULL
)
6471 simtlast
= create_tmp_var (unsigned_type_node
);
6472 gcall
*g
= gimple_build_call_internal
6473 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
6474 gimple_call_set_lhs (g
, simtlast
);
6475 gimple_seq_add_stmt (this_stmt_list
, g
);
6477 x
= build_call_expr_internal_loc
6478 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
6479 TREE_TYPE (val
), 2, val
, simtlast
);
6480 new_var
= unshare_expr (new_var
);
6481 gimplify_assign (new_var
, x
, this_stmt_list
);
6482 new_var
= unshare_expr (new_var
);
6485 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6486 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
6488 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
6489 gimple_seq_add_seq (this_stmt_list
,
6490 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
6491 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
6493 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6494 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
6496 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
6497 gimple_seq_add_seq (this_stmt_list
,
6498 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
6499 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
6503 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6504 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)
6505 && is_taskloop_ctx (ctx
))
6507 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
6509 if (is_global_var (ovar
))
6513 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
6514 if (omp_is_reference (var
))
6515 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6516 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
6517 gimplify_and_add (x
, this_stmt_list
);
6520 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
6524 c
= OMP_CLAUSE_CHAIN (c
);
6525 if (c
== NULL
&& !par_clauses
)
6527 /* If this was a workshare clause, see if it had been combined
6528 with its parallel. In that case, continue looking for the
6529 clauses also on the parallel statement itself. */
6530 if (is_parallel_ctx (ctx
))
6534 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6537 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6538 OMP_CLAUSE_LASTPRIVATE
);
6544 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
6545 gimple_seq_add_seq (stmt_list
, post_stmt_list
);
6548 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6549 (which might be a placeholder). INNER is true if this is an inner
6550 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6551 join markers. Generate the before-loop forking sequence in
6552 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6553 general form of these sequences is
6555 GOACC_REDUCTION_SETUP
6557 GOACC_REDUCTION_INIT
6559 GOACC_REDUCTION_FINI
6561 GOACC_REDUCTION_TEARDOWN. */
6564 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
6565 gcall
*fork
, gcall
*join
, gimple_seq
*fork_seq
,
6566 gimple_seq
*join_seq
, omp_context
*ctx
)
6568 gimple_seq before_fork
= NULL
;
6569 gimple_seq after_fork
= NULL
;
6570 gimple_seq before_join
= NULL
;
6571 gimple_seq after_join
= NULL
;
6572 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
6573 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
6574 unsigned offset
= 0;
6576 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6577 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
6579 tree orig
= OMP_CLAUSE_DECL (c
);
6580 tree var
= maybe_lookup_decl (orig
, ctx
);
6581 tree ref_to_res
= NULL_TREE
;
6582 tree incoming
, outgoing
, v1
, v2
, v3
;
6583 bool is_private
= false;
6585 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
6586 if (rcode
== MINUS_EXPR
)
6588 else if (rcode
== TRUTH_ANDIF_EXPR
)
6589 rcode
= BIT_AND_EXPR
;
6590 else if (rcode
== TRUTH_ORIF_EXPR
)
6591 rcode
= BIT_IOR_EXPR
;
6592 tree op
= build_int_cst (unsigned_type_node
, rcode
);
6597 incoming
= outgoing
= var
;
6601 /* See if an outer construct also reduces this variable. */
6602 omp_context
*outer
= ctx
;
6604 while (omp_context
*probe
= outer
->outer
)
6606 enum gimple_code type
= gimple_code (probe
->stmt
);
6611 case GIMPLE_OMP_FOR
:
6612 cls
= gimple_omp_for_clauses (probe
->stmt
);
6615 case GIMPLE_OMP_TARGET
:
6616 if (gimple_omp_target_kind (probe
->stmt
)
6617 != GF_OMP_TARGET_KIND_OACC_PARALLEL
)
6620 cls
= gimple_omp_target_clauses (probe
->stmt
);
6628 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
6629 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
6630 && orig
== OMP_CLAUSE_DECL (cls
))
6632 incoming
= outgoing
= lookup_decl (orig
, probe
);
6633 goto has_outer_reduction
;
6635 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
6636 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
6637 && orig
== OMP_CLAUSE_DECL (cls
))
6645 /* This is the outermost construct with this reduction,
6646 see if there's a mapping for it. */
6647 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
6648 && maybe_lookup_field (orig
, outer
) && !is_private
)
6650 ref_to_res
= build_receiver_ref (orig
, false, outer
);
6651 if (omp_is_reference (orig
))
6652 ref_to_res
= build_simple_mem_ref (ref_to_res
);
6654 tree type
= TREE_TYPE (var
);
6655 if (POINTER_TYPE_P (type
))
6656 type
= TREE_TYPE (type
);
6659 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
6663 /* Try to look at enclosing contexts for reduction var,
6664 use original if no mapping found. */
6666 omp_context
*c
= ctx
->outer
;
6669 t
= maybe_lookup_decl (orig
, c
);
6672 incoming
= outgoing
= (t
? t
: orig
);
6675 has_outer_reduction
:;
6679 ref_to_res
= integer_zero_node
;
6681 if (omp_is_reference (orig
))
6683 tree type
= TREE_TYPE (var
);
6684 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
6688 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
6689 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
6692 v1
= create_tmp_var (type
, id
);
6693 v2
= create_tmp_var (type
, id
);
6694 v3
= create_tmp_var (type
, id
);
6696 gimplify_assign (v1
, var
, fork_seq
);
6697 gimplify_assign (v2
, var
, fork_seq
);
6698 gimplify_assign (v3
, var
, fork_seq
);
6700 var
= build_simple_mem_ref (var
);
6701 v1
= build_simple_mem_ref (v1
);
6702 v2
= build_simple_mem_ref (v2
);
6703 v3
= build_simple_mem_ref (v3
);
6704 outgoing
= build_simple_mem_ref (outgoing
);
6706 if (!TREE_CONSTANT (incoming
))
6707 incoming
= build_simple_mem_ref (incoming
);
6712 /* Determine position in reduction buffer, which may be used
6713 by target. The parser has ensured that this is not a
6714 variable-sized type. */
6715 fixed_size_mode mode
6716 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
6717 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
6718 offset
= (offset
+ align
- 1) & ~(align
- 1);
6719 tree off
= build_int_cst (sizetype
, offset
);
6720 offset
+= GET_MODE_SIZE (mode
);
6724 init_code
= build_int_cst (integer_type_node
,
6725 IFN_GOACC_REDUCTION_INIT
);
6726 fini_code
= build_int_cst (integer_type_node
,
6727 IFN_GOACC_REDUCTION_FINI
);
6728 setup_code
= build_int_cst (integer_type_node
,
6729 IFN_GOACC_REDUCTION_SETUP
);
6730 teardown_code
= build_int_cst (integer_type_node
,
6731 IFN_GOACC_REDUCTION_TEARDOWN
);
6735 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6736 TREE_TYPE (var
), 6, setup_code
,
6737 unshare_expr (ref_to_res
),
6738 incoming
, level
, op
, off
);
6740 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6741 TREE_TYPE (var
), 6, init_code
,
6742 unshare_expr (ref_to_res
),
6743 v1
, level
, op
, off
);
6745 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6746 TREE_TYPE (var
), 6, fini_code
,
6747 unshare_expr (ref_to_res
),
6748 v2
, level
, op
, off
);
6750 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6751 TREE_TYPE (var
), 6, teardown_code
,
6752 ref_to_res
, v3
, level
, op
, off
);
6754 gimplify_assign (v1
, setup_call
, &before_fork
);
6755 gimplify_assign (v2
, init_call
, &after_fork
);
6756 gimplify_assign (v3
, fini_call
, &before_join
);
6757 gimplify_assign (outgoing
, teardown_call
, &after_join
);
6760 /* Now stitch things together. */
6761 gimple_seq_add_seq (fork_seq
, before_fork
);
6763 gimple_seq_add_stmt (fork_seq
, fork
);
6764 gimple_seq_add_seq (fork_seq
, after_fork
);
6766 gimple_seq_add_seq (join_seq
, before_join
);
6768 gimple_seq_add_stmt (join_seq
, join
);
6769 gimple_seq_add_seq (join_seq
, after_join
);
6772 /* Generate code to implement the REDUCTION clauses, append it
6773 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6774 that should be emitted also inside of the critical section,
6775 in that case clear *CLIST afterwards, otherwise leave it as is
6776 and let the caller emit it itself. */
6779 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
6780 gimple_seq
*clist
, omp_context
*ctx
)
6782 gimple_seq sub_seq
= NULL
;
6787 /* OpenACC loop reductions are handled elsewhere. */
6788 if (is_gimple_omp_oacc (ctx
->stmt
))
6791 /* SIMD reductions are handled in lower_rec_input_clauses. */
6792 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6793 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
6796 /* inscan reductions are handled elsewhere. */
6797 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
6800 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6801 update in that case, otherwise use a lock. */
6802 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
6803 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6804 && !OMP_CLAUSE_REDUCTION_TASK (c
))
6806 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
6807 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6809 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6819 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6821 tree var
, ref
, new_var
, orig_var
;
6822 enum tree_code code
;
6823 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6825 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
6826 || OMP_CLAUSE_REDUCTION_TASK (c
))
6829 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
6830 orig_var
= var
= OMP_CLAUSE_DECL (c
);
6831 if (TREE_CODE (var
) == MEM_REF
)
6833 var
= TREE_OPERAND (var
, 0);
6834 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
6835 var
= TREE_OPERAND (var
, 0);
6836 if (TREE_CODE (var
) == ADDR_EXPR
)
6837 var
= TREE_OPERAND (var
, 0);
6840 /* If this is a pointer or referenced based array
6841 section, the var could be private in the outer
6842 context e.g. on orphaned loop construct. Pretend this
6843 is private variable's outer reference. */
6844 ccode
= OMP_CLAUSE_PRIVATE
;
6845 if (TREE_CODE (var
) == INDIRECT_REF
)
6846 var
= TREE_OPERAND (var
, 0);
6849 if (is_variable_sized (var
))
6851 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
6852 var
= DECL_VALUE_EXPR (var
);
6853 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
6854 var
= TREE_OPERAND (var
, 0);
6855 gcc_assert (DECL_P (var
));
6858 new_var
= lookup_decl (var
, ctx
);
6859 if (var
== OMP_CLAUSE_DECL (c
) && omp_is_reference (var
))
6860 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6861 ref
= build_outer_var_ref (var
, ctx
, ccode
);
6862 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6864 /* reduction(-:var) sums up the partial results, so it acts
6865 identically to reduction(+:var). */
6866 if (code
== MINUS_EXPR
)
6871 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
6873 addr
= save_expr (addr
);
6874 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
6875 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (ref
), ref
, new_var
);
6876 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
6877 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
6878 gimplify_and_add (x
, stmt_seqp
);
6881 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6883 tree d
= OMP_CLAUSE_DECL (c
);
6884 tree type
= TREE_TYPE (d
);
6885 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
6886 tree i
= create_tmp_var (TREE_TYPE (v
));
6887 tree ptype
= build_pointer_type (TREE_TYPE (type
));
6888 tree bias
= TREE_OPERAND (d
, 1);
6889 d
= TREE_OPERAND (d
, 0);
6890 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
6892 tree b
= TREE_OPERAND (d
, 1);
6893 b
= maybe_lookup_decl (b
, ctx
);
6896 b
= TREE_OPERAND (d
, 1);
6897 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
6899 if (integer_zerop (bias
))
6903 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
6904 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
6905 TREE_TYPE (b
), b
, bias
);
6907 d
= TREE_OPERAND (d
, 0);
6909 /* For ref build_outer_var_ref already performs this, so
6910 only new_var needs a dereference. */
6911 if (TREE_CODE (d
) == INDIRECT_REF
)
6913 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6914 gcc_assert (omp_is_reference (var
) && var
== orig_var
);
6916 else if (TREE_CODE (d
) == ADDR_EXPR
)
6918 if (orig_var
== var
)
6920 new_var
= build_fold_addr_expr (new_var
);
6921 ref
= build_fold_addr_expr (ref
);
6926 gcc_assert (orig_var
== var
);
6927 if (omp_is_reference (var
))
6928 ref
= build_fold_addr_expr (ref
);
6932 tree t
= maybe_lookup_decl (v
, ctx
);
6936 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
6937 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
6939 if (!integer_zerop (bias
))
6941 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
6942 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
6943 TREE_TYPE (new_var
), new_var
,
6944 unshare_expr (bias
));
6945 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
6946 TREE_TYPE (ref
), ref
, bias
);
6948 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
6949 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
6950 tree m
= create_tmp_var (ptype
);
6951 gimplify_assign (m
, new_var
, stmt_seqp
);
6953 m
= create_tmp_var (ptype
);
6954 gimplify_assign (m
, ref
, stmt_seqp
);
6956 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
6957 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6958 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6959 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
6960 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6961 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
6962 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6964 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6965 tree decl_placeholder
6966 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
6967 SET_DECL_VALUE_EXPR (placeholder
, out
);
6968 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6969 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
6970 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
6971 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
6972 gimple_seq_add_seq (&sub_seq
,
6973 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
6974 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6975 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
6976 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
6980 x
= build2 (code
, TREE_TYPE (out
), out
, priv
);
6981 out
= unshare_expr (out
);
6982 gimplify_assign (out
, x
, &sub_seq
);
6984 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
6985 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
6986 gimple_seq_add_stmt (&sub_seq
, g
);
6987 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
6988 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
6989 gimple_seq_add_stmt (&sub_seq
, g
);
6990 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
6991 build_int_cst (TREE_TYPE (i
), 1));
6992 gimple_seq_add_stmt (&sub_seq
, g
);
6993 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
6994 gimple_seq_add_stmt (&sub_seq
, g
);
6995 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
6997 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6999 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7001 if (omp_is_reference (var
)
7002 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7004 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
7005 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7006 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7007 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7008 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7009 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7010 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7014 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
7015 ref
= build_outer_var_ref (var
, ctx
);
7016 gimplify_assign (ref
, x
, &sub_seq
);
7020 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
7022 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7024 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
7028 gimple_seq_add_seq (stmt_seqp
, *clist
);
7032 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
7034 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7038 /* Generate code to implement the COPYPRIVATE clauses. */
7041 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
7046 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7048 tree var
, new_var
, ref
, x
;
7050 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7052 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
7055 var
= OMP_CLAUSE_DECL (c
);
7056 by_ref
= use_pointer_for_field (var
, NULL
);
7058 ref
= build_sender_ref (var
, ctx
);
7059 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
7062 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
7063 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
7065 gimplify_assign (ref
, x
, slist
);
7067 ref
= build_receiver_ref (var
, false, ctx
);
7070 ref
= fold_convert_loc (clause_loc
,
7071 build_pointer_type (TREE_TYPE (new_var
)),
7073 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
7075 if (omp_is_reference (var
))
7077 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
7078 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
7079 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7081 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
7082 gimplify_and_add (x
, rlist
);
7087 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7088 and REDUCTION from the sender (aka parent) side. */
7091 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
7095 int ignored_looptemp
= 0;
7096 bool is_taskloop
= false;
7098 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7099 by GOMP_taskloop. */
7100 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
7102 ignored_looptemp
= 2;
7106 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7108 tree val
, ref
, x
, var
;
7109 bool by_ref
, do_in
= false, do_out
= false;
7110 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7112 switch (OMP_CLAUSE_CODE (c
))
7114 case OMP_CLAUSE_PRIVATE
:
7115 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7118 case OMP_CLAUSE_FIRSTPRIVATE
:
7119 case OMP_CLAUSE_COPYIN
:
7120 case OMP_CLAUSE_LASTPRIVATE
:
7121 case OMP_CLAUSE_IN_REDUCTION
:
7122 case OMP_CLAUSE__REDUCTEMP_
:
7124 case OMP_CLAUSE_REDUCTION
:
7125 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
7128 case OMP_CLAUSE_SHARED
:
7129 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7132 case OMP_CLAUSE__LOOPTEMP_
:
7133 if (ignored_looptemp
)
7143 val
= OMP_CLAUSE_DECL (c
);
7144 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7145 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
7146 && TREE_CODE (val
) == MEM_REF
)
7148 val
= TREE_OPERAND (val
, 0);
7149 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
7150 val
= TREE_OPERAND (val
, 0);
7151 if (TREE_CODE (val
) == INDIRECT_REF
7152 || TREE_CODE (val
) == ADDR_EXPR
)
7153 val
= TREE_OPERAND (val
, 0);
7154 if (is_variable_sized (val
))
7158 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7159 outer taskloop region. */
7160 omp_context
*ctx_for_o
= ctx
;
7162 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
7163 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7164 ctx_for_o
= ctx
->outer
;
7166 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
7168 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
7169 && is_global_var (var
)
7170 && (val
== OMP_CLAUSE_DECL (c
)
7171 || !is_task_ctx (ctx
)
7172 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
7173 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
7174 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
7175 != POINTER_TYPE
)))))
7178 t
= omp_member_access_dummy_var (var
);
7181 var
= DECL_VALUE_EXPR (var
);
7182 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
7184 var
= unshare_and_remap (var
, t
, o
);
7186 var
= unshare_expr (var
);
7189 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
7191 /* Handle taskloop firstprivate/lastprivate, where the
7192 lastprivate on GIMPLE_OMP_TASK is represented as
7193 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7194 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
7195 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
7196 if (use_pointer_for_field (val
, ctx
))
7197 var
= build_fold_addr_expr (var
);
7198 gimplify_assign (x
, var
, ilist
);
7199 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
7203 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7204 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
7205 || val
== OMP_CLAUSE_DECL (c
))
7206 && is_variable_sized (val
))
7208 by_ref
= use_pointer_for_field (val
, NULL
);
7210 switch (OMP_CLAUSE_CODE (c
))
7212 case OMP_CLAUSE_FIRSTPRIVATE
:
7213 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
7215 && is_task_ctx (ctx
))
7216 TREE_NO_WARNING (var
) = 1;
7220 case OMP_CLAUSE_PRIVATE
:
7221 case OMP_CLAUSE_COPYIN
:
7222 case OMP_CLAUSE__LOOPTEMP_
:
7223 case OMP_CLAUSE__REDUCTEMP_
:
7227 case OMP_CLAUSE_LASTPRIVATE
:
7228 if (by_ref
|| omp_is_reference (val
))
7230 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
7237 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
7242 case OMP_CLAUSE_REDUCTION
:
7243 case OMP_CLAUSE_IN_REDUCTION
:
7245 if (val
== OMP_CLAUSE_DECL (c
))
7247 if (is_task_ctx (ctx
))
7248 by_ref
= use_pointer_for_field (val
, ctx
);
7250 do_out
= !(by_ref
|| omp_is_reference (val
));
7253 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
7262 ref
= build_sender_ref (val
, ctx
);
7263 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
7264 gimplify_assign (ref
, x
, ilist
);
7265 if (is_task_ctx (ctx
))
7266 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
7271 ref
= build_sender_ref (val
, ctx
);
7272 gimplify_assign (var
, ref
, olist
);
7277 /* Generate code to implement SHARED from the sender (aka parent)
7278 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7279 list things that got automatically shared. */
7282 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
7284 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
7286 if (ctx
->record_type
== NULL
)
7289 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
7290 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
7292 ovar
= DECL_ABSTRACT_ORIGIN (f
);
7293 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
7296 nvar
= maybe_lookup_decl (ovar
, ctx
);
7297 if (!nvar
|| !DECL_HAS_VALUE_EXPR_P (nvar
))
7300 /* If CTX is a nested parallel directive. Find the immediately
7301 enclosing parallel or workshare construct that contains a
7302 mapping for OVAR. */
7303 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
7305 t
= omp_member_access_dummy_var (var
);
7308 var
= DECL_VALUE_EXPR (var
);
7309 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
7311 var
= unshare_and_remap (var
, t
, o
);
7313 var
= unshare_expr (var
);
7316 if (use_pointer_for_field (ovar
, ctx
))
7318 x
= build_sender_ref (ovar
, ctx
);
7319 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
7320 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
7322 gcc_assert (is_parallel_ctx (ctx
)
7323 && DECL_ARTIFICIAL (ovar
));
7324 /* _condtemp_ clause. */
7325 var
= build_constructor (TREE_TYPE (x
), NULL
);
7328 var
= build_fold_addr_expr (var
);
7329 gimplify_assign (x
, var
, ilist
);
7333 x
= build_sender_ref (ovar
, ctx
);
7334 gimplify_assign (x
, var
, ilist
);
7336 if (!TREE_READONLY (var
)
7337 /* We don't need to receive a new reference to a result
7338 or parm decl. In fact we may not store to it as we will
7339 invalidate any pending RSO and generate wrong gimple
7341 && !((TREE_CODE (var
) == RESULT_DECL
7342 || TREE_CODE (var
) == PARM_DECL
)
7343 && DECL_BY_REFERENCE (var
)))
7345 x
= build_sender_ref (ovar
, ctx
);
7346 gimplify_assign (var
, x
, olist
);
7352 /* Emit an OpenACC head marker call, encapulating the partitioning and
7353 other information that must be processed by the target compiler.
7354 Return the maximum number of dimensions the associated loop might
7355 be partitioned over. */
7358 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
7359 gimple_seq
*seq
, omp_context
*ctx
)
7361 unsigned levels
= 0;
7363 tree gang_static
= NULL_TREE
;
7364 auto_vec
<tree
, 5> args
;
7366 args
.quick_push (build_int_cst
7367 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
7368 args
.quick_push (ddvar
);
7369 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7371 switch (OMP_CLAUSE_CODE (c
))
7373 case OMP_CLAUSE_GANG
:
7374 tag
|= OLF_DIM_GANG
;
7375 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
7376 /* static:* is represented by -1, and we can ignore it, as
7377 scheduling is always static. */
7378 if (gang_static
&& integer_minus_onep (gang_static
))
7379 gang_static
= NULL_TREE
;
7383 case OMP_CLAUSE_WORKER
:
7384 tag
|= OLF_DIM_WORKER
;
7388 case OMP_CLAUSE_VECTOR
:
7389 tag
|= OLF_DIM_VECTOR
;
7393 case OMP_CLAUSE_SEQ
:
7397 case OMP_CLAUSE_AUTO
:
7401 case OMP_CLAUSE_INDEPENDENT
:
7402 tag
|= OLF_INDEPENDENT
;
7405 case OMP_CLAUSE_TILE
:
7416 if (DECL_P (gang_static
))
7417 gang_static
= build_outer_var_ref (gang_static
, ctx
);
7418 tag
|= OLF_GANG_STATIC
;
7421 /* In a parallel region, loops are implicitly INDEPENDENT. */
7422 omp_context
*tgt
= enclosing_target_ctx (ctx
);
7423 if (!tgt
|| is_oacc_parallel (tgt
))
7424 tag
|= OLF_INDEPENDENT
;
7427 /* Tiling could use all 3 levels. */
7431 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7432 Ensure at least one level, or 2 for possible auto
7434 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
7435 << OLF_DIM_BASE
) | OLF_SEQ
));
7437 if (levels
< 1u + maybe_auto
)
7438 levels
= 1u + maybe_auto
;
7441 args
.quick_push (build_int_cst (integer_type_node
, levels
));
7442 args
.quick_push (build_int_cst (integer_type_node
, tag
));
7444 args
.quick_push (gang_static
);
7446 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
7447 gimple_set_location (call
, loc
);
7448 gimple_set_lhs (call
, ddvar
);
7449 gimple_seq_add_stmt (seq
, call
);
7454 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7455 partitioning level of the enclosed region. */
7458 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
7459 tree tofollow
, gimple_seq
*seq
)
7461 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
7462 : IFN_UNIQUE_OACC_TAIL_MARK
);
7463 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
7464 int nargs
= 2 + (tofollow
!= NULL_TREE
);
7465 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
7466 marker
, ddvar
, tofollow
);
7467 gimple_set_location (call
, loc
);
7468 gimple_set_lhs (call
, ddvar
);
7469 gimple_seq_add_stmt (seq
, call
);
7472 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7473 the loop clauses, from which we extract reductions. Initialize
7477 lower_oacc_head_tail (location_t loc
, tree clauses
,
7478 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
7481 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
7482 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
7484 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
7485 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
7486 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
7489 for (unsigned done
= 1; count
; count
--, done
++)
7491 gimple_seq fork_seq
= NULL
;
7492 gimple_seq join_seq
= NULL
;
7494 tree place
= build_int_cst (integer_type_node
, -1);
7495 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7496 fork_kind
, ddvar
, place
);
7497 gimple_set_location (fork
, loc
);
7498 gimple_set_lhs (fork
, ddvar
);
7500 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7501 join_kind
, ddvar
, place
);
7502 gimple_set_location (join
, loc
);
7503 gimple_set_lhs (join
, ddvar
);
7505 /* Mark the beginning of this level sequence. */
7507 lower_oacc_loop_marker (loc
, ddvar
, true,
7508 build_int_cst (integer_type_node
, count
),
7510 lower_oacc_loop_marker (loc
, ddvar
, false,
7511 build_int_cst (integer_type_node
, done
),
7514 lower_oacc_reductions (loc
, clauses
, place
, inner
,
7515 fork
, join
, &fork_seq
, &join_seq
, ctx
);
7517 /* Append this level to head. */
7518 gimple_seq_add_seq (head
, fork_seq
);
7519 /* Prepend it to tail. */
7520 gimple_seq_add_seq (&join_seq
, *tail
);
7526 /* Mark the end of the sequence. */
7527 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
7528 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
7531 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7532 catch handler and return it. This prevents programs from violating the
7533 structured block semantics with throws. */
7536 maybe_catch_exception (gimple_seq body
)
7541 if (!flag_exceptions
)
7544 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
7545 decl
= lang_hooks
.eh_protect_cleanup_actions ();
7547 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
7549 g
= gimple_build_eh_must_not_throw (decl
);
7550 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
7553 return gimple_seq_alloc_with_stmt (g
);
7557 /* Routines to lower OMP directives into OMP-GIMPLE. */
7559 /* If ctx is a worksharing context inside of a cancellable parallel
7560 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7561 and conditional branch to parallel's cancel_label to handle
7562 cancellation in the implicit barrier. */
7565 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
7568 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
7569 if (gimple_omp_return_nowait_p (omp_return
))
7571 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
7572 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
7573 && outer
->cancellable
)
7575 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
7576 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
7577 tree lhs
= create_tmp_var (c_bool_type
);
7578 gimple_omp_return_set_lhs (omp_return
, lhs
);
7579 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
7580 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
7581 fold_convert (c_bool_type
,
7582 boolean_false_node
),
7583 outer
->cancel_label
, fallthru_label
);
7584 gimple_seq_add_stmt (body
, g
);
7585 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
7587 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
7591 /* Find the first task_reduction or reduction clause or return NULL
7592 if there are none. */
7595 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
7596 enum omp_clause_code ccode
)
7600 clauses
= omp_find_clause (clauses
, ccode
);
7601 if (clauses
== NULL_TREE
)
7603 if (ccode
!= OMP_CLAUSE_REDUCTION
7604 || code
== OMP_TASKLOOP
7605 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
7607 clauses
= OMP_CLAUSE_CHAIN (clauses
);
7611 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
7612 gimple_seq
*, gimple_seq
*);
7614 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7615 CTX is the enclosing OMP context for the current statement. */
7618 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7620 tree block
, control
;
7621 gimple_stmt_iterator tgsi
;
7622 gomp_sections
*stmt
;
7624 gbind
*new_stmt
, *bind
;
7625 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
7627 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
7629 push_gimplify_context ();
7635 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
7636 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
7637 tree rtmp
= NULL_TREE
;
7640 tree type
= build_pointer_type (pointer_sized_int_node
);
7641 tree temp
= create_tmp_var (type
);
7642 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
7643 OMP_CLAUSE_DECL (c
) = temp
;
7644 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
7645 gimple_omp_sections_set_clauses (stmt
, c
);
7646 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
7647 gimple_omp_sections_clauses (stmt
),
7648 &ilist
, &tred_dlist
);
7650 rtmp
= make_ssa_name (type
);
7651 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
7654 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
7655 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
7657 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
7658 &ilist
, &dlist
, ctx
, NULL
);
7660 control
= create_tmp_var (unsigned_type_node
, ".section");
7661 gimple_omp_sections_set_control (stmt
, control
);
7663 new_body
= gimple_omp_body (stmt
);
7664 gimple_omp_set_body (stmt
, NULL
);
7665 tgsi
= gsi_start (new_body
);
7666 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
7671 sec_start
= gsi_stmt (tgsi
);
7672 sctx
= maybe_lookup_ctx (sec_start
);
7675 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
7676 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
7677 GSI_CONTINUE_LINKING
);
7678 gimple_omp_set_body (sec_start
, NULL
);
7680 if (gsi_one_before_end_p (tgsi
))
7682 gimple_seq l
= NULL
;
7683 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
7684 &ilist
, &l
, &clist
, ctx
);
7685 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
7686 gimple_omp_section_set_last (sec_start
);
7689 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
7690 GSI_CONTINUE_LINKING
);
7693 block
= make_node (BLOCK
);
7694 bind
= gimple_build_bind (NULL
, new_body
, block
);
7697 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
7701 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
7702 gcall
*g
= gimple_build_call (fndecl
, 0);
7703 gimple_seq_add_stmt (&olist
, g
);
7704 gimple_seq_add_seq (&olist
, clist
);
7705 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
7706 g
= gimple_build_call (fndecl
, 0);
7707 gimple_seq_add_stmt (&olist
, g
);
7710 block
= make_node (BLOCK
);
7711 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
7712 gsi_replace (gsi_p
, new_stmt
, true);
7714 pop_gimplify_context (new_stmt
);
7715 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
7716 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
7717 if (BLOCK_VARS (block
))
7718 TREE_USED (block
) = 1;
7721 gimple_seq_add_seq (&new_body
, ilist
);
7722 gimple_seq_add_stmt (&new_body
, stmt
);
7723 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
7724 gimple_seq_add_stmt (&new_body
, bind
);
7726 t
= gimple_build_omp_continue (control
, control
);
7727 gimple_seq_add_stmt (&new_body
, t
);
7729 gimple_seq_add_seq (&new_body
, olist
);
7730 if (ctx
->cancellable
)
7731 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
7732 gimple_seq_add_seq (&new_body
, dlist
);
7734 new_body
= maybe_catch_exception (new_body
);
7736 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
7737 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
7738 t
= gimple_build_omp_return (nowait
);
7739 gimple_seq_add_stmt (&new_body
, t
);
7740 gimple_seq_add_seq (&new_body
, tred_dlist
);
7741 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
7744 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
7746 gimple_bind_set_body (new_stmt
, new_body
);
7750 /* A subroutine of lower_omp_single. Expand the simple form of
7751 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7753 if (GOMP_single_start ())
7755 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7757 FIXME. It may be better to delay expanding the logic of this until
7758 pass_expand_omp. The expanded logic may make the job more difficult
7759 to a synchronization analysis pass. */
7762 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
7764 location_t loc
= gimple_location (single_stmt
);
7765 tree tlabel
= create_artificial_label (loc
);
7766 tree flabel
= create_artificial_label (loc
);
7767 gimple
*call
, *cond
;
7770 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
7771 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
7772 call
= gimple_build_call (decl
, 0);
7773 gimple_call_set_lhs (call
, lhs
);
7774 gimple_seq_add_stmt (pre_p
, call
);
7776 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
7777 fold_convert_loc (loc
, TREE_TYPE (lhs
),
7780 gimple_seq_add_stmt (pre_p
, cond
);
7781 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
7782 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7783 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
7787 /* A subroutine of lower_omp_single. Expand the simple form of
7788 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7790 #pragma omp single copyprivate (a, b, c)
7792 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7795 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7801 GOMP_single_copy_end (©out);
7812 FIXME. It may be better to delay expanding the logic of this until
7813 pass_expand_omp. The expanded logic may make the job more difficult
7814 to a synchronization analysis pass. */
7817 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
7820 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
7821 gimple_seq copyin_seq
;
7822 location_t loc
= gimple_location (single_stmt
);
7824 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
7826 ptr_type
= build_pointer_type (ctx
->record_type
);
7827 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
7829 l0
= create_artificial_label (loc
);
7830 l1
= create_artificial_label (loc
);
7831 l2
= create_artificial_label (loc
);
7833 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
7834 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
7835 t
= fold_convert_loc (loc
, ptr_type
, t
);
7836 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
7838 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
7839 build_int_cst (ptr_type
, 0));
7840 t
= build3 (COND_EXPR
, void_type_node
, t
,
7841 build_and_jump (&l0
), build_and_jump (&l1
));
7842 gimplify_and_add (t
, pre_p
);
7844 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
7846 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7849 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
7852 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
7853 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
7854 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
7855 gimplify_and_add (t
, pre_p
);
7857 t
= build_and_jump (&l2
);
7858 gimplify_and_add (t
, pre_p
);
7860 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
7862 gimple_seq_add_seq (pre_p
, copyin_seq
);
7864 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
7868 /* Expand code for an OpenMP single directive. */
7871 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7874 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
7876 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
7878 push_gimplify_context ();
7880 block
= make_node (BLOCK
);
7881 bind
= gimple_build_bind (NULL
, NULL
, block
);
7882 gsi_replace (gsi_p
, bind
, true);
7885 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
7886 &bind_body
, &dlist
, ctx
, NULL
);
7887 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
7889 gimple_seq_add_stmt (&bind_body
, single_stmt
);
7891 if (ctx
->record_type
)
7892 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
7894 lower_omp_single_simple (single_stmt
, &bind_body
);
7896 gimple_omp_set_body (single_stmt
, NULL
);
7898 gimple_seq_add_seq (&bind_body
, dlist
);
7900 bind_body
= maybe_catch_exception (bind_body
);
7902 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
7903 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
7904 gimple
*g
= gimple_build_omp_return (nowait
);
7905 gimple_seq_add_stmt (&bind_body_tail
, g
);
7906 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
7907 if (ctx
->record_type
)
7909 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
7910 tree clobber
= build_clobber (ctx
->record_type
);
7911 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
7912 clobber
), GSI_SAME_STMT
);
7914 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
7915 gimple_bind_set_body (bind
, bind_body
);
7917 pop_gimplify_context (bind
);
7919 gimple_bind_append_vars (bind
, ctx
->block_vars
);
7920 BLOCK_VARS (block
) = ctx
->block_vars
;
7921 if (BLOCK_VARS (block
))
7922 TREE_USED (block
) = 1;
7926 /* Expand code for an OpenMP master directive. */
7929 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7931 tree block
, lab
= NULL
, x
, bfn_decl
;
7932 gimple
*stmt
= gsi_stmt (*gsi_p
);
7934 location_t loc
= gimple_location (stmt
);
7937 push_gimplify_context ();
7939 block
= make_node (BLOCK
);
7940 bind
= gimple_build_bind (NULL
, NULL
, block
);
7941 gsi_replace (gsi_p
, bind
, true);
7942 gimple_bind_add_stmt (bind
, stmt
);
7944 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
7945 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
7946 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
7947 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
7949 gimplify_and_add (x
, &tseq
);
7950 gimple_bind_add_seq (bind
, tseq
);
7952 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
7953 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
7954 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
7955 gimple_omp_set_body (stmt
, NULL
);
7957 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
7959 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
7961 pop_gimplify_context (bind
);
7963 gimple_bind_append_vars (bind
, ctx
->block_vars
);
7964 BLOCK_VARS (block
) = ctx
->block_vars
;
7967 /* Helper function for lower_omp_task_reductions. For a specific PASS
7968 find out the current clause it should be processed, or return false
7969 if all have been processed already. */
7972 omp_task_reduction_iterate (int pass
, enum tree_code code
,
7973 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
7974 tree
*type
, tree
*next
)
7976 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
7978 if (ccode
== OMP_CLAUSE_REDUCTION
7979 && code
!= OMP_TASKLOOP
7980 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
7982 *decl
= OMP_CLAUSE_DECL (*c
);
7983 *type
= TREE_TYPE (*decl
);
7984 if (TREE_CODE (*decl
) == MEM_REF
)
7991 if (omp_is_reference (*decl
))
7992 *type
= TREE_TYPE (*type
);
7993 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
7996 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
8005 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8006 OMP_TASKGROUP only with task modifier). Register mapping of those in
8007 START sequence and reducing them and unregister them in the END sequence. */
8010 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
8011 gimple_seq
*start
, gimple_seq
*end
)
8013 enum omp_clause_code ccode
8014 = (code
== OMP_TASKGROUP
8015 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
8016 tree cancellable
= NULL_TREE
;
8017 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
8018 if (clauses
== NULL_TREE
)
8020 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8022 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8023 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8024 && outer
->cancellable
)
8026 cancellable
= error_mark_node
;
8029 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
8032 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
8033 tree
*last
= &TYPE_FIELDS (record_type
);
8037 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8039 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8042 DECL_CHAIN (field
) = ifield
;
8043 last
= &DECL_CHAIN (ifield
);
8044 DECL_CONTEXT (field
) = record_type
;
8045 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8046 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8047 DECL_CONTEXT (ifield
) = record_type
;
8048 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
8049 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
8051 for (int pass
= 0; pass
< 2; pass
++)
8053 tree decl
, type
, next
;
8054 for (tree c
= clauses
;
8055 omp_task_reduction_iterate (pass
, code
, ccode
,
8056 &c
, &decl
, &type
, &next
); c
= next
)
8059 tree new_type
= type
;
8061 new_type
= remap_type (type
, &ctx
->outer
->cb
);
8063 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
8064 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
8066 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
8068 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
8069 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
8070 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
8073 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
8074 DECL_CONTEXT (field
) = record_type
;
8075 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8076 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8078 last
= &DECL_CHAIN (field
);
8080 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
8082 DECL_CONTEXT (bfield
) = record_type
;
8083 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
8084 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
8086 last
= &DECL_CHAIN (bfield
);
8090 layout_type (record_type
);
8092 /* Build up an array which registers with the runtime all the reductions
8093 and deregisters them at the end. Format documented in libgomp/task.c. */
8094 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
8095 tree avar
= create_tmp_var_raw (atype
);
8096 gimple_add_tmp_var (avar
);
8097 TREE_ADDRESSABLE (avar
) = 1;
8098 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
8099 NULL_TREE
, NULL_TREE
);
8100 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
8101 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8102 gimple_seq seq
= NULL
;
8103 tree sz
= fold_convert (pointer_sized_int_node
,
8104 TYPE_SIZE_UNIT (record_type
));
8106 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
8107 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
8108 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
8109 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
8110 ctx
->task_reductions
.create (1 + cnt
);
8111 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
8112 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
8114 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
8115 gimple_seq_add_seq (start
, seq
);
8116 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
8117 NULL_TREE
, NULL_TREE
);
8118 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
8119 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8120 NULL_TREE
, NULL_TREE
);
8121 t
= build_int_cst (pointer_sized_int_node
,
8122 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
8123 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8124 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
8125 NULL_TREE
, NULL_TREE
);
8126 t
= build_int_cst (pointer_sized_int_node
, -1);
8127 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8128 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
8129 NULL_TREE
, NULL_TREE
);
8130 t
= build_int_cst (pointer_sized_int_node
, 0);
8131 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8133 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8134 and for each task reduction checks a bool right after the private variable
8135 within that thread's chunk; if the bool is clear, it hasn't been
8136 initialized and thus isn't going to be reduced nor destructed, otherwise
8137 reduce and destruct it. */
8138 tree idx
= create_tmp_var (size_type_node
);
8139 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
8140 tree num_thr_sz
= create_tmp_var (size_type_node
);
8141 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
8142 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
8143 tree lab3
= NULL_TREE
;
8145 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8147 /* For worksharing constructs, only perform it in the master thread,
8148 with the exception of cancelled implicit barriers - then only handle
8149 the current thread. */
8150 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8151 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8152 tree thr_num
= create_tmp_var (integer_type_node
);
8153 g
= gimple_build_call (t
, 0);
8154 gimple_call_set_lhs (g
, thr_num
);
8155 gimple_seq_add_stmt (end
, g
);
8159 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8160 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8161 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8162 if (code
== OMP_FOR
)
8163 c
= gimple_omp_for_clauses (ctx
->stmt
);
8164 else /* if (code == OMP_SECTIONS) */
8165 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8166 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
8168 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
8170 gimple_seq_add_stmt (end
, g
);
8171 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8172 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
8173 gimple_seq_add_stmt (end
, g
);
8174 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
8175 build_one_cst (TREE_TYPE (idx
)));
8176 gimple_seq_add_stmt (end
, g
);
8177 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
8178 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8180 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
8181 gimple_seq_add_stmt (end
, g
);
8182 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8184 if (code
!= OMP_PARALLEL
)
8186 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
8187 tree num_thr
= create_tmp_var (integer_type_node
);
8188 g
= gimple_build_call (t
, 0);
8189 gimple_call_set_lhs (g
, num_thr
);
8190 gimple_seq_add_stmt (end
, g
);
8191 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
8192 gimple_seq_add_stmt (end
, g
);
8194 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
8198 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
8199 OMP_CLAUSE__REDUCTEMP_
);
8200 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
8201 t
= fold_convert (size_type_node
, t
);
8202 gimplify_assign (num_thr_sz
, t
, end
);
8204 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8205 NULL_TREE
, NULL_TREE
);
8206 tree data
= create_tmp_var (pointer_sized_int_node
);
8207 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
8208 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
8210 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
8211 ptr
= create_tmp_var (build_pointer_type (record_type
));
8213 ptr
= create_tmp_var (ptr_type_node
);
8214 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
8216 tree field
= TYPE_FIELDS (record_type
);
8219 field
= DECL_CHAIN (DECL_CHAIN (field
));
8220 for (int pass
= 0; pass
< 2; pass
++)
8222 tree decl
, type
, next
;
8223 for (tree c
= clauses
;
8224 omp_task_reduction_iterate (pass
, code
, ccode
,
8225 &c
, &decl
, &type
, &next
); c
= next
)
8227 tree var
= decl
, ref
;
8228 if (TREE_CODE (decl
) == MEM_REF
)
8230 var
= TREE_OPERAND (var
, 0);
8231 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
8232 var
= TREE_OPERAND (var
, 0);
8234 if (TREE_CODE (var
) == ADDR_EXPR
)
8235 var
= TREE_OPERAND (var
, 0);
8236 else if (TREE_CODE (var
) == INDIRECT_REF
)
8237 var
= TREE_OPERAND (var
, 0);
8238 tree orig_var
= var
;
8239 if (is_variable_sized (var
))
8241 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
8242 var
= DECL_VALUE_EXPR (var
);
8243 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
8244 var
= TREE_OPERAND (var
, 0);
8245 gcc_assert (DECL_P (var
));
8247 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8248 if (orig_var
!= var
)
8249 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
8250 else if (TREE_CODE (v
) == ADDR_EXPR
)
8251 t
= build_fold_addr_expr (t
);
8252 else if (TREE_CODE (v
) == INDIRECT_REF
)
8253 t
= build_fold_indirect_ref (t
);
8254 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
8256 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
8257 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
8258 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
8260 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
8261 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
8262 fold_convert (size_type_node
,
8263 TREE_OPERAND (decl
, 1)));
8267 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8268 if (!omp_is_reference (decl
))
8269 t
= build_fold_addr_expr (t
);
8271 t
= fold_convert (pointer_sized_int_node
, t
);
8273 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8274 gimple_seq_add_seq (start
, seq
);
8275 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8276 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8277 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8278 t
= unshare_expr (byte_position (field
));
8279 t
= fold_convert (pointer_sized_int_node
, t
);
8280 ctx
->task_reduction_map
->put (c
, cnt
);
8281 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
8284 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8285 gimple_seq_add_seq (start
, seq
);
8286 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8287 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
8288 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8290 tree bfield
= DECL_CHAIN (field
);
8292 if (code
== OMP_PARALLEL
|| code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8293 /* In parallel or worksharing all threads unconditionally
8294 initialize all their task reduction private variables. */
8295 cond
= boolean_true_node
;
8296 else if (TREE_TYPE (ptr
) == ptr_type_node
)
8298 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8299 unshare_expr (byte_position (bfield
)));
8301 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
8302 gimple_seq_add_seq (end
, seq
);
8303 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
8304 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
8305 build_int_cst (pbool
, 0));
8308 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
8309 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
8310 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8311 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8312 tree condv
= create_tmp_var (boolean_type_node
);
8313 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
8314 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
8316 gimple_seq_add_stmt (end
, g
);
8317 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
8318 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
8320 /* If this reduction doesn't need destruction and parallel
8321 has been cancelled, there is nothing to do for this
8322 reduction, so jump around the merge operation. */
8323 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8324 g
= gimple_build_cond (NE_EXPR
, cancellable
,
8325 build_zero_cst (TREE_TYPE (cancellable
)),
8327 gimple_seq_add_stmt (end
, g
);
8328 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8332 if (TREE_TYPE (ptr
) == ptr_type_node
)
8334 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8335 unshare_expr (byte_position (field
)));
8337 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
8338 gimple_seq_add_seq (end
, seq
);
8339 tree pbool
= build_pointer_type (TREE_TYPE (field
));
8340 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
8341 build_int_cst (pbool
, 0));
8344 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
8345 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
8347 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
8348 if (TREE_CODE (decl
) != MEM_REF
&& omp_is_reference (decl
))
8349 ref
= build_simple_mem_ref (ref
);
8350 /* reduction(-:var) sums up the partial results, so it acts
8351 identically to reduction(+:var). */
8352 if (rcode
== MINUS_EXPR
)
8354 if (TREE_CODE (decl
) == MEM_REF
)
8356 tree type
= TREE_TYPE (new_var
);
8357 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8358 tree i
= create_tmp_var (TREE_TYPE (v
));
8359 tree ptype
= build_pointer_type (TREE_TYPE (type
));
8362 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
8363 tree vv
= create_tmp_var (TREE_TYPE (v
));
8364 gimplify_assign (vv
, v
, start
);
8367 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8368 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8369 new_var
= build_fold_addr_expr (new_var
);
8370 new_var
= fold_convert (ptype
, new_var
);
8371 ref
= fold_convert (ptype
, ref
);
8372 tree m
= create_tmp_var (ptype
);
8373 gimplify_assign (m
, new_var
, end
);
8375 m
= create_tmp_var (ptype
);
8376 gimplify_assign (m
, ref
, end
);
8378 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
8379 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
8380 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
8381 gimple_seq_add_stmt (end
, gimple_build_label (body
));
8382 tree priv
= build_simple_mem_ref (new_var
);
8383 tree out
= build_simple_mem_ref (ref
);
8384 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8386 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8387 tree decl_placeholder
8388 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
8389 tree lab6
= NULL_TREE
;
8392 /* If this reduction needs destruction and parallel
8393 has been cancelled, jump around the merge operation
8394 to the destruction. */
8395 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8396 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8397 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8398 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8400 gimple_seq_add_stmt (end
, g
);
8401 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8403 SET_DECL_VALUE_EXPR (placeholder
, out
);
8404 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8405 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
8406 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
8407 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8408 gimple_seq_add_seq (end
,
8409 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8410 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8411 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8413 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8414 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
8417 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8418 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
8421 gimple_seq tseq
= NULL
;
8422 gimplify_stmt (&x
, &tseq
);
8423 gimple_seq_add_seq (end
, tseq
);
8428 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
8429 out
= unshare_expr (out
);
8430 gimplify_assign (out
, x
, end
);
8433 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
8434 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8435 gimple_seq_add_stmt (end
, g
);
8436 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
8437 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8438 gimple_seq_add_stmt (end
, g
);
8439 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
8440 build_int_cst (TREE_TYPE (i
), 1));
8441 gimple_seq_add_stmt (end
, g
);
8442 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
8443 gimple_seq_add_stmt (end
, g
);
8444 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
8446 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8448 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8449 tree oldv
= NULL_TREE
;
8450 tree lab6
= NULL_TREE
;
8453 /* If this reduction needs destruction and parallel
8454 has been cancelled, jump around the merge operation
8455 to the destruction. */
8456 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8457 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8458 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8459 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8461 gimple_seq_add_stmt (end
, g
);
8462 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8464 if (omp_is_reference (decl
)
8465 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
8467 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8468 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8469 tree refv
= create_tmp_var (TREE_TYPE (ref
));
8470 gimplify_assign (refv
, ref
, end
);
8471 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
8472 SET_DECL_VALUE_EXPR (placeholder
, ref
);
8473 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8474 tree d
= maybe_lookup_decl (decl
, ctx
);
8476 if (DECL_HAS_VALUE_EXPR_P (d
))
8477 oldv
= DECL_VALUE_EXPR (d
);
8478 if (omp_is_reference (var
))
8480 tree v
= fold_convert (TREE_TYPE (d
),
8481 build_fold_addr_expr (new_var
));
8482 SET_DECL_VALUE_EXPR (d
, v
);
8485 SET_DECL_VALUE_EXPR (d
, new_var
);
8486 DECL_HAS_VALUE_EXPR_P (d
) = 1;
8487 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8489 SET_DECL_VALUE_EXPR (d
, oldv
);
8492 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
8493 DECL_HAS_VALUE_EXPR_P (d
) = 0;
8495 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8496 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8497 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8498 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8500 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8501 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
8504 gimple_seq tseq
= NULL
;
8505 gimplify_stmt (&x
, &tseq
);
8506 gimple_seq_add_seq (end
, tseq
);
8511 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
8512 ref
= unshare_expr (ref
);
8513 gimplify_assign (ref
, x
, end
);
8515 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8517 field
= DECL_CHAIN (bfield
);
8521 if (code
== OMP_TASKGROUP
)
8523 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
8524 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8525 gimple_seq_add_stmt (start
, g
);
8530 if (code
== OMP_FOR
)
8531 c
= gimple_omp_for_clauses (ctx
->stmt
);
8532 else if (code
== OMP_SECTIONS
)
8533 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8535 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
8536 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
8537 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
8538 build_fold_addr_expr (avar
));
8539 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
8542 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
8543 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
8545 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
8546 gimple_seq_add_stmt (end
, g
);
8547 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
8548 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8550 enum built_in_function bfn
8551 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
8552 t
= builtin_decl_explicit (bfn
);
8553 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
8557 arg
= create_tmp_var (c_bool_type
);
8558 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
8562 arg
= build_int_cst (c_bool_type
, 0);
8563 g
= gimple_build_call (t
, 1, arg
);
8567 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
8568 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8570 gimple_seq_add_stmt (end
, g
);
8571 t
= build_constructor (atype
, NULL
);
8572 TREE_THIS_VOLATILE (t
) = 1;
8573 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
8576 /* Expand code for an OpenMP taskgroup directive. */
8579 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8581 gimple
*stmt
= gsi_stmt (*gsi_p
);
8584 gimple_seq dseq
= NULL
;
8585 tree block
= make_node (BLOCK
);
8587 bind
= gimple_build_bind (NULL
, NULL
, block
);
8588 gsi_replace (gsi_p
, bind
, true);
8589 gimple_bind_add_stmt (bind
, stmt
);
8591 push_gimplify_context ();
8593 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
8595 gimple_bind_add_stmt (bind
, x
);
8597 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
8598 gimple_omp_taskgroup_clauses (stmt
),
8599 gimple_bind_body_ptr (bind
), &dseq
);
8601 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8602 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8603 gimple_omp_set_body (stmt
, NULL
);
8605 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8606 gimple_bind_add_seq (bind
, dseq
);
8608 pop_gimplify_context (bind
);
8610 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8611 BLOCK_VARS (block
) = ctx
->block_vars
;
8615 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8618 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
8621 struct omp_for_data fd
;
8622 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
8625 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
8626 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
8627 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
8631 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
8632 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
8633 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
8634 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
8636 /* Merge depend clauses from multiple adjacent
8637 #pragma omp ordered depend(sink:...) constructs
8638 into one #pragma omp ordered depend(sink:...), so that
8639 we can optimize them together. */
8640 gimple_stmt_iterator gsi
= *gsi_p
;
8642 while (!gsi_end_p (gsi
))
8644 gimple
*stmt
= gsi_stmt (gsi
);
8645 if (is_gimple_debug (stmt
)
8646 || gimple_code (stmt
) == GIMPLE_NOP
)
8651 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
8653 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
8654 c
= gimple_omp_ordered_clauses (ord_stmt2
);
8656 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
8657 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8660 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
8662 gsi_remove (&gsi
, true);
8666 /* Canonicalize sink dependence clauses into one folded clause if
8669 The basic algorithm is to create a sink vector whose first
8670 element is the GCD of all the first elements, and whose remaining
8671 elements are the minimum of the subsequent columns.
8673 We ignore dependence vectors whose first element is zero because
8674 such dependencies are known to be executed by the same thread.
8676 We take into account the direction of the loop, so a minimum
8677 becomes a maximum if the loop is iterating forwards. We also
8678 ignore sink clauses where the loop direction is unknown, or where
8679 the offsets are clearly invalid because they are not a multiple
8680 of the loop increment.
8684 #pragma omp for ordered(2)
8685 for (i=0; i < N; ++i)
8686 for (j=0; j < M; ++j)
8688 #pragma omp ordered \
8689 depend(sink:i-8,j-2) \
8690 depend(sink:i,j-1) \ // Completely ignored because i+0.
8691 depend(sink:i-4,j-3) \
8692 depend(sink:i-6,j-4)
8693 #pragma omp ordered depend(source)
8698 depend(sink:-gcd(8,4,6),-min(2,3,4))
8703 /* FIXME: Computing GCD's where the first element is zero is
8704 non-trivial in the presence of collapsed loops. Do this later. */
8705 if (fd
.collapse
> 1)
8708 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
8710 /* wide_int is not a POD so it must be default-constructed. */
8711 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
8712 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
8714 tree folded_dep
= NULL_TREE
;
8715 /* TRUE if the first dimension's offset is negative. */
8716 bool neg_offset_p
= false;
8718 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
8720 while ((c
= *list_p
) != NULL
)
8722 bool remove
= false;
8724 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
8725 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8726 goto next_ordered_clause
;
8729 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
8730 vec
&& TREE_CODE (vec
) == TREE_LIST
;
8731 vec
= TREE_CHAIN (vec
), ++i
)
8733 gcc_assert (i
< len
);
8735 /* omp_extract_for_data has canonicalized the condition. */
8736 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
8737 || fd
.loops
[i
].cond_code
== GT_EXPR
);
8738 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
8739 bool maybe_lexically_later
= true;
8741 /* While the committee makes up its mind, bail if we have any
8742 non-constant steps. */
8743 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
8744 goto lower_omp_ordered_ret
;
8746 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
8747 if (POINTER_TYPE_P (itype
))
8749 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
8750 TYPE_PRECISION (itype
),
8753 /* Ignore invalid offsets that are not multiples of the step. */
8754 if (!wi::multiple_of_p (wi::abs (offset
),
8755 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
8758 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
8759 "ignoring sink clause with offset that is not "
8760 "a multiple of the loop step");
8762 goto next_ordered_clause
;
8765 /* Calculate the first dimension. The first dimension of
8766 the folded dependency vector is the GCD of the first
8767 elements, while ignoring any first elements whose offset
8771 /* Ignore dependence vectors whose first dimension is 0. */
8775 goto next_ordered_clause
;
8779 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
8781 error_at (OMP_CLAUSE_LOCATION (c
),
8782 "first offset must be in opposite direction "
8783 "of loop iterations");
8784 goto lower_omp_ordered_ret
;
8788 neg_offset_p
= forward
;
8789 /* Initialize the first time around. */
8790 if (folded_dep
== NULL_TREE
)
8793 folded_deps
[0] = offset
;
8796 folded_deps
[0] = wi::gcd (folded_deps
[0],
8800 /* Calculate minimum for the remaining dimensions. */
8803 folded_deps
[len
+ i
- 1] = offset
;
8804 if (folded_dep
== c
)
8805 folded_deps
[i
] = offset
;
8806 else if (maybe_lexically_later
8807 && !wi::eq_p (folded_deps
[i
], offset
))
8809 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
8813 for (j
= 1; j
<= i
; j
++)
8814 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
8817 maybe_lexically_later
= false;
8821 gcc_assert (i
== len
);
8825 next_ordered_clause
:
8827 *list_p
= OMP_CLAUSE_CHAIN (c
);
8829 list_p
= &OMP_CLAUSE_CHAIN (c
);
8835 folded_deps
[0] = -folded_deps
[0];
8837 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
8838 if (POINTER_TYPE_P (itype
))
8841 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
8842 = wide_int_to_tree (itype
, folded_deps
[0]);
8843 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
8844 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
8847 lower_omp_ordered_ret
:
8849 /* Ordered without clauses is #pragma omp threads, while we want
8850 a nop instead if we remove all clauses. */
8851 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
8852 gsi_replace (gsi_p
, gimple_build_nop (), true);
8856 /* Expand code for an OpenMP ordered directive. */
8859 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8862 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
8863 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
8866 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8868 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8871 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
8872 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8873 OMP_CLAUSE_THREADS
);
8875 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8878 /* FIXME: This is needs to be moved to the expansion to verify various
8879 conditions only testable on cfg with dominators computed, and also
8880 all the depend clauses to be merged still might need to be available
8881 for the runtime checks. */
8883 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
8887 push_gimplify_context ();
8889 block
= make_node (BLOCK
);
8890 bind
= gimple_build_bind (NULL
, NULL
, block
);
8891 gsi_replace (gsi_p
, bind
, true);
8892 gimple_bind_add_stmt (bind
, stmt
);
8896 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
8897 build_int_cst (NULL_TREE
, threads
));
8898 cfun
->has_simduid_loops
= true;
8901 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
8903 gimple_bind_add_stmt (bind
, x
);
8905 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
8908 counter
= create_tmp_var (integer_type_node
);
8909 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
8910 gimple_call_set_lhs (g
, counter
);
8911 gimple_bind_add_stmt (bind
, g
);
8913 body
= create_artificial_label (UNKNOWN_LOCATION
);
8914 test
= create_artificial_label (UNKNOWN_LOCATION
);
8915 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
8917 tree simt_pred
= create_tmp_var (integer_type_node
);
8918 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
8919 gimple_call_set_lhs (g
, simt_pred
);
8920 gimple_bind_add_stmt (bind
, g
);
8922 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
8923 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
8924 gimple_bind_add_stmt (bind
, g
);
8926 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
8928 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8929 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8930 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8931 gimple_omp_set_body (stmt
, NULL
);
8935 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
8936 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
8937 gimple_bind_add_stmt (bind
, g
);
8939 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
8940 tree nonneg
= create_tmp_var (integer_type_node
);
8941 gimple_seq tseq
= NULL
;
8942 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
8943 gimple_bind_add_seq (bind
, tseq
);
8945 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
8946 gimple_call_set_lhs (g
, nonneg
);
8947 gimple_bind_add_stmt (bind
, g
);
8949 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
8950 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
8951 gimple_bind_add_stmt (bind
, g
);
8953 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
8956 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
8957 build_int_cst (NULL_TREE
, threads
));
8959 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
8961 gimple_bind_add_stmt (bind
, x
);
8963 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8965 pop_gimplify_context (bind
);
8967 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8968 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8972 /* Expand code for an OpenMP scan directive and the structured block
8973 before the scan directive. */
8976 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8978 gimple
*stmt
= gsi_stmt (*gsi_p
);
8980 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
8981 tree lane
= NULL_TREE
;
8982 gimple_seq before
= NULL
;
8983 omp_context
*octx
= ctx
->outer
;
8985 if (octx
->scan_exclusive
&& !has_clauses
)
8987 gimple_stmt_iterator gsi2
= *gsi_p
;
8989 gimple
*stmt2
= gsi_stmt (gsi2
);
8990 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
8991 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
8992 the one with exclusive clause(s), comes first. */
8994 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
8995 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
8997 gsi_remove (gsi_p
, false);
8998 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
8999 ctx
= maybe_lookup_ctx (stmt2
);
9001 lower_omp_scan (gsi_p
, ctx
);
9006 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
9007 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
9008 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
9009 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
9010 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
9011 && !gimple_omp_for_combined_p (octx
->stmt
));
9012 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
9013 if (is_for_simd
&& octx
->for_simd_scan_phase
)
9016 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
9017 OMP_CLAUSE__SIMDUID_
))
9019 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
9020 lane
= create_tmp_var (unsigned_type_node
);
9021 tree t
= build_int_cst (integer_type_node
,
9023 : octx
->scan_inclusive
? 2 : 3);
9025 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
9026 gimple_call_set_lhs (g
, lane
);
9027 gimple_seq_add_stmt (&before
, g
);
9030 if (is_simd
|| is_for
)
9032 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
9033 c
; c
= OMP_CLAUSE_CHAIN (c
))
9034 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9035 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
9037 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9038 tree var
= OMP_CLAUSE_DECL (c
);
9039 tree new_var
= lookup_decl (var
, octx
);
9041 tree var2
= NULL_TREE
;
9042 tree var3
= NULL_TREE
;
9043 tree var4
= NULL_TREE
;
9044 tree lane0
= NULL_TREE
;
9045 tree new_vard
= new_var
;
9046 if (omp_is_reference (var
))
9048 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
9051 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
9053 val
= DECL_VALUE_EXPR (new_vard
);
9054 if (new_vard
!= new_var
)
9056 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
9057 val
= TREE_OPERAND (val
, 0);
9059 if (TREE_CODE (val
) == ARRAY_REF
9060 && VAR_P (TREE_OPERAND (val
, 0)))
9062 tree v
= TREE_OPERAND (val
, 0);
9063 if (lookup_attribute ("omp simd array",
9064 DECL_ATTRIBUTES (v
)))
9066 val
= unshare_expr (val
);
9067 lane0
= TREE_OPERAND (val
, 1);
9068 TREE_OPERAND (val
, 1) = lane
;
9069 var2
= lookup_decl (v
, octx
);
9070 if (octx
->scan_exclusive
)
9071 var4
= lookup_decl (var2
, octx
);
9073 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9074 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
9077 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
9078 var2
, lane
, NULL_TREE
, NULL_TREE
);
9079 TREE_THIS_NOTRAP (var2
) = 1;
9080 if (octx
->scan_exclusive
)
9082 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
9083 var4
, lane
, NULL_TREE
,
9085 TREE_THIS_NOTRAP (var4
) = 1;
9096 var2
= build_outer_var_ref (var
, octx
);
9097 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9099 var3
= maybe_lookup_decl (new_vard
, octx
);
9100 if (var3
== new_vard
|| var3
== NULL_TREE
)
9102 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
9104 var4
= maybe_lookup_decl (var3
, octx
);
9105 if (var4
== var3
|| var4
== NULL_TREE
)
9107 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
9118 && octx
->scan_exclusive
9120 && var4
== NULL_TREE
)
9121 var4
= create_tmp_var (TREE_TYPE (val
));
9123 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9125 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9130 /* If we've added a separate identity element
9131 variable, copy it over into val. */
9132 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9134 gimplify_and_add (x
, &before
);
9136 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
9138 /* Otherwise, assign to it the identity element. */
9139 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9141 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9142 tree ref
= build_outer_var_ref (var
, octx
);
9143 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9144 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9147 if (new_vard
!= new_var
)
9148 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9149 SET_DECL_VALUE_EXPR (new_vard
, val
);
9151 SET_DECL_VALUE_EXPR (placeholder
, ref
);
9152 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9153 lower_omp (&tseq
, octx
);
9155 SET_DECL_VALUE_EXPR (new_vard
, x
);
9156 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9157 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9158 gimple_seq_add_seq (&before
, tseq
);
9160 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9166 if (octx
->scan_exclusive
)
9168 tree v4
= unshare_expr (var4
);
9169 tree v2
= unshare_expr (var2
);
9170 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
9171 gimplify_and_add (x
, &before
);
9173 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9174 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9175 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9177 if (x
&& new_vard
!= new_var
)
9178 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
9180 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
9181 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9182 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9183 lower_omp (&tseq
, octx
);
9184 gimple_seq_add_seq (&before
, tseq
);
9185 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9187 SET_DECL_VALUE_EXPR (new_vard
, x
);
9188 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9189 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9190 if (octx
->scan_inclusive
)
9192 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9194 gimplify_and_add (x
, &before
);
9196 else if (lane0
== NULL_TREE
)
9198 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9200 gimplify_and_add (x
, &before
);
9208 /* input phase. Set val to initializer before
9210 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
9211 gimplify_assign (val
, x
, &before
);
9216 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
9217 if (code
== MINUS_EXPR
)
9220 tree x
= build2 (code
, TREE_TYPE (var2
),
9221 unshare_expr (var2
), unshare_expr (val
));
9222 if (octx
->scan_inclusive
)
9224 gimplify_assign (unshare_expr (var2
), x
, &before
);
9225 gimplify_assign (val
, var2
, &before
);
9229 gimplify_assign (unshare_expr (var4
),
9230 unshare_expr (var2
), &before
);
9231 gimplify_assign (var2
, x
, &before
);
9232 if (lane0
== NULL_TREE
)
9233 gimplify_assign (val
, var4
, &before
);
9237 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
9239 tree vexpr
= unshare_expr (var4
);
9240 TREE_OPERAND (vexpr
, 1) = lane0
;
9241 if (new_vard
!= new_var
)
9242 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
9243 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
9247 if (is_simd
&& !is_for_simd
)
9249 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
9250 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
9251 gsi_replace (gsi_p
, gimple_build_nop (), true);
9254 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
9257 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (stmt
));
9258 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
9263 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9264 substitution of a couple of function calls. But in the NAMED case,
9265 requires that languages coordinate a symbol name. It is therefore
9266 best put here in common code. */
9268 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
9271 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9274 tree name
, lock
, unlock
;
9275 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
9277 location_t loc
= gimple_location (stmt
);
9280 name
= gimple_omp_critical_name (stmt
);
9285 if (!critical_name_mutexes
)
9286 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
9288 tree
*n
= critical_name_mutexes
->get (name
);
9293 decl
= create_tmp_var_raw (ptr_type_node
);
9295 new_str
= ACONCAT ((".gomp_critical_user_",
9296 IDENTIFIER_POINTER (name
), NULL
));
9297 DECL_NAME (decl
) = get_identifier (new_str
);
9298 TREE_PUBLIC (decl
) = 1;
9299 TREE_STATIC (decl
) = 1;
9300 DECL_COMMON (decl
) = 1;
9301 DECL_ARTIFICIAL (decl
) = 1;
9302 DECL_IGNORED_P (decl
) = 1;
9304 varpool_node::finalize_decl (decl
);
9306 critical_name_mutexes
->put (name
, decl
);
9311 /* If '#pragma omp critical' is inside offloaded region or
9312 inside function marked as offloadable, the symbol must be
9313 marked as offloadable too. */
9315 if (cgraph_node::get (current_function_decl
)->offloadable
)
9316 varpool_node::get_create (decl
)->offloadable
= 1;
9318 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
9319 if (is_gimple_omp_offloaded (octx
->stmt
))
9321 varpool_node::get_create (decl
)->offloadable
= 1;
9325 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
9326 lock
= build_call_expr_loc (loc
, lock
, 1,
9327 build_fold_addr_expr_loc (loc
, decl
));
9329 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
9330 unlock
= build_call_expr_loc (loc
, unlock
, 1,
9331 build_fold_addr_expr_loc (loc
, decl
));
9335 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
9336 lock
= build_call_expr_loc (loc
, lock
, 0);
9338 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
9339 unlock
= build_call_expr_loc (loc
, unlock
, 0);
9342 push_gimplify_context ();
9344 block
= make_node (BLOCK
);
9345 bind
= gimple_build_bind (NULL
, NULL
, block
);
9346 gsi_replace (gsi_p
, bind
, true);
9347 gimple_bind_add_stmt (bind
, stmt
);
9349 tbody
= gimple_bind_body (bind
);
9350 gimplify_and_add (lock
, &tbody
);
9351 gimple_bind_set_body (bind
, tbody
);
9353 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9354 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9355 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9356 gimple_omp_set_body (stmt
, NULL
);
9358 tbody
= gimple_bind_body (bind
);
9359 gimplify_and_add (unlock
, &tbody
);
9360 gimple_bind_set_body (bind
, tbody
);
9362 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9364 pop_gimplify_context (bind
);
9365 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9366 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
9369 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9370 for a lastprivate clause. Given a loop control predicate of (V
9371 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9372 is appended to *DLIST, iterator initialization is appended to
9373 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9374 to be emitted in a critical section. */
9377 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
9378 gimple_seq
*dlist
, gimple_seq
*clist
,
9379 struct omp_context
*ctx
)
9381 tree clauses
, cond
, vinit
;
9382 enum tree_code cond_code
;
9385 cond_code
= fd
->loop
.cond_code
;
9386 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
9388 /* When possible, use a strict equality expression. This can let VRP
9389 type optimizations deduce the value and remove a copy. */
9390 if (tree_fits_shwi_p (fd
->loop
.step
))
9392 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
9393 if (step
== 1 || step
== -1)
9394 cond_code
= EQ_EXPR
;
9397 if (gimple_omp_for_kind (fd
->for_stmt
) == GF_OMP_FOR_KIND_GRID_LOOP
9398 || gimple_omp_for_grid_phony (fd
->for_stmt
))
9399 cond
= omp_grid_lastprivate_predicate (fd
);
9402 tree n2
= fd
->loop
.n2
;
9403 if (fd
->collapse
> 1
9404 && TREE_CODE (n2
) != INTEGER_CST
9405 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
9407 struct omp_context
*taskreg_ctx
= NULL
;
9408 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
9410 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
9411 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
9412 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
9414 if (gimple_omp_for_combined_into_p (gfor
))
9416 gcc_assert (ctx
->outer
->outer
9417 && is_parallel_ctx (ctx
->outer
->outer
));
9418 taskreg_ctx
= ctx
->outer
->outer
;
9422 struct omp_for_data outer_fd
;
9423 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
9424 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
9427 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
9428 taskreg_ctx
= ctx
->outer
->outer
;
9430 else if (is_taskreg_ctx (ctx
->outer
))
9431 taskreg_ctx
= ctx
->outer
;
9435 tree taskreg_clauses
9436 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
9437 tree innerc
= omp_find_clause (taskreg_clauses
,
9438 OMP_CLAUSE__LOOPTEMP_
);
9439 gcc_assert (innerc
);
9440 for (i
= 0; i
< fd
->collapse
; i
++)
9442 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
9443 OMP_CLAUSE__LOOPTEMP_
);
9444 gcc_assert (innerc
);
9446 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
9447 OMP_CLAUSE__LOOPTEMP_
);
9449 n2
= fold_convert (TREE_TYPE (n2
),
9450 lookup_decl (OMP_CLAUSE_DECL (innerc
),
9454 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
9457 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
9459 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
9460 if (!gimple_seq_empty_p (stmts
))
9462 gimple_seq_add_seq (&stmts
, *dlist
);
9465 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9466 vinit
= fd
->loop
.n1
;
9467 if (cond_code
== EQ_EXPR
9468 && tree_fits_shwi_p (fd
->loop
.n2
)
9469 && ! integer_zerop (fd
->loop
.n2
))
9470 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
9472 vinit
= unshare_expr (vinit
);
9474 /* Initialize the iterator variable, so that threads that don't execute
9475 any iterations don't execute the lastprivate clauses by accident. */
9476 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
9480 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9483 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
9484 struct walk_stmt_info
*wi
)
9486 gimple
*stmt
= gsi_stmt (*gsi_p
);
9488 *handled_ops_p
= true;
9489 switch (gimple_code (stmt
))
9493 case GIMPLE_OMP_FOR
:
9494 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
9495 && gimple_omp_for_combined_into_p (stmt
))
9496 *handled_ops_p
= false;
9499 case GIMPLE_OMP_SCAN
:
9500 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
9501 return integer_zero_node
;
9508 /* Helper function for lower_omp_for, add transformations for a worksharing
9509 loop with scan directives inside of it.
9510 For worksharing loop not combined with simd, transform:
9511 #pragma omp for reduction(inscan,+:r) private(i)
9512 for (i = 0; i < n; i = i + 1)
9517 #pragma omp scan inclusive(r)
9523 into two worksharing loops + code to merge results:
9525 num_threads = omp_get_num_threads ();
9526 thread_num = omp_get_thread_num ();
9527 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9532 // For UDRs this is UDR init, or if ctors are needed, copy from
9533 // var3 that has been constructed to contain the neutral element.
9537 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9538 // a shared array with num_threads elements and rprivb to a local array
9539 // number of elements equal to the number of (contiguous) iterations the
9540 // current thread will perform. controlb and controlp variables are
9541 // temporaries to handle deallocation of rprivb at the end of second
9543 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9544 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9545 for (i = 0; i < n; i = i + 1)
9548 // For UDRs this is UDR init or copy from var3.
9550 // This is the input phase from user code.
9554 // For UDRs this is UDR merge.
9556 // Rather than handing it over to the user, save to local thread's
9558 rprivb[ivar] = var2;
9559 // For exclusive scan, the above two statements are swapped.
9563 // And remember the final value from this thread's into the shared
9565 rpriva[(sizetype) thread_num] = var2;
9566 // If more than one thread, compute using Work-Efficient prefix sum
9567 // the inclusive parallel scan of the rpriva array.
9568 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9573 num_threadsu = (unsigned int) num_threads;
9574 thread_numup1 = (unsigned int) thread_num + 1;
9577 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9581 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9586 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9587 mul = REALPART_EXPR <cplx>;
9588 ovf = IMAGPART_EXPR <cplx>;
9589 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9592 andvm1 = andv + 4294967295;
9594 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9596 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9597 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9598 rpriva[l] = rpriva[l - k] + rpriva[l];
9600 if (down == 0) goto <D.2121>; else goto <D.2122>;
9608 if (k != 0) goto <D.2108>; else goto <D.2103>;
9610 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9612 // For UDRs this is UDR init or copy from var3.
9616 var2 = rpriva[thread_num - 1];
9619 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9620 reduction(inscan,+:r) private(i)
9621 for (i = 0; i < n; i = i + 1)
9624 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9625 r = var2 + rprivb[ivar];
9628 // This is the scan phase from user code.
9630 // Plus a bump of the iterator.
9636 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
9637 struct omp_for_data
*fd
, omp_context
*ctx
)
9639 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
9640 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
9642 gimple_seq body
= gimple_omp_body (stmt
);
9643 gimple_stmt_iterator input1_gsi
= gsi_none ();
9644 struct walk_stmt_info wi
;
9645 memset (&wi
, 0, sizeof (wi
));
9647 wi
.info
= (void *) &input1_gsi
;
9648 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
9649 gcc_assert (!gsi_end_p (input1_gsi
));
9651 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
9652 gimple_stmt_iterator gsi
= input1_gsi
;
9654 gimple_stmt_iterator scan1_gsi
= gsi
;
9655 gimple
*scan_stmt1
= gsi_stmt (gsi
);
9656 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
9658 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
9659 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
9660 gimple_omp_set_body (input_stmt1
, NULL
);
9661 gimple_omp_set_body (scan_stmt1
, NULL
);
9662 gimple_omp_set_body (stmt
, NULL
);
9664 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
9665 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
9666 gimple_omp_set_body (stmt
, body
);
9667 gimple_omp_set_body (input_stmt1
, input_body
);
9669 gimple_stmt_iterator input2_gsi
= gsi_none ();
9670 memset (&wi
, 0, sizeof (wi
));
9672 wi
.info
= (void *) &input2_gsi
;
9673 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
9674 gcc_assert (!gsi_end_p (input2_gsi
));
9676 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
9679 gimple_stmt_iterator scan2_gsi
= gsi
;
9680 gimple
*scan_stmt2
= gsi_stmt (gsi
);
9681 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
9682 gimple_omp_set_body (scan_stmt2
, scan_body
);
9684 gimple_stmt_iterator input3_gsi
= gsi_none ();
9685 gimple_stmt_iterator scan3_gsi
= gsi_none ();
9686 gimple_stmt_iterator input4_gsi
= gsi_none ();
9687 gimple_stmt_iterator scan4_gsi
= gsi_none ();
9688 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
9689 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
9690 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
9693 memset (&wi
, 0, sizeof (wi
));
9695 wi
.info
= (void *) &input3_gsi
;
9696 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
9697 gcc_assert (!gsi_end_p (input3_gsi
));
9699 input_stmt3
= gsi_stmt (input3_gsi
);
9703 scan_stmt3
= gsi_stmt (gsi
);
9704 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
9706 memset (&wi
, 0, sizeof (wi
));
9708 wi
.info
= (void *) &input4_gsi
;
9709 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
9710 gcc_assert (!gsi_end_p (input4_gsi
));
9712 input_stmt4
= gsi_stmt (input4_gsi
);
9716 scan_stmt4
= gsi_stmt (gsi
);
9717 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
9719 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
9720 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
9723 tree num_threads
= create_tmp_var (integer_type_node
);
9724 tree thread_num
= create_tmp_var (integer_type_node
);
9725 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
9726 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9727 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
9728 gimple_call_set_lhs (g
, num_threads
);
9729 gimple_seq_add_stmt (body_p
, g
);
9730 g
= gimple_build_call (threadnum_decl
, 0);
9731 gimple_call_set_lhs (g
, thread_num
);
9732 gimple_seq_add_stmt (body_p
, g
);
9734 tree ivar
= create_tmp_var (sizetype
);
9735 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
9736 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
9737 tree k
= create_tmp_var (unsigned_type_node
);
9738 tree l
= create_tmp_var (unsigned_type_node
);
9740 gimple_seq clist
= NULL
, mdlist
= NULL
;
9741 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
9742 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
9743 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
9744 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
9745 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
9746 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9747 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
9749 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9750 tree var
= OMP_CLAUSE_DECL (c
);
9751 tree new_var
= lookup_decl (var
, ctx
);
9752 tree var3
= NULL_TREE
;
9753 tree new_vard
= new_var
;
9754 if (omp_is_reference (var
))
9755 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
9756 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9758 var3
= maybe_lookup_decl (new_vard
, ctx
);
9759 if (var3
== new_vard
)
9763 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
9764 tree rpriva
= create_tmp_var (ptype
);
9765 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
9766 OMP_CLAUSE_DECL (nc
) = rpriva
;
9768 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9770 tree rprivb
= create_tmp_var (ptype
);
9771 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
9772 OMP_CLAUSE_DECL (nc
) = rprivb
;
9773 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
9775 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9777 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
9778 if (new_vard
!= new_var
)
9779 TREE_ADDRESSABLE (var2
) = 1;
9780 gimple_add_tmp_var (var2
);
9782 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
9783 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9784 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9785 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9786 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9788 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
9789 thread_num
, integer_minus_one_node
);
9790 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
9791 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9792 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9793 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9794 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9796 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
9797 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9798 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9799 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9800 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9802 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
9803 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
9804 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9805 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9806 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9807 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9809 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
9810 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9811 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
9812 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9814 tree var4
= is_for_simd
? new_var
: var2
;
9815 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
9818 var5
= lookup_decl (var
, input_simd_ctx
);
9819 var6
= lookup_decl (var
, scan_simd_ctx
);
9820 if (new_vard
!= new_var
)
9822 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
9823 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
9826 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9828 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9831 x
= lang_hooks
.decls
.omp_clause_default_ctor
9832 (c
, var2
, build_outer_var_ref (var
, ctx
));
9834 gimplify_and_add (x
, &clist
);
9836 x
= build_outer_var_ref (var
, ctx
);
9837 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
9839 gimplify_and_add (x
, &thr01_list
);
9841 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9842 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9845 x
= unshare_expr (var4
);
9846 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
9847 gimplify_and_add (x
, &thrn1_list
);
9848 x
= unshare_expr (var4
);
9849 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
9850 gimplify_and_add (x
, &thr02_list
);
9852 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
9854 /* Otherwise, assign to it the identity element. */
9855 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9856 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9859 if (new_vard
!= new_var
)
9860 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9861 SET_DECL_VALUE_EXPR (new_vard
, val
);
9862 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
9864 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
9865 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9866 lower_omp (&tseq
, ctx
);
9867 gimple_seq_add_seq (&thrn1_list
, tseq
);
9868 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9869 lower_omp (&tseq
, ctx
);
9870 gimple_seq_add_seq (&thr02_list
, tseq
);
9871 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9872 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9873 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9875 SET_DECL_VALUE_EXPR (new_vard
, y
);
9878 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
9879 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
9883 x
= unshare_expr (var4
);
9884 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
9885 gimplify_and_add (x
, &thrn2_list
);
9889 x
= unshare_expr (rprivb_ref
);
9890 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
9891 gimplify_and_add (x
, &scan1_list
);
9895 if (ctx
->scan_exclusive
)
9897 x
= unshare_expr (rprivb_ref
);
9898 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
9899 gimplify_and_add (x
, &scan1_list
);
9902 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9903 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9904 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9905 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9906 lower_omp (&tseq
, ctx
);
9907 gimple_seq_add_seq (&scan1_list
, tseq
);
9909 if (ctx
->scan_inclusive
)
9911 x
= unshare_expr (rprivb_ref
);
9912 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
9913 gimplify_and_add (x
, &scan1_list
);
9917 x
= unshare_expr (rpriva_ref
);
9918 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
9919 unshare_expr (var4
));
9920 gimplify_and_add (x
, &mdlist
);
9922 x
= unshare_expr (is_for_simd
? var6
: new_var
);
9923 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
9924 gimplify_and_add (x
, &input2_list
);
9927 if (new_vard
!= new_var
)
9928 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9930 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9931 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9932 SET_DECL_VALUE_EXPR (new_vard
, val
);
9933 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
9936 SET_DECL_VALUE_EXPR (placeholder
, var6
);
9937 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9940 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9941 lower_omp (&tseq
, ctx
);
9943 SET_DECL_VALUE_EXPR (new_vard
, y
);
9946 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
9947 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
9951 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
9952 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9953 lower_omp (&tseq
, ctx
);
9955 gimple_seq_add_seq (&input2_list
, tseq
);
9957 x
= build_outer_var_ref (var
, ctx
);
9958 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
9959 gimplify_and_add (x
, &last_list
);
9961 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
9962 gimplify_and_add (x
, &reduc_list
);
9963 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9964 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9966 if (new_vard
!= new_var
)
9967 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9968 SET_DECL_VALUE_EXPR (new_vard
, val
);
9969 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
9970 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9971 lower_omp (&tseq
, ctx
);
9972 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9973 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9974 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9976 SET_DECL_VALUE_EXPR (new_vard
, y
);
9979 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
9980 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
9982 gimple_seq_add_seq (&reduc_list
, tseq
);
9983 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
9984 gimplify_and_add (x
, &reduc_list
);
9986 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
9988 gimplify_and_add (x
, dlist
);
9992 x
= build_outer_var_ref (var
, ctx
);
9993 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
9995 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
9996 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
9998 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
10000 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
10002 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
10003 if (code
== MINUS_EXPR
)
10007 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
10010 if (ctx
->scan_exclusive
)
10011 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
10013 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
10014 gimplify_assign (var2
, x
, &scan1_list
);
10015 if (ctx
->scan_inclusive
)
10016 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
10020 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
10023 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
10024 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
10026 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
10029 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
10030 unshare_expr (rprival_ref
));
10031 gimplify_assign (rprival_ref
, x
, &reduc_list
);
10035 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10036 gimple_seq_add_stmt (&scan1_list
, g
);
10037 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10038 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10039 ? scan_stmt4
: scan_stmt2
), g
);
10041 tree controlb
= create_tmp_var (boolean_type_node
);
10042 tree controlp
= create_tmp_var (ptr_type_node
);
10043 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10044 OMP_CLAUSE_DECL (nc
) = controlb
;
10045 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10047 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10048 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10049 OMP_CLAUSE_DECL (nc
) = controlp
;
10050 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10052 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10053 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10054 OMP_CLAUSE_DECL (nc
) = controlb
;
10055 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10057 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10058 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10059 OMP_CLAUSE_DECL (nc
) = controlp
;
10060 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10062 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10064 *cp1
= gimple_omp_for_clauses (stmt
);
10065 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
10066 *cp2
= gimple_omp_for_clauses (new_stmt
);
10067 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
10071 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
10072 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
10074 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
10076 gsi_remove (&input3_gsi
, true);
10077 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
10079 gsi_remove (&scan3_gsi
, true);
10080 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
10082 gsi_remove (&input4_gsi
, true);
10083 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
10085 gsi_remove (&scan4_gsi
, true);
10089 gimple_omp_set_body (scan_stmt1
, scan1_list
);
10090 gimple_omp_set_body (input_stmt2
, input2_list
);
10093 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
10095 gsi_remove (&input1_gsi
, true);
10096 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
10098 gsi_remove (&scan1_gsi
, true);
10099 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
10101 gsi_remove (&input2_gsi
, true);
10102 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
10104 gsi_remove (&scan2_gsi
, true);
10106 gimple_seq_add_seq (body_p
, clist
);
10108 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10109 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10110 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10111 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
10112 gimple_seq_add_stmt (body_p
, g
);
10113 g
= gimple_build_label (lab1
);
10114 gimple_seq_add_stmt (body_p
, g
);
10115 gimple_seq_add_seq (body_p
, thr01_list
);
10116 g
= gimple_build_goto (lab3
);
10117 gimple_seq_add_stmt (body_p
, g
);
10118 g
= gimple_build_label (lab2
);
10119 gimple_seq_add_stmt (body_p
, g
);
10120 gimple_seq_add_seq (body_p
, thrn1_list
);
10121 g
= gimple_build_label (lab3
);
10122 gimple_seq_add_stmt (body_p
, g
);
10124 g
= gimple_build_assign (ivar
, size_zero_node
);
10125 gimple_seq_add_stmt (body_p
, g
);
10127 gimple_seq_add_stmt (body_p
, stmt
);
10128 gimple_seq_add_seq (body_p
, body
);
10129 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
10132 g
= gimple_build_omp_return (true);
10133 gimple_seq_add_stmt (body_p
, g
);
10134 gimple_seq_add_seq (body_p
, mdlist
);
10136 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10137 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10138 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
10139 gimple_seq_add_stmt (body_p
, g
);
10140 g
= gimple_build_label (lab1
);
10141 gimple_seq_add_stmt (body_p
, g
);
10143 g
= omp_build_barrier (NULL
);
10144 gimple_seq_add_stmt (body_p
, g
);
10146 tree down
= create_tmp_var (unsigned_type_node
);
10147 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
10148 gimple_seq_add_stmt (body_p
, g
);
10150 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
10151 gimple_seq_add_stmt (body_p
, g
);
10153 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
10154 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
10155 gimple_seq_add_stmt (body_p
, g
);
10157 tree thread_numu
= create_tmp_var (unsigned_type_node
);
10158 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
10159 gimple_seq_add_stmt (body_p
, g
);
10161 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
10162 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
10163 build_int_cst (unsigned_type_node
, 1));
10164 gimple_seq_add_stmt (body_p
, g
);
10166 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10167 g
= gimple_build_label (lab3
);
10168 gimple_seq_add_stmt (body_p
, g
);
10170 tree twok
= create_tmp_var (unsigned_type_node
);
10171 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
10172 gimple_seq_add_stmt (body_p
, g
);
10174 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
10175 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
10176 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
10177 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
10178 gimple_seq_add_stmt (body_p
, g
);
10179 g
= gimple_build_label (lab4
);
10180 gimple_seq_add_stmt (body_p
, g
);
10181 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
10182 gimple_seq_add_stmt (body_p
, g
);
10183 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10184 gimple_seq_add_stmt (body_p
, g
);
10186 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
10187 gimple_seq_add_stmt (body_p
, g
);
10188 g
= gimple_build_label (lab6
);
10189 gimple_seq_add_stmt (body_p
, g
);
10191 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10192 gimple_seq_add_stmt (body_p
, g
);
10194 g
= gimple_build_label (lab5
);
10195 gimple_seq_add_stmt (body_p
, g
);
10197 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
10198 gimple_seq_add_stmt (body_p
, g
);
10200 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
10201 DECL_GIMPLE_REG_P (cplx
) = 1;
10202 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
10203 gimple_call_set_lhs (g
, cplx
);
10204 gimple_seq_add_stmt (body_p
, g
);
10205 tree mul
= create_tmp_var (unsigned_type_node
);
10206 g
= gimple_build_assign (mul
, REALPART_EXPR
,
10207 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
10208 gimple_seq_add_stmt (body_p
, g
);
10209 tree ovf
= create_tmp_var (unsigned_type_node
);
10210 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
10211 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
10212 gimple_seq_add_stmt (body_p
, g
);
10214 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
10215 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
10216 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
10218 gimple_seq_add_stmt (body_p
, g
);
10219 g
= gimple_build_label (lab7
);
10220 gimple_seq_add_stmt (body_p
, g
);
10222 tree andv
= create_tmp_var (unsigned_type_node
);
10223 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
10224 gimple_seq_add_stmt (body_p
, g
);
10225 tree andvm1
= create_tmp_var (unsigned_type_node
);
10226 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
10227 build_minus_one_cst (unsigned_type_node
));
10228 gimple_seq_add_stmt (body_p
, g
);
10230 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
10231 gimple_seq_add_stmt (body_p
, g
);
10233 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
10234 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
10235 gimple_seq_add_stmt (body_p
, g
);
10236 g
= gimple_build_label (lab9
);
10237 gimple_seq_add_stmt (body_p
, g
);
10238 gimple_seq_add_seq (body_p
, reduc_list
);
10239 g
= gimple_build_label (lab8
);
10240 gimple_seq_add_stmt (body_p
, g
);
10242 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
10243 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
10244 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
10245 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
10247 gimple_seq_add_stmt (body_p
, g
);
10248 g
= gimple_build_label (lab10
);
10249 gimple_seq_add_stmt (body_p
, g
);
10250 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
10251 gimple_seq_add_stmt (body_p
, g
);
10252 g
= gimple_build_goto (lab12
);
10253 gimple_seq_add_stmt (body_p
, g
);
10254 g
= gimple_build_label (lab11
);
10255 gimple_seq_add_stmt (body_p
, g
);
10256 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10257 gimple_seq_add_stmt (body_p
, g
);
10258 g
= gimple_build_label (lab12
);
10259 gimple_seq_add_stmt (body_p
, g
);
10261 g
= omp_build_barrier (NULL
);
10262 gimple_seq_add_stmt (body_p
, g
);
10264 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
10266 gimple_seq_add_stmt (body_p
, g
);
10268 g
= gimple_build_label (lab2
);
10269 gimple_seq_add_stmt (body_p
, g
);
10271 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10272 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10273 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10274 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
10275 gimple_seq_add_stmt (body_p
, g
);
10276 g
= gimple_build_label (lab1
);
10277 gimple_seq_add_stmt (body_p
, g
);
10278 gimple_seq_add_seq (body_p
, thr02_list
);
10279 g
= gimple_build_goto (lab3
);
10280 gimple_seq_add_stmt (body_p
, g
);
10281 g
= gimple_build_label (lab2
);
10282 gimple_seq_add_stmt (body_p
, g
);
10283 gimple_seq_add_seq (body_p
, thrn2_list
);
10284 g
= gimple_build_label (lab3
);
10285 gimple_seq_add_stmt (body_p
, g
);
10287 g
= gimple_build_assign (ivar
, size_zero_node
);
10288 gimple_seq_add_stmt (body_p
, g
);
10289 gimple_seq_add_stmt (body_p
, new_stmt
);
10290 gimple_seq_add_seq (body_p
, new_body
);
10292 gimple_seq new_dlist
= NULL
;
10293 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10294 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10295 tree num_threadsm1
= create_tmp_var (integer_type_node
);
10296 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
10297 integer_minus_one_node
);
10298 gimple_seq_add_stmt (&new_dlist
, g
);
10299 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
10300 gimple_seq_add_stmt (&new_dlist
, g
);
10301 g
= gimple_build_label (lab1
);
10302 gimple_seq_add_stmt (&new_dlist
, g
);
10303 gimple_seq_add_seq (&new_dlist
, last_list
);
10304 g
= gimple_build_label (lab2
);
10305 gimple_seq_add_stmt (&new_dlist
, g
);
10306 gimple_seq_add_seq (&new_dlist
, *dlist
);
10307 *dlist
= new_dlist
;
10310 /* Lower code for an OMP loop directive. */
10313 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10315 tree
*rhs_p
, block
;
10316 struct omp_for_data fd
, *fdp
= NULL
;
10317 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
10319 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
10320 gimple_seq cnt_list
= NULL
, clist
= NULL
;
10321 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
10324 push_gimplify_context ();
10326 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
10328 block
= make_node (BLOCK
);
10329 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
10330 /* Replace at gsi right away, so that 'stmt' is no member
10331 of a sequence anymore as we're going to add to a different
10333 gsi_replace (gsi_p
, new_stmt
, true);
10335 /* Move declaration of temporaries in the loop body before we make
10337 omp_for_body
= gimple_omp_body (stmt
);
10338 if (!gimple_seq_empty_p (omp_for_body
)
10339 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
10342 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
10343 tree vars
= gimple_bind_vars (inner_bind
);
10344 gimple_bind_append_vars (new_stmt
, vars
);
10345 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10346 keep them on the inner_bind and it's block. */
10347 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
10348 if (gimple_bind_block (inner_bind
))
10349 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
10352 if (gimple_omp_for_combined_into_p (stmt
))
10354 omp_extract_for_data (stmt
, &fd
, NULL
);
10357 /* We need two temporaries with fd.loop.v type (istart/iend)
10358 and then (fd.collapse - 1) temporaries with the same
10359 type for count2 ... countN-1 vars if not constant. */
10361 tree type
= fd
.iter_type
;
10362 if (fd
.collapse
> 1
10363 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
10364 count
+= fd
.collapse
- 1;
10366 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
10367 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
10368 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
10370 tree clauses
= *pc
;
10373 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
10374 OMP_CLAUSE__LOOPTEMP_
);
10375 if (ctx
->simt_stmt
)
10376 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
10377 OMP_CLAUSE__LOOPTEMP_
);
10378 for (i
= 0; i
< count
; i
++)
10383 gcc_assert (outerc
);
10384 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
10385 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
10386 OMP_CLAUSE__LOOPTEMP_
);
10390 /* If there are 2 adjacent SIMD stmts, one with _simt_
10391 clause, another without, make sure they have the same
10392 decls in _looptemp_ clauses, because the outer stmt
10393 they are combined into will look up just one inner_stmt. */
10394 if (ctx
->simt_stmt
)
10395 temp
= OMP_CLAUSE_DECL (simtc
);
10397 temp
= create_tmp_var (type
);
10398 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
10400 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
10401 OMP_CLAUSE_DECL (*pc
) = temp
;
10402 pc
= &OMP_CLAUSE_CHAIN (*pc
);
10403 if (ctx
->simt_stmt
)
10404 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
10405 OMP_CLAUSE__LOOPTEMP_
);
10410 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10414 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
10415 OMP_CLAUSE_REDUCTION
);
10416 tree rtmp
= NULL_TREE
;
10419 tree type
= build_pointer_type (pointer_sized_int_node
);
10420 tree temp
= create_tmp_var (type
);
10421 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
10422 OMP_CLAUSE_DECL (c
) = temp
;
10423 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
10424 gimple_omp_for_set_clauses (stmt
, c
);
10425 lower_omp_task_reductions (ctx
, OMP_FOR
,
10426 gimple_omp_for_clauses (stmt
),
10427 &tred_ilist
, &tred_dlist
);
10429 rtmp
= make_ssa_name (type
);
10430 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
10433 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
10436 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
10438 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
10439 gimple_omp_for_pre_body (stmt
));
10441 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10443 /* Lower the header expressions. At this point, we can assume that
10444 the header is of the form:
10446 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10448 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10449 using the .omp_data_s mapping, if needed. */
10450 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
10452 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
10453 if (!is_gimple_min_invariant (*rhs_p
))
10454 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10455 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10456 recompute_tree_invariant_for_addr_expr (*rhs_p
);
10458 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
10459 if (!is_gimple_min_invariant (*rhs_p
))
10460 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10461 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10462 recompute_tree_invariant_for_addr_expr (*rhs_p
);
10464 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
10465 if (!is_gimple_min_invariant (*rhs_p
))
10466 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10469 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
10471 gimple_seq_add_seq (&body
, cnt_list
);
10473 /* Once lowered, extract the bounds and clauses. */
10474 omp_extract_for_data (stmt
, &fd
, NULL
);
10476 if (is_gimple_omp_oacc (ctx
->stmt
)
10477 && !ctx_in_oacc_kernels_region (ctx
))
10478 lower_oacc_head_tail (gimple_location (stmt
),
10479 gimple_omp_for_clauses (stmt
),
10480 &oacc_head
, &oacc_tail
, ctx
);
10482 /* Add OpenACC partitioning and reduction markers just before the loop. */
10484 gimple_seq_add_seq (&body
, oacc_head
);
10486 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
10488 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10489 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10490 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10491 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10493 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
10494 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
10495 OMP_CLAUSE_LINEAR_STEP (c
)
10496 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
10500 bool phony_loop
= (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
10501 && gimple_omp_for_grid_phony (stmt
));
10502 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
10503 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10505 gcc_assert (!phony_loop
);
10506 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
10511 gimple_seq_add_stmt (&body
, stmt
);
10512 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
10516 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
10519 /* After the loop, add exit clauses. */
10520 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
10524 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
10525 gcall
*g
= gimple_build_call (fndecl
, 0);
10526 gimple_seq_add_stmt (&body
, g
);
10527 gimple_seq_add_seq (&body
, clist
);
10528 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
10529 g
= gimple_build_call (fndecl
, 0);
10530 gimple_seq_add_stmt (&body
, g
);
10533 if (ctx
->cancellable
)
10534 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
10536 gimple_seq_add_seq (&body
, dlist
);
10540 gimple_seq_add_seq (&tred_ilist
, body
);
10544 body
= maybe_catch_exception (body
);
10548 /* Region exit marker goes at the end of the loop body. */
10549 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
10550 gimple_seq_add_stmt (&body
, g
);
10552 gimple_seq_add_seq (&body
, tred_dlist
);
10554 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
10557 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
10560 /* Add OpenACC joining and reduction markers just after the loop. */
10562 gimple_seq_add_seq (&body
, oacc_tail
);
10564 pop_gimplify_context (new_stmt
);
10566 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
10567 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
10568 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
10569 if (BLOCK_VARS (block
))
10570 TREE_USED (block
) = 1;
10572 gimple_bind_set_body (new_stmt
, body
);
10573 gimple_omp_set_body (stmt
, NULL
);
10574 gimple_omp_for_set_pre_body (stmt
, NULL
);
10577 /* Callback for walk_stmts. Check if the current statement only contains
10578 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10581 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
10582 bool *handled_ops_p
,
10583 struct walk_stmt_info
*wi
)
10585 int *info
= (int *) wi
->info
;
10586 gimple
*stmt
= gsi_stmt (*gsi_p
);
10588 *handled_ops_p
= true;
10589 switch (gimple_code (stmt
))
10595 case GIMPLE_OMP_FOR
:
10596 case GIMPLE_OMP_SECTIONS
:
10597 *info
= *info
== 0 ? 1 : -1;
10606 struct omp_taskcopy_context
10608 /* This field must be at the beginning, as we do "inheritance": Some
10609 callback functions for tree-inline.c (e.g., omp_copy_decl)
10610 receive a copy_body_data pointer that is up-casted to an
10611 omp_context pointer. */
10617 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
10619 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
10621 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
10622 return create_tmp_var (TREE_TYPE (var
));
10628 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
10630 tree name
, new_fields
= NULL
, type
, f
;
10632 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
10633 name
= DECL_NAME (TYPE_NAME (orig_type
));
10634 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
10635 TYPE_DECL
, name
, type
);
10636 TYPE_NAME (type
) = name
;
10638 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
10640 tree new_f
= copy_node (f
);
10641 DECL_CONTEXT (new_f
) = type
;
10642 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
10643 TREE_CHAIN (new_f
) = new_fields
;
10644 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10645 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10646 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
10648 new_fields
= new_f
;
10649 tcctx
->cb
.decl_map
->put (f
, new_f
);
10651 TYPE_FIELDS (type
) = nreverse (new_fields
);
10652 layout_type (type
);
10656 /* Create task copyfn. */
10659 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
10661 struct function
*child_cfun
;
10662 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
10663 tree record_type
, srecord_type
, bind
, list
;
10664 bool record_needs_remap
= false, srecord_needs_remap
= false;
10666 struct omp_taskcopy_context tcctx
;
10667 location_t loc
= gimple_location (task_stmt
);
10668 size_t looptempno
= 0;
10670 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
10671 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
10672 gcc_assert (child_cfun
->cfg
== NULL
);
10673 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
10675 /* Reset DECL_CONTEXT on function arguments. */
10676 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
10677 DECL_CONTEXT (t
) = child_fn
;
10679 /* Populate the function. */
10680 push_gimplify_context ();
10681 push_cfun (child_cfun
);
10683 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
10684 TREE_SIDE_EFFECTS (bind
) = 1;
10686 DECL_SAVED_TREE (child_fn
) = bind
;
10687 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
10689 /* Remap src and dst argument types if needed. */
10690 record_type
= ctx
->record_type
;
10691 srecord_type
= ctx
->srecord_type
;
10692 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
10693 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10695 record_needs_remap
= true;
10698 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
10699 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10701 srecord_needs_remap
= true;
10705 if (record_needs_remap
|| srecord_needs_remap
)
10707 memset (&tcctx
, '\0', sizeof (tcctx
));
10708 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
10709 tcctx
.cb
.dst_fn
= child_fn
;
10710 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
10711 gcc_checking_assert (tcctx
.cb
.src_node
);
10712 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
10713 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
10714 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
10715 tcctx
.cb
.eh_lp_nr
= 0;
10716 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
10717 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
10720 if (record_needs_remap
)
10721 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
10722 if (srecord_needs_remap
)
10723 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
10726 tcctx
.cb
.decl_map
= NULL
;
10728 arg
= DECL_ARGUMENTS (child_fn
);
10729 TREE_TYPE (arg
) = build_pointer_type (record_type
);
10730 sarg
= DECL_CHAIN (arg
);
10731 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
10733 /* First pass: initialize temporaries used in record_type and srecord_type
10734 sizes and field offsets. */
10735 if (tcctx
.cb
.decl_map
)
10736 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10737 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10741 decl
= OMP_CLAUSE_DECL (c
);
10742 p
= tcctx
.cb
.decl_map
->get (decl
);
10745 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10746 sf
= (tree
) n
->value
;
10747 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10748 src
= build_simple_mem_ref_loc (loc
, sarg
);
10749 src
= omp_build_component_ref (src
, sf
);
10750 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
10751 append_to_statement_list (t
, &list
);
10754 /* Second pass: copy shared var pointers and copy construct non-VLA
10755 firstprivate vars. */
10756 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10757 switch (OMP_CLAUSE_CODE (c
))
10759 splay_tree_key key
;
10760 case OMP_CLAUSE_SHARED
:
10761 decl
= OMP_CLAUSE_DECL (c
);
10762 key
= (splay_tree_key
) decl
;
10763 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
10764 key
= (splay_tree_key
) &DECL_UID (decl
);
10765 n
= splay_tree_lookup (ctx
->field_map
, key
);
10768 f
= (tree
) n
->value
;
10769 if (tcctx
.cb
.decl_map
)
10770 f
= *tcctx
.cb
.decl_map
->get (f
);
10771 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
10772 sf
= (tree
) n
->value
;
10773 if (tcctx
.cb
.decl_map
)
10774 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10775 src
= build_simple_mem_ref_loc (loc
, sarg
);
10776 src
= omp_build_component_ref (src
, sf
);
10777 dst
= build_simple_mem_ref_loc (loc
, arg
);
10778 dst
= omp_build_component_ref (dst
, f
);
10779 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10780 append_to_statement_list (t
, &list
);
10782 case OMP_CLAUSE_REDUCTION
:
10783 case OMP_CLAUSE_IN_REDUCTION
:
10784 decl
= OMP_CLAUSE_DECL (c
);
10785 if (TREE_CODE (decl
) == MEM_REF
)
10787 decl
= TREE_OPERAND (decl
, 0);
10788 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
10789 decl
= TREE_OPERAND (decl
, 0);
10790 if (TREE_CODE (decl
) == INDIRECT_REF
10791 || TREE_CODE (decl
) == ADDR_EXPR
)
10792 decl
= TREE_OPERAND (decl
, 0);
10794 key
= (splay_tree_key
) decl
;
10795 n
= splay_tree_lookup (ctx
->field_map
, key
);
10798 f
= (tree
) n
->value
;
10799 if (tcctx
.cb
.decl_map
)
10800 f
= *tcctx
.cb
.decl_map
->get (f
);
10801 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
10802 sf
= (tree
) n
->value
;
10803 if (tcctx
.cb
.decl_map
)
10804 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10805 src
= build_simple_mem_ref_loc (loc
, sarg
);
10806 src
= omp_build_component_ref (src
, sf
);
10807 if (decl
!= OMP_CLAUSE_DECL (c
)
10808 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
10809 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
10810 src
= build_simple_mem_ref_loc (loc
, src
);
10811 dst
= build_simple_mem_ref_loc (loc
, arg
);
10812 dst
= omp_build_component_ref (dst
, f
);
10813 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10814 append_to_statement_list (t
, &list
);
10816 case OMP_CLAUSE__LOOPTEMP_
:
10817 /* Fields for first two _looptemp_ clauses are initialized by
10818 GOMP_taskloop*, the rest are handled like firstprivate. */
10819 if (looptempno
< 2)
10825 case OMP_CLAUSE__REDUCTEMP_
:
10826 case OMP_CLAUSE_FIRSTPRIVATE
:
10827 decl
= OMP_CLAUSE_DECL (c
);
10828 if (is_variable_sized (decl
))
10830 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10833 f
= (tree
) n
->value
;
10834 if (tcctx
.cb
.decl_map
)
10835 f
= *tcctx
.cb
.decl_map
->get (f
);
10836 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10839 sf
= (tree
) n
->value
;
10840 if (tcctx
.cb
.decl_map
)
10841 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10842 src
= build_simple_mem_ref_loc (loc
, sarg
);
10843 src
= omp_build_component_ref (src
, sf
);
10844 if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
10845 src
= build_simple_mem_ref_loc (loc
, src
);
10849 dst
= build_simple_mem_ref_loc (loc
, arg
);
10850 dst
= omp_build_component_ref (dst
, f
);
10851 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
10852 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10854 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
10855 append_to_statement_list (t
, &list
);
10857 case OMP_CLAUSE_PRIVATE
:
10858 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
10860 decl
= OMP_CLAUSE_DECL (c
);
10861 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10862 f
= (tree
) n
->value
;
10863 if (tcctx
.cb
.decl_map
)
10864 f
= *tcctx
.cb
.decl_map
->get (f
);
10865 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10868 sf
= (tree
) n
->value
;
10869 if (tcctx
.cb
.decl_map
)
10870 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10871 src
= build_simple_mem_ref_loc (loc
, sarg
);
10872 src
= omp_build_component_ref (src
, sf
);
10873 if (use_pointer_for_field (decl
, NULL
))
10874 src
= build_simple_mem_ref_loc (loc
, src
);
10878 dst
= build_simple_mem_ref_loc (loc
, arg
);
10879 dst
= omp_build_component_ref (dst
, f
);
10880 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10881 append_to_statement_list (t
, &list
);
10887 /* Last pass: handle VLA firstprivates. */
10888 if (tcctx
.cb
.decl_map
)
10889 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10890 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10894 decl
= OMP_CLAUSE_DECL (c
);
10895 if (!is_variable_sized (decl
))
10897 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10900 f
= (tree
) n
->value
;
10901 f
= *tcctx
.cb
.decl_map
->get (f
);
10902 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
10903 ind
= DECL_VALUE_EXPR (decl
);
10904 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
10905 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
10906 n
= splay_tree_lookup (ctx
->sfield_map
,
10907 (splay_tree_key
) TREE_OPERAND (ind
, 0));
10908 sf
= (tree
) n
->value
;
10909 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10910 src
= build_simple_mem_ref_loc (loc
, sarg
);
10911 src
= omp_build_component_ref (src
, sf
);
10912 src
= build_simple_mem_ref_loc (loc
, src
);
10913 dst
= build_simple_mem_ref_loc (loc
, arg
);
10914 dst
= omp_build_component_ref (dst
, f
);
10915 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
10916 append_to_statement_list (t
, &list
);
10917 n
= splay_tree_lookup (ctx
->field_map
,
10918 (splay_tree_key
) TREE_OPERAND (ind
, 0));
10919 df
= (tree
) n
->value
;
10920 df
= *tcctx
.cb
.decl_map
->get (df
);
10921 ptr
= build_simple_mem_ref_loc (loc
, arg
);
10922 ptr
= omp_build_component_ref (ptr
, df
);
10923 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
10924 build_fold_addr_expr_loc (loc
, dst
));
10925 append_to_statement_list (t
, &list
);
10928 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
10929 append_to_statement_list (t
, &list
);
10931 if (tcctx
.cb
.decl_map
)
10932 delete tcctx
.cb
.decl_map
;
10933 pop_gimplify_context (NULL
);
10934 BIND_EXPR_BODY (bind
) = list
;
10939 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
10943 size_t cnt
[4] = { 0, 0, 0, 0 }, idx
= 2, i
;
10945 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
10946 gcc_assert (clauses
);
10947 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10948 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
10949 switch (OMP_CLAUSE_DEPEND_KIND (c
))
10951 case OMP_CLAUSE_DEPEND_LAST
:
10952 /* Lowering already done at gimplification. */
10954 case OMP_CLAUSE_DEPEND_IN
:
10957 case OMP_CLAUSE_DEPEND_OUT
:
10958 case OMP_CLAUSE_DEPEND_INOUT
:
10961 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
10964 case OMP_CLAUSE_DEPEND_DEPOBJ
:
10967 case OMP_CLAUSE_DEPEND_SOURCE
:
10968 case OMP_CLAUSE_DEPEND_SINK
:
10971 gcc_unreachable ();
10973 if (cnt
[1] || cnt
[3])
10975 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3];
10976 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
);
10977 tree array
= create_tmp_var (type
);
10978 TREE_ADDRESSABLE (array
) = 1;
10979 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
10983 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
10984 gimple_seq_add_stmt (iseq
, g
);
10985 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
10988 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
10989 gimple_seq_add_stmt (iseq
, g
);
10990 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
10992 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
10993 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
10994 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
10995 gimple_seq_add_stmt (iseq
, g
);
10997 for (i
= 0; i
< 4; i
++)
11001 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11002 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
11006 switch (OMP_CLAUSE_DEPEND_KIND (c
))
11008 case OMP_CLAUSE_DEPEND_IN
:
11012 case OMP_CLAUSE_DEPEND_OUT
:
11013 case OMP_CLAUSE_DEPEND_INOUT
:
11017 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
11021 case OMP_CLAUSE_DEPEND_DEPOBJ
:
11026 gcc_unreachable ();
11028 tree t
= OMP_CLAUSE_DECL (c
);
11029 t
= fold_convert (ptr_type_node
, t
);
11030 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
11031 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
11032 NULL_TREE
, NULL_TREE
);
11033 g
= gimple_build_assign (r
, t
);
11034 gimple_seq_add_stmt (iseq
, g
);
11037 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
11038 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
11039 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
11040 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
11042 tree clobber
= build_clobber (type
);
11043 g
= gimple_build_assign (array
, clobber
);
11044 gimple_seq_add_stmt (oseq
, g
);
11047 /* Lower the OpenMP parallel or task directive in the current statement
11048 in GSI_P. CTX holds context information for the directive. */
11051 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11055 gimple
*stmt
= gsi_stmt (*gsi_p
);
11056 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
11057 gimple_seq par_body
;
11058 location_t loc
= gimple_location (stmt
);
11060 clauses
= gimple_omp_taskreg_clauses (stmt
);
11061 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11062 && gimple_omp_task_taskwait_p (stmt
))
11070 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
11071 par_body
= gimple_bind_body (par_bind
);
11073 child_fn
= ctx
->cb
.dst_fn
;
11074 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
11075 && !gimple_omp_parallel_combined_p (stmt
))
11077 struct walk_stmt_info wi
;
11080 memset (&wi
, 0, sizeof (wi
));
11082 wi
.val_only
= true;
11083 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
11085 gimple_omp_parallel_set_combined_p (stmt
, true);
11087 gimple_seq dep_ilist
= NULL
;
11088 gimple_seq dep_olist
= NULL
;
11089 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11090 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11092 push_gimplify_context ();
11093 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11094 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
11095 &dep_ilist
, &dep_olist
);
11098 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11099 && gimple_omp_task_taskwait_p (stmt
))
11103 gsi_replace (gsi_p
, dep_bind
, true);
11104 gimple_bind_add_seq (dep_bind
, dep_ilist
);
11105 gimple_bind_add_stmt (dep_bind
, stmt
);
11106 gimple_bind_add_seq (dep_bind
, dep_olist
);
11107 pop_gimplify_context (dep_bind
);
11112 if (ctx
->srecord_type
)
11113 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
11115 gimple_seq tskred_ilist
= NULL
;
11116 gimple_seq tskred_olist
= NULL
;
11117 if ((is_task_ctx (ctx
)
11118 && gimple_omp_task_taskloop_p (ctx
->stmt
)
11119 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
11120 OMP_CLAUSE_REDUCTION
))
11121 || (is_parallel_ctx (ctx
)
11122 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
11123 OMP_CLAUSE__REDUCTEMP_
)))
11125 if (dep_bind
== NULL
)
11127 push_gimplify_context ();
11128 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11130 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
11132 gimple_omp_taskreg_clauses (ctx
->stmt
),
11133 &tskred_ilist
, &tskred_olist
);
11136 push_gimplify_context ();
11138 gimple_seq par_olist
= NULL
;
11139 gimple_seq par_ilist
= NULL
;
11140 gimple_seq par_rlist
= NULL
;
11141 bool phony_construct
= gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
11142 && gimple_omp_parallel_grid_phony (as_a
<gomp_parallel
*> (stmt
));
11143 if (phony_construct
&& ctx
->record_type
)
11145 gcc_checking_assert (!ctx
->receiver_decl
);
11146 ctx
->receiver_decl
= create_tmp_var
11147 (build_reference_type (ctx
->record_type
), ".omp_rec");
11149 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
11150 lower_omp (&par_body
, ctx
);
11151 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
)
11152 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
11154 /* Declare all the variables created by mapping and the variables
11155 declared in the scope of the parallel body. */
11156 record_vars_into (ctx
->block_vars
, child_fn
);
11157 maybe_remove_omp_member_access_dummy_vars (par_bind
);
11158 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
11160 if (ctx
->record_type
)
11163 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
11164 : ctx
->record_type
, ".omp_data_o");
11165 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11166 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11167 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
11170 gimple_seq olist
= NULL
;
11171 gimple_seq ilist
= NULL
;
11172 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
11173 lower_send_shared_vars (&ilist
, &olist
, ctx
);
11175 if (ctx
->record_type
)
11177 tree clobber
= build_clobber (TREE_TYPE (ctx
->sender_decl
));
11178 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
11182 /* Once all the expansions are done, sequence all the different
11183 fragments inside gimple_omp_body. */
11185 gimple_seq new_body
= NULL
;
11187 if (ctx
->record_type
)
11189 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
11190 /* fixup_child_record_type might have changed receiver_decl's type. */
11191 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
11192 gimple_seq_add_stmt (&new_body
,
11193 gimple_build_assign (ctx
->receiver_decl
, t
));
11196 gimple_seq_add_seq (&new_body
, par_ilist
);
11197 gimple_seq_add_seq (&new_body
, par_body
);
11198 gimple_seq_add_seq (&new_body
, par_rlist
);
11199 if (ctx
->cancellable
)
11200 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
11201 gimple_seq_add_seq (&new_body
, par_olist
);
11202 new_body
= maybe_catch_exception (new_body
);
11203 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
11204 gimple_seq_add_stmt (&new_body
,
11205 gimple_build_omp_continue (integer_zero_node
,
11206 integer_zero_node
));
11207 if (!phony_construct
)
11209 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
11210 gimple_omp_set_body (stmt
, new_body
);
11213 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
11214 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11216 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
11217 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
11218 gimple_bind_add_seq (bind
, ilist
);
11219 if (!phony_construct
)
11220 gimple_bind_add_stmt (bind
, stmt
);
11222 gimple_bind_add_seq (bind
, new_body
);
11223 gimple_bind_add_seq (bind
, olist
);
11225 pop_gimplify_context (NULL
);
11229 gimple_bind_add_seq (dep_bind
, dep_ilist
);
11230 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
11231 gimple_bind_add_stmt (dep_bind
, bind
);
11232 gimple_bind_add_seq (dep_bind
, tskred_olist
);
11233 gimple_bind_add_seq (dep_bind
, dep_olist
);
11234 pop_gimplify_context (dep_bind
);
11238 /* Lower the GIMPLE_OMP_TARGET in the current statement
11239 in GSI_P. CTX holds context information for the directive. */
11242 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11245 tree child_fn
, t
, c
;
11246 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
11247 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
11248 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
11249 location_t loc
= gimple_location (stmt
);
11250 bool offloaded
, data_region
;
11251 unsigned int map_cnt
= 0;
11253 offloaded
= is_gimple_omp_offloaded (stmt
);
11254 switch (gimple_omp_target_kind (stmt
))
11256 case GF_OMP_TARGET_KIND_REGION
:
11257 case GF_OMP_TARGET_KIND_UPDATE
:
11258 case GF_OMP_TARGET_KIND_ENTER_DATA
:
11259 case GF_OMP_TARGET_KIND_EXIT_DATA
:
11260 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
11261 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
11262 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
11263 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
11264 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
11265 data_region
= false;
11267 case GF_OMP_TARGET_KIND_DATA
:
11268 case GF_OMP_TARGET_KIND_OACC_DATA
:
11269 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
11270 data_region
= true;
11273 gcc_unreachable ();
11276 clauses
= gimple_omp_target_clauses (stmt
);
11278 gimple_seq dep_ilist
= NULL
;
11279 gimple_seq dep_olist
= NULL
;
11280 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11282 push_gimplify_context ();
11283 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11284 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
11285 &dep_ilist
, &dep_olist
);
11292 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
11293 tgt_body
= gimple_bind_body (tgt_bind
);
11295 else if (data_region
)
11296 tgt_body
= gimple_omp_body (stmt
);
11297 child_fn
= ctx
->cb
.dst_fn
;
11299 push_gimplify_context ();
11302 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11303 switch (OMP_CLAUSE_CODE (c
))
11309 case OMP_CLAUSE_MAP
:
11311 /* First check what we're prepared to handle in the following. */
11312 switch (OMP_CLAUSE_MAP_KIND (c
))
11314 case GOMP_MAP_ALLOC
:
11316 case GOMP_MAP_FROM
:
11317 case GOMP_MAP_TOFROM
:
11318 case GOMP_MAP_POINTER
:
11319 case GOMP_MAP_TO_PSET
:
11320 case GOMP_MAP_DELETE
:
11321 case GOMP_MAP_RELEASE
:
11322 case GOMP_MAP_ALWAYS_TO
:
11323 case GOMP_MAP_ALWAYS_FROM
:
11324 case GOMP_MAP_ALWAYS_TOFROM
:
11325 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
11326 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
11327 case GOMP_MAP_STRUCT
:
11328 case GOMP_MAP_ALWAYS_POINTER
:
11330 case GOMP_MAP_FORCE_ALLOC
:
11331 case GOMP_MAP_FORCE_TO
:
11332 case GOMP_MAP_FORCE_FROM
:
11333 case GOMP_MAP_FORCE_TOFROM
:
11334 case GOMP_MAP_FORCE_PRESENT
:
11335 case GOMP_MAP_FORCE_DEVICEPTR
:
11336 case GOMP_MAP_DEVICE_RESIDENT
:
11337 case GOMP_MAP_LINK
:
11338 gcc_assert (is_gimple_omp_oacc (stmt
));
11341 gcc_unreachable ();
11345 case OMP_CLAUSE_TO
:
11346 case OMP_CLAUSE_FROM
:
11348 var
= OMP_CLAUSE_DECL (c
);
11351 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
11352 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11353 && (OMP_CLAUSE_MAP_KIND (c
)
11354 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
11359 if (DECL_SIZE (var
)
11360 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
11362 tree var2
= DECL_VALUE_EXPR (var
);
11363 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
11364 var2
= TREE_OPERAND (var2
, 0);
11365 gcc_assert (DECL_P (var2
));
11370 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11371 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11372 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
11374 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11376 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
11377 && varpool_node::get_create (var
)->offloadable
)
11380 tree type
= build_pointer_type (TREE_TYPE (var
));
11381 tree new_var
= lookup_decl (var
, ctx
);
11382 x
= create_tmp_var_raw (type
, get_name (new_var
));
11383 gimple_add_tmp_var (x
);
11384 x
= build_simple_mem_ref (x
);
11385 SET_DECL_VALUE_EXPR (new_var
, x
);
11386 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11391 if (!maybe_lookup_field (var
, ctx
))
11394 /* Don't remap oacc parallel reduction variables, because the
11395 intermediate result must be local to each gang. */
11396 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11397 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
11399 x
= build_receiver_ref (var
, true, ctx
);
11400 tree new_var
= lookup_decl (var
, ctx
);
11402 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11403 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11404 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11405 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11406 x
= build_simple_mem_ref (x
);
11407 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11409 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11410 if (omp_is_reference (new_var
)
11411 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
11412 || DECL_BY_REFERENCE (var
)))
11414 /* Create a local object to hold the instance
11416 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
11417 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
11418 tree inst
= create_tmp_var (type
, id
);
11419 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
11420 x
= build_fold_addr_expr (inst
);
11422 gimplify_assign (new_var
, x
, &fplist
);
11424 else if (DECL_P (new_var
))
11426 SET_DECL_VALUE_EXPR (new_var
, x
);
11427 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11430 gcc_unreachable ();
11435 case OMP_CLAUSE_FIRSTPRIVATE
:
11436 if (is_oacc_parallel (ctx
))
11437 goto oacc_firstprivate
;
11439 var
= OMP_CLAUSE_DECL (c
);
11440 if (!omp_is_reference (var
)
11441 && !is_gimple_reg_type (TREE_TYPE (var
)))
11443 tree new_var
= lookup_decl (var
, ctx
);
11444 if (is_variable_sized (var
))
11446 tree pvar
= DECL_VALUE_EXPR (var
);
11447 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11448 pvar
= TREE_OPERAND (pvar
, 0);
11449 gcc_assert (DECL_P (pvar
));
11450 tree new_pvar
= lookup_decl (pvar
, ctx
);
11451 x
= build_fold_indirect_ref (new_pvar
);
11452 TREE_THIS_NOTRAP (x
) = 1;
11455 x
= build_receiver_ref (var
, true, ctx
);
11456 SET_DECL_VALUE_EXPR (new_var
, x
);
11457 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11461 case OMP_CLAUSE_PRIVATE
:
11462 if (is_gimple_omp_oacc (ctx
->stmt
))
11464 var
= OMP_CLAUSE_DECL (c
);
11465 if (is_variable_sized (var
))
11467 tree new_var
= lookup_decl (var
, ctx
);
11468 tree pvar
= DECL_VALUE_EXPR (var
);
11469 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11470 pvar
= TREE_OPERAND (pvar
, 0);
11471 gcc_assert (DECL_P (pvar
));
11472 tree new_pvar
= lookup_decl (pvar
, ctx
);
11473 x
= build_fold_indirect_ref (new_pvar
);
11474 TREE_THIS_NOTRAP (x
) = 1;
11475 SET_DECL_VALUE_EXPR (new_var
, x
);
11476 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11480 case OMP_CLAUSE_USE_DEVICE_PTR
:
11481 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11482 case OMP_CLAUSE_IS_DEVICE_PTR
:
11483 var
= OMP_CLAUSE_DECL (c
);
11485 if (is_variable_sized (var
))
11487 tree new_var
= lookup_decl (var
, ctx
);
11488 tree pvar
= DECL_VALUE_EXPR (var
);
11489 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11490 pvar
= TREE_OPERAND (pvar
, 0);
11491 gcc_assert (DECL_P (pvar
));
11492 tree new_pvar
= lookup_decl (pvar
, ctx
);
11493 x
= build_fold_indirect_ref (new_pvar
);
11494 TREE_THIS_NOTRAP (x
) = 1;
11495 SET_DECL_VALUE_EXPR (new_var
, x
);
11496 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11498 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
11499 && !omp_is_reference (var
)
11500 && !omp_is_allocatable_or_ptr (var
)
11501 && !lang_hooks
.decls
.omp_array_data (var
, true))
11502 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11504 tree new_var
= lookup_decl (var
, ctx
);
11505 tree type
= build_pointer_type (TREE_TYPE (var
));
11506 x
= create_tmp_var_raw (type
, get_name (new_var
));
11507 gimple_add_tmp_var (x
);
11508 x
= build_simple_mem_ref (x
);
11509 SET_DECL_VALUE_EXPR (new_var
, x
);
11510 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11514 tree new_var
= lookup_decl (var
, ctx
);
11515 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
11516 gimple_add_tmp_var (x
);
11517 SET_DECL_VALUE_EXPR (new_var
, x
);
11518 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11525 target_nesting_level
++;
11526 lower_omp (&tgt_body
, ctx
);
11527 target_nesting_level
--;
11529 else if (data_region
)
11530 lower_omp (&tgt_body
, ctx
);
11534 /* Declare all the variables created by mapping and the variables
11535 declared in the scope of the target body. */
11536 record_vars_into (ctx
->block_vars
, child_fn
);
11537 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
11538 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
11543 if (ctx
->record_type
)
11546 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
11547 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11548 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11549 t
= make_tree_vec (3);
11550 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
11551 TREE_VEC_ELT (t
, 1)
11552 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
11553 ".omp_data_sizes");
11554 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
11555 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
11556 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
11557 tree tkind_type
= short_unsigned_type_node
;
11558 int talign_shift
= 8;
11559 TREE_VEC_ELT (t
, 2)
11560 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
11561 ".omp_data_kinds");
11562 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
11563 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
11564 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
11565 gimple_omp_target_set_data_arg (stmt
, t
);
11567 vec
<constructor_elt
, va_gc
> *vsize
;
11568 vec
<constructor_elt
, va_gc
> *vkind
;
11569 vec_alloc (vsize
, map_cnt
);
11570 vec_alloc (vkind
, map_cnt
);
11571 unsigned int map_idx
= 0;
11573 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11574 switch (OMP_CLAUSE_CODE (c
))
11576 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
11577 unsigned int talign
;
11582 case OMP_CLAUSE_MAP
:
11583 case OMP_CLAUSE_TO
:
11584 case OMP_CLAUSE_FROM
:
11585 oacc_firstprivate_map
:
11587 ovar
= OMP_CLAUSE_DECL (c
);
11588 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11589 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11590 || (OMP_CLAUSE_MAP_KIND (c
)
11591 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
11593 if (!DECL_P (ovar
))
11595 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11596 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
11598 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
11599 == get_base_address (ovar
));
11600 nc
= OMP_CLAUSE_CHAIN (c
);
11601 ovar
= OMP_CLAUSE_DECL (nc
);
11605 tree x
= build_sender_ref (ovar
, ctx
);
11607 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
11608 gimplify_assign (x
, v
, &ilist
);
11614 if (DECL_SIZE (ovar
)
11615 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
11617 tree ovar2
= DECL_VALUE_EXPR (ovar
);
11618 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
11619 ovar2
= TREE_OPERAND (ovar2
, 0);
11620 gcc_assert (DECL_P (ovar2
));
11623 if (!maybe_lookup_field (ovar
, ctx
))
11627 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
11628 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
11629 talign
= DECL_ALIGN_UNIT (ovar
);
11632 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11633 x
= build_sender_ref (ovar
, ctx
);
11635 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11636 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11637 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11638 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
11640 gcc_assert (offloaded
);
11642 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
11643 mark_addressable (avar
);
11644 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
11645 talign
= DECL_ALIGN_UNIT (avar
);
11646 avar
= build_fold_addr_expr (avar
);
11647 gimplify_assign (x
, avar
, &ilist
);
11649 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11651 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11652 if (!omp_is_reference (var
))
11654 if (is_gimple_reg (var
)
11655 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11656 TREE_NO_WARNING (var
) = 1;
11657 var
= build_fold_addr_expr (var
);
11660 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11661 gimplify_assign (x
, var
, &ilist
);
11663 else if (is_gimple_reg (var
))
11665 gcc_assert (offloaded
);
11666 tree avar
= create_tmp_var (TREE_TYPE (var
));
11667 mark_addressable (avar
);
11668 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
11669 if (GOMP_MAP_COPY_TO_P (map_kind
)
11670 || map_kind
== GOMP_MAP_POINTER
11671 || map_kind
== GOMP_MAP_TO_PSET
11672 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11674 /* If we need to initialize a temporary
11675 with VAR because it is not addressable, and
11676 the variable hasn't been initialized yet, then
11677 we'll get a warning for the store to avar.
11678 Don't warn in that case, the mapping might
11680 TREE_NO_WARNING (var
) = 1;
11681 gimplify_assign (avar
, var
, &ilist
);
11683 avar
= build_fold_addr_expr (avar
);
11684 gimplify_assign (x
, avar
, &ilist
);
11685 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
11686 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11687 && !TYPE_READONLY (TREE_TYPE (var
)))
11689 x
= unshare_expr (x
);
11690 x
= build_simple_mem_ref (x
);
11691 gimplify_assign (var
, x
, &olist
);
11696 /* While MAP is handled explicitly by the FE,
11697 for 'target update', only the identified is passed. */
11698 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
11699 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
11700 && (omp_is_allocatable_or_ptr (var
)
11701 && omp_is_optional_argument (var
)))
11702 var
= build_fold_indirect_ref (var
);
11703 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
11704 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
11705 || (!omp_is_allocatable_or_ptr (var
)
11706 && !omp_is_optional_argument (var
)))
11707 var
= build_fold_addr_expr (var
);
11708 gimplify_assign (x
, var
, &ilist
);
11712 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11714 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11715 s
= TREE_TYPE (ovar
);
11716 if (TREE_CODE (s
) == REFERENCE_TYPE
)
11718 s
= TYPE_SIZE_UNIT (s
);
11721 s
= OMP_CLAUSE_SIZE (c
);
11722 if (s
== NULL_TREE
)
11723 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
11724 s
= fold_convert (size_type_node
, s
);
11725 purpose
= size_int (map_idx
++);
11726 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11727 if (TREE_CODE (s
) != INTEGER_CST
)
11728 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
11730 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
11731 switch (OMP_CLAUSE_CODE (c
))
11733 case OMP_CLAUSE_MAP
:
11734 tkind
= OMP_CLAUSE_MAP_KIND (c
);
11735 tkind_zero
= tkind
;
11736 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
11739 case GOMP_MAP_ALLOC
:
11741 case GOMP_MAP_FROM
:
11742 case GOMP_MAP_TOFROM
:
11743 case GOMP_MAP_ALWAYS_TO
:
11744 case GOMP_MAP_ALWAYS_FROM
:
11745 case GOMP_MAP_ALWAYS_TOFROM
:
11746 case GOMP_MAP_RELEASE
:
11747 case GOMP_MAP_FORCE_TO
:
11748 case GOMP_MAP_FORCE_FROM
:
11749 case GOMP_MAP_FORCE_TOFROM
:
11750 case GOMP_MAP_FORCE_PRESENT
:
11751 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
11753 case GOMP_MAP_DELETE
:
11754 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
11758 if (tkind_zero
!= tkind
)
11760 if (integer_zerop (s
))
11761 tkind
= tkind_zero
;
11762 else if (integer_nonzerop (s
))
11763 tkind_zero
= tkind
;
11766 case OMP_CLAUSE_FIRSTPRIVATE
:
11767 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11768 tkind
= GOMP_MAP_TO
;
11769 tkind_zero
= tkind
;
11771 case OMP_CLAUSE_TO
:
11772 tkind
= GOMP_MAP_TO
;
11773 tkind_zero
= tkind
;
11775 case OMP_CLAUSE_FROM
:
11776 tkind
= GOMP_MAP_FROM
;
11777 tkind_zero
= tkind
;
11780 gcc_unreachable ();
11782 gcc_checking_assert (tkind
11783 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11784 gcc_checking_assert (tkind_zero
11785 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11786 talign
= ceil_log2 (talign
);
11787 tkind
|= talign
<< talign_shift
;
11788 tkind_zero
|= talign
<< talign_shift
;
11789 gcc_checking_assert (tkind
11790 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11791 gcc_checking_assert (tkind_zero
11792 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11793 if (tkind
== tkind_zero
)
11794 x
= build_int_cstu (tkind_type
, tkind
);
11797 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
11798 x
= build3 (COND_EXPR
, tkind_type
,
11799 fold_build2 (EQ_EXPR
, boolean_type_node
,
11800 unshare_expr (s
), size_zero_node
),
11801 build_int_cstu (tkind_type
, tkind_zero
),
11802 build_int_cstu (tkind_type
, tkind
));
11804 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
11809 case OMP_CLAUSE_FIRSTPRIVATE
:
11810 if (is_oacc_parallel (ctx
))
11811 goto oacc_firstprivate_map
;
11812 ovar
= OMP_CLAUSE_DECL (c
);
11813 if (omp_is_reference (ovar
))
11814 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11816 talign
= DECL_ALIGN_UNIT (ovar
);
11817 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11818 x
= build_sender_ref (ovar
, ctx
);
11819 tkind
= GOMP_MAP_FIRSTPRIVATE
;
11820 type
= TREE_TYPE (ovar
);
11821 if (omp_is_reference (ovar
))
11822 type
= TREE_TYPE (type
);
11823 if ((INTEGRAL_TYPE_P (type
)
11824 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
11825 || TREE_CODE (type
) == POINTER_TYPE
)
11827 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
11829 if (omp_is_reference (var
))
11830 t
= build_simple_mem_ref (var
);
11831 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11832 TREE_NO_WARNING (var
) = 1;
11833 if (TREE_CODE (type
) != POINTER_TYPE
)
11834 t
= fold_convert (pointer_sized_int_node
, t
);
11835 t
= fold_convert (TREE_TYPE (x
), t
);
11836 gimplify_assign (x
, t
, &ilist
);
11838 else if (omp_is_reference (var
))
11839 gimplify_assign (x
, var
, &ilist
);
11840 else if (is_gimple_reg (var
))
11842 tree avar
= create_tmp_var (TREE_TYPE (var
));
11843 mark_addressable (avar
);
11844 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11845 TREE_NO_WARNING (var
) = 1;
11846 gimplify_assign (avar
, var
, &ilist
);
11847 avar
= build_fold_addr_expr (avar
);
11848 gimplify_assign (x
, avar
, &ilist
);
11852 var
= build_fold_addr_expr (var
);
11853 gimplify_assign (x
, var
, &ilist
);
11855 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
11857 else if (omp_is_reference (ovar
))
11858 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11860 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
11861 s
= fold_convert (size_type_node
, s
);
11862 purpose
= size_int (map_idx
++);
11863 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11864 if (TREE_CODE (s
) != INTEGER_CST
)
11865 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
11867 gcc_checking_assert (tkind
11868 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11869 talign
= ceil_log2 (talign
);
11870 tkind
|= talign
<< talign_shift
;
11871 gcc_checking_assert (tkind
11872 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11873 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
11874 build_int_cstu (tkind_type
, tkind
));
11877 case OMP_CLAUSE_USE_DEVICE_PTR
:
11878 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11879 case OMP_CLAUSE_IS_DEVICE_PTR
:
11880 ovar
= OMP_CLAUSE_DECL (c
);
11881 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11883 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
11885 tkind
= (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
11886 ? GOMP_MAP_USE_DEVICE_PTR
: GOMP_MAP_FIRSTPRIVATE_INT
);
11887 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
11889 else if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
11891 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
11892 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
11896 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
11897 x
= build_sender_ref (ovar
, ctx
);
11899 type
= TREE_TYPE (ovar
);
11900 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
11901 var
= lang_hooks
.decls
.omp_array_data (ovar
, false);
11902 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
11903 && !omp_is_reference (ovar
)
11904 && !omp_is_allocatable_or_ptr (ovar
))
11905 || TREE_CODE (type
) == ARRAY_TYPE
)
11906 var
= build_fold_addr_expr (var
);
11909 if (omp_is_reference (ovar
)
11910 || omp_is_optional_argument (ovar
)
11911 || omp_is_allocatable_or_ptr (ovar
))
11913 type
= TREE_TYPE (type
);
11914 if (TREE_CODE (type
) != ARRAY_TYPE
11915 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
11916 && !omp_is_allocatable_or_ptr (ovar
))
11917 || (omp_is_reference (ovar
)
11918 && omp_is_allocatable_or_ptr (ovar
))))
11919 var
= build_simple_mem_ref (var
);
11920 var
= fold_convert (TREE_TYPE (x
), var
);
11923 gimplify_assign (x
, var
, &ilist
);
11925 purpose
= size_int (map_idx
++);
11926 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11927 gcc_checking_assert (tkind
11928 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11929 gcc_checking_assert (tkind
11930 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11931 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
11932 build_int_cstu (tkind_type
, tkind
));
11936 gcc_assert (map_idx
== map_cnt
);
11938 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
11939 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
11940 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
11941 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
11942 for (int i
= 1; i
<= 2; i
++)
11943 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
11945 gimple_seq initlist
= NULL
;
11946 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
11947 TREE_VEC_ELT (t
, i
)),
11948 &initlist
, true, NULL_TREE
);
11949 gimple_seq_add_seq (&ilist
, initlist
);
11951 tree clobber
= build_clobber (TREE_TYPE (TREE_VEC_ELT (t
, i
)));
11952 gimple_seq_add_stmt (&olist
,
11953 gimple_build_assign (TREE_VEC_ELT (t
, i
),
11957 tree clobber
= build_clobber (ctx
->record_type
);
11958 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
11962 /* Once all the expansions are done, sequence all the different
11963 fragments inside gimple_omp_body. */
11968 && ctx
->record_type
)
11970 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
11971 /* fixup_child_record_type might have changed receiver_decl's type. */
11972 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
11973 gimple_seq_add_stmt (&new_body
,
11974 gimple_build_assign (ctx
->receiver_decl
, t
));
11976 gimple_seq_add_seq (&new_body
, fplist
);
11978 if (offloaded
|| data_region
)
11980 tree prev
= NULL_TREE
;
11981 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11982 switch (OMP_CLAUSE_CODE (c
))
11987 case OMP_CLAUSE_FIRSTPRIVATE
:
11988 if (is_gimple_omp_oacc (ctx
->stmt
))
11990 var
= OMP_CLAUSE_DECL (c
);
11991 if (omp_is_reference (var
)
11992 || is_gimple_reg_type (TREE_TYPE (var
)))
11994 tree new_var
= lookup_decl (var
, ctx
);
11996 type
= TREE_TYPE (var
);
11997 if (omp_is_reference (var
))
11998 type
= TREE_TYPE (type
);
11999 if ((INTEGRAL_TYPE_P (type
)
12000 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
12001 || TREE_CODE (type
) == POINTER_TYPE
)
12003 x
= build_receiver_ref (var
, false, ctx
);
12004 if (TREE_CODE (type
) != POINTER_TYPE
)
12005 x
= fold_convert (pointer_sized_int_node
, x
);
12006 x
= fold_convert (type
, x
);
12007 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12009 if (omp_is_reference (var
))
12011 tree v
= create_tmp_var_raw (type
, get_name (var
));
12012 gimple_add_tmp_var (v
);
12013 TREE_ADDRESSABLE (v
) = 1;
12014 gimple_seq_add_stmt (&new_body
,
12015 gimple_build_assign (v
, x
));
12016 x
= build_fold_addr_expr (v
);
12018 gimple_seq_add_stmt (&new_body
,
12019 gimple_build_assign (new_var
, x
));
12023 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
12024 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12026 gimple_seq_add_stmt (&new_body
,
12027 gimple_build_assign (new_var
, x
));
12030 else if (is_variable_sized (var
))
12032 tree pvar
= DECL_VALUE_EXPR (var
);
12033 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12034 pvar
= TREE_OPERAND (pvar
, 0);
12035 gcc_assert (DECL_P (pvar
));
12036 tree new_var
= lookup_decl (pvar
, ctx
);
12037 x
= build_receiver_ref (var
, false, ctx
);
12038 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12039 gimple_seq_add_stmt (&new_body
,
12040 gimple_build_assign (new_var
, x
));
12043 case OMP_CLAUSE_PRIVATE
:
12044 if (is_gimple_omp_oacc (ctx
->stmt
))
12046 var
= OMP_CLAUSE_DECL (c
);
12047 if (omp_is_reference (var
))
12049 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12050 tree new_var
= lookup_decl (var
, ctx
);
12051 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
12052 if (TREE_CONSTANT (x
))
12054 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
12056 gimple_add_tmp_var (x
);
12057 TREE_ADDRESSABLE (x
) = 1;
12058 x
= build_fold_addr_expr_loc (clause_loc
, x
);
12063 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12064 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12065 gimple_seq_add_stmt (&new_body
,
12066 gimple_build_assign (new_var
, x
));
12069 case OMP_CLAUSE_USE_DEVICE_PTR
:
12070 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12071 case OMP_CLAUSE_IS_DEVICE_PTR
:
12072 var
= OMP_CLAUSE_DECL (c
);
12073 bool is_array_data
;
12074 is_array_data
= lang_hooks
.decls
.omp_array_data (var
, true) != NULL
;
12076 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
12077 x
= build_sender_ref (is_array_data
12078 ? (splay_tree_key
) &DECL_NAME (var
)
12079 : (splay_tree_key
) &DECL_UID (var
), ctx
);
12081 x
= build_receiver_ref (var
, false, ctx
);
12085 bool is_ref
= omp_is_reference (var
);
12086 /* First, we copy the descriptor data from the host; then
12087 we update its data to point to the target address. */
12088 tree new_var
= lookup_decl (var
, ctx
);
12089 new_var
= DECL_VALUE_EXPR (new_var
);
12094 var
= build_fold_indirect_ref (var
);
12095 gimplify_expr (&var
, &new_body
, NULL
, is_gimple_val
,
12097 v
= create_tmp_var_raw (TREE_TYPE (var
), get_name (var
));
12098 gimple_add_tmp_var (v
);
12099 TREE_ADDRESSABLE (v
) = 1;
12100 gimple_seq_add_stmt (&new_body
,
12101 gimple_build_assign (v
, var
));
12102 tree rhs
= build_fold_addr_expr (v
);
12103 gimple_seq_add_stmt (&new_body
,
12104 gimple_build_assign (new_var
, rhs
));
12107 gimple_seq_add_stmt (&new_body
,
12108 gimple_build_assign (new_var
, var
));
12110 tree v2
= lang_hooks
.decls
.omp_array_data (unshare_expr (v
), false);
12112 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12113 gimple_seq_add_stmt (&new_body
,
12114 gimple_build_assign (v2
, x
));
12116 else if (is_variable_sized (var
))
12118 tree pvar
= DECL_VALUE_EXPR (var
);
12119 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12120 pvar
= TREE_OPERAND (pvar
, 0);
12121 gcc_assert (DECL_P (pvar
));
12122 tree new_var
= lookup_decl (pvar
, ctx
);
12123 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12124 gimple_seq_add_stmt (&new_body
,
12125 gimple_build_assign (new_var
, x
));
12127 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12128 && !omp_is_reference (var
)
12129 && !omp_is_allocatable_or_ptr (var
))
12130 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12132 tree new_var
= lookup_decl (var
, ctx
);
12133 new_var
= DECL_VALUE_EXPR (new_var
);
12134 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
12135 new_var
= TREE_OPERAND (new_var
, 0);
12136 gcc_assert (DECL_P (new_var
));
12137 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12138 gimple_seq_add_stmt (&new_body
,
12139 gimple_build_assign (new_var
, x
));
12143 tree type
= TREE_TYPE (var
);
12144 tree new_var
= lookup_decl (var
, ctx
);
12145 if (omp_is_reference (var
))
12147 type
= TREE_TYPE (type
);
12148 if (TREE_CODE (type
) != ARRAY_TYPE
12149 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
12150 || (omp_is_reference (var
)
12151 && omp_is_allocatable_or_ptr (var
))))
12153 tree v
= create_tmp_var_raw (type
, get_name (var
));
12154 gimple_add_tmp_var (v
);
12155 TREE_ADDRESSABLE (v
) = 1;
12156 x
= fold_convert (type
, x
);
12157 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12159 gimple_seq_add_stmt (&new_body
,
12160 gimple_build_assign (v
, x
));
12161 x
= build_fold_addr_expr (v
);
12164 new_var
= DECL_VALUE_EXPR (new_var
);
12165 x
= fold_convert (TREE_TYPE (new_var
), x
);
12166 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12167 gimple_seq_add_stmt (&new_body
,
12168 gimple_build_assign (new_var
, x
));
12172 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12173 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12174 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12175 or references to VLAs. */
12176 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12177 switch (OMP_CLAUSE_CODE (c
))
12182 case OMP_CLAUSE_MAP
:
12183 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12184 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
12186 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12187 poly_int64 offset
= 0;
12189 var
= OMP_CLAUSE_DECL (c
);
12191 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
12192 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
12194 && varpool_node::get_create (var
)->offloadable
)
12196 if (TREE_CODE (var
) == INDIRECT_REF
12197 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
12198 var
= TREE_OPERAND (var
, 0);
12199 if (TREE_CODE (var
) == COMPONENT_REF
)
12201 var
= get_addr_base_and_unit_offset (var
, &offset
);
12202 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
12204 else if (DECL_SIZE (var
)
12205 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12207 tree var2
= DECL_VALUE_EXPR (var
);
12208 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12209 var2
= TREE_OPERAND (var2
, 0);
12210 gcc_assert (DECL_P (var2
));
12213 tree new_var
= lookup_decl (var
, ctx
), x
;
12214 tree type
= TREE_TYPE (new_var
);
12216 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
12217 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
12220 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
12222 new_var
= build2 (MEM_REF
, type
,
12223 build_fold_addr_expr (new_var
),
12224 build_int_cst (build_pointer_type (type
),
12227 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
12229 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
12230 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
12231 new_var
= build2 (MEM_REF
, type
,
12232 build_fold_addr_expr (new_var
),
12233 build_int_cst (build_pointer_type (type
),
12237 is_ref
= omp_is_reference (var
);
12238 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
12240 bool ref_to_array
= false;
12243 type
= TREE_TYPE (type
);
12244 if (TREE_CODE (type
) == ARRAY_TYPE
)
12246 type
= build_pointer_type (type
);
12247 ref_to_array
= true;
12250 else if (TREE_CODE (type
) == ARRAY_TYPE
)
12252 tree decl2
= DECL_VALUE_EXPR (new_var
);
12253 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
12254 decl2
= TREE_OPERAND (decl2
, 0);
12255 gcc_assert (DECL_P (decl2
));
12257 type
= TREE_TYPE (new_var
);
12259 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
12260 x
= fold_convert_loc (clause_loc
, type
, x
);
12261 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
12263 tree bias
= OMP_CLAUSE_SIZE (c
);
12265 bias
= lookup_decl (bias
, ctx
);
12266 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
12267 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
12269 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
12270 TREE_TYPE (x
), x
, bias
);
12273 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12274 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12275 if (is_ref
&& !ref_to_array
)
12277 tree t
= create_tmp_var_raw (type
, get_name (var
));
12278 gimple_add_tmp_var (t
);
12279 TREE_ADDRESSABLE (t
) = 1;
12280 gimple_seq_add_stmt (&new_body
,
12281 gimple_build_assign (t
, x
));
12282 x
= build_fold_addr_expr_loc (clause_loc
, t
);
12284 gimple_seq_add_stmt (&new_body
,
12285 gimple_build_assign (new_var
, x
));
12288 else if (OMP_CLAUSE_CHAIN (c
)
12289 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
12291 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
12292 == GOMP_MAP_FIRSTPRIVATE_POINTER
12293 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
12294 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
12297 case OMP_CLAUSE_PRIVATE
:
12298 var
= OMP_CLAUSE_DECL (c
);
12299 if (is_variable_sized (var
))
12301 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12302 tree new_var
= lookup_decl (var
, ctx
);
12303 tree pvar
= DECL_VALUE_EXPR (var
);
12304 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12305 pvar
= TREE_OPERAND (pvar
, 0);
12306 gcc_assert (DECL_P (pvar
));
12307 tree new_pvar
= lookup_decl (pvar
, ctx
);
12308 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
12309 tree al
= size_int (DECL_ALIGN (var
));
12310 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
12311 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
12312 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
12313 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12314 gimple_seq_add_stmt (&new_body
,
12315 gimple_build_assign (new_pvar
, x
));
12317 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
12319 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12320 tree new_var
= lookup_decl (var
, ctx
);
12321 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
12322 if (TREE_CONSTANT (x
))
12327 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
12328 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
12329 tree al
= size_int (TYPE_ALIGN (rtype
));
12330 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
12333 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12334 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12335 gimple_seq_add_stmt (&new_body
,
12336 gimple_build_assign (new_var
, x
));
12341 gimple_seq fork_seq
= NULL
;
12342 gimple_seq join_seq
= NULL
;
12344 if (is_oacc_parallel (ctx
))
12346 /* If there are reductions on the offloaded region itself, treat
12347 them as a dummy GANG loop. */
12348 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
12350 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
12351 false, NULL
, NULL
, &fork_seq
, &join_seq
, ctx
);
12354 gimple_seq_add_seq (&new_body
, fork_seq
);
12355 gimple_seq_add_seq (&new_body
, tgt_body
);
12356 gimple_seq_add_seq (&new_body
, join_seq
);
12359 new_body
= maybe_catch_exception (new_body
);
12361 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12362 gimple_omp_set_body (stmt
, new_body
);
12365 bind
= gimple_build_bind (NULL
, NULL
,
12366 tgt_bind
? gimple_bind_block (tgt_bind
)
12368 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12369 gimple_bind_add_seq (bind
, ilist
);
12370 gimple_bind_add_stmt (bind
, stmt
);
12371 gimple_bind_add_seq (bind
, olist
);
12373 pop_gimplify_context (NULL
);
12377 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12378 gimple_bind_add_stmt (dep_bind
, bind
);
12379 gimple_bind_add_seq (dep_bind
, dep_olist
);
12380 pop_gimplify_context (dep_bind
);
/* Expand code for an OpenMP teams directive.  Replaces the teams
   statement at *GSI_P with a GIMPLE_BIND holding the lowered body and,
   for non-gridified regions, a call to the GOMP_teams runtime entry
   point.  CTX is the omp_context for the teams region.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate the num_teams clause into a gimple value; 0 means "let the
     runtime choose".  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise for the thread_limit clause.  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  /* Lower data-sharing clauses and the region body.  */
  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      /* For a real (non grid-phony) teams region, keep the teams
	 statement and emit the GOMP_teams runtime call before the
	 body.  */
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  /* Sequence: clause setup, runtime call, body, reductions, destructor
     list, terminating OMP return.  */
  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
12452 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
12455 lower_omp_grid_body (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12457 gimple
*stmt
= gsi_stmt (*gsi_p
);
12458 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
12459 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt
),
12460 gimple_build_omp_return (false));
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  /* Task-shared decls have been remapped; force regimplification so the
     remapped form is used.  */
  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  /* Types and decls have no interesting subtrees for this walk.  */
  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* Context the statement being regimplified belongs to.  */
  omp_context *ctx;
  /* Stack of (DECL_VALUE_EXPR, decl) pairs saved so the original
     DECL_VALUE_EXPRs can be restored afterwards.  */
  vec<tree> *decls;
};
/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  /* Save the current DECL_VALUE_EXPR and the decl itself so the
	     caller can undo this temporary remapping later.  */
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  /* Rewrite the value expr in terms of the in-context copy O.  */
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      /* Temporarily remap DECL_VALUE_EXPRs of member-access dummy vars
	 to their in-context copies, recording the originals in DECLS.  */
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  /* Restore the saved DECL_VALUE_EXPRs (pushed in pairs: value expr
     first, then the decl, so they pop in reverse order).  */
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
/* Lower the statement at *GSI_P and, recursively, any statement
   sequences it contains.  CTX is the innermost enclosing OMP context,
   or NULL outside any OMP region (possibly with task_shared_vars
   set).  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only consulted when CTX is NULL but task_shared_vars is set;
     see lower_omp_regimplify_p.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Host teams (not nested inside target) are lowered like a
	 parallel/task region.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    /* Cancellation checks apply to the innermost cancellable
	       region; a SECTION delegates to its SECTIONS parent.  */
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* Nothing can be cancelled here: a cancellation point is
		   a no-op, a plain barrier stays as-is.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* The cancellable variants return a flag; branch to the
	       region's cancel label when it is set.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      /* Calls are not assignments; skip the lastprivate(conditional)
	 handling below and go straight to regimplification.  */
      goto regimplify;

    case GIMPLE_ASSIGN:
      /* For lastprivate(conditional:) handling, record into the
	 conditional temporary whenever the privatized variable is
	 stored to inside the construct.  */
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		/* Use the iterator _condtemp_, not the bool one.  */
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
/* Lower every statement in the sequence *BODY within OMP context CTX
   (NULL at the outermost level), then fold statements that
   gimplification deliberately left unfolded inside offloading/taskreg
   regions.  */

static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  /* lower_omp_1 may have changed input_location for diagnostics.  */
  input_location = saved_location;
}
/* Main entry point.  Scan the function body for OMP constructs, then
   lower them all.  Always provides PROP_gimple_lomp, even when there is
   nothing to lower.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* When generating HSA kernels, rewrite suitable target regions into
     grid form first.  */
  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
namespace {

/* Pass descriptor for the OMP lowering pass.  It requires plain gimple
   and provides the lowered-OMP properties.  */

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Gimple pass wrapper around execute_lower_omp.  */

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace
12912 make_pass_lower_omp (gcc::context
*ctxt
)
12914 return new pass_lower_omp (ctxt
);
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

/* Maps each label to the innermost OMP construct containing it.  */
static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  BRANCH_CTX and LABEL_CTX are the
   innermost OMP constructs containing the branch and its destination
   label (NULL when outside any construct).  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* Same construct on both ends: no violation.  */
  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing and error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    /* Otherwise, be vague and lazy, but efficient.  */
    error ("invalid branch to/from %s structured block", kind);

  /* Remove the offending branch so later passes don't trip over it.  */
  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Remember the innermost construct this label appears in; pass 2
	 compares it against the context of each branch.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
13061 /* Pass 2: Check each branch and see if its context differs from that of
13062 the destination label's context. */
13065 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
13066 struct walk_stmt_info
*wi
)
13068 gimple
*context
= (gimple
*) wi
->info
;
13070 gimple
*stmt
= gsi_stmt (*gsi_p
);
13072 *handled_ops_p
= true;
13074 switch (gimple_code (stmt
))
13078 case GIMPLE_OMP_PARALLEL
:
13079 case GIMPLE_OMP_TASK
:
13080 case GIMPLE_OMP_SECTIONS
:
13081 case GIMPLE_OMP_SINGLE
:
13082 case GIMPLE_OMP_SECTION
:
13083 case GIMPLE_OMP_MASTER
:
13084 case GIMPLE_OMP_ORDERED
:
13085 case GIMPLE_OMP_SCAN
:
13086 case GIMPLE_OMP_CRITICAL
:
13087 case GIMPLE_OMP_TARGET
:
13088 case GIMPLE_OMP_TEAMS
:
13089 case GIMPLE_OMP_TASKGROUP
:
13091 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
13092 wi
->info
= context
;
13095 case GIMPLE_OMP_FOR
:
13097 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13099 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
13100 diagnose_sb_2
, NULL
, wi
);
13101 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
13102 wi
->info
= context
;
13107 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
13108 tree lab
= gimple_cond_true_label (cond_stmt
);
13111 n
= splay_tree_lookup (all_labels
,
13112 (splay_tree_key
) lab
);
13113 diagnose_sb_0 (gsi_p
, context
,
13114 n
? (gimple
*) n
->value
: NULL
);
13116 lab
= gimple_cond_false_label (cond_stmt
);
13119 n
= splay_tree_lookup (all_labels
,
13120 (splay_tree_key
) lab
);
13121 diagnose_sb_0 (gsi_p
, context
,
13122 n
? (gimple
*) n
->value
: NULL
);
13129 tree lab
= gimple_goto_dest (stmt
);
13130 if (TREE_CODE (lab
) != LABEL_DECL
)
13133 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
13134 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
13138 case GIMPLE_SWITCH
:
13140 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
13142 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
13144 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
13145 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
13146 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
13152 case GIMPLE_RETURN
:
13153 diagnose_sb_0 (gsi_p
, context
, NULL
);
13163 static unsigned int
13164 diagnose_omp_structured_block_errors (void)
13166 struct walk_stmt_info wi
;
13167 gimple_seq body
= gimple_body (current_function_decl
);
13169 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
13171 memset (&wi
, 0, sizeof (wi
));
13172 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
13174 memset (&wi
, 0, sizeof (wi
));
13175 wi
.want_locations
= true;
13176 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
13178 gimple_set_body (current_function_decl
, body
);
13180 splay_tree_delete (all_labels
);
13188 const pass_data pass_data_diagnose_omp_blocks
=
13190 GIMPLE_PASS
, /* type */
13191 "*diagnose_omp_blocks", /* name */
13192 OPTGROUP_OMP
, /* optinfo_flags */
13193 TV_NONE
, /* tv_id */
13194 PROP_gimple_any
, /* properties_required */
13195 0, /* properties_provided */
13196 0, /* properties_destroyed */
13197 0, /* todo_flags_start */
13198 0, /* todo_flags_finish */
13201 class pass_diagnose_omp_blocks
: public gimple_opt_pass
13204 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
13205 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
13208 /* opt_pass methods: */
13209 virtual bool gate (function
*)
13211 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
13213 virtual unsigned int execute (function
*)
13215 return diagnose_omp_structured_block_errors ();
13218 }; // class pass_diagnose_omp_blocks
13220 } // anon namespace
13223 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
13225 return new pass_diagnose_omp_blocks (ctxt
);
13229 #include "gt-omp-low.h"