1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec
<tree
> task_reductions
;
122 /* A hash map from the reduction clauses to the registered array
124 hash_map
<tree
, unsigned> *task_reduction_map
;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
130 /* Nesting depth of this context. Used to beautify error messages re
131 invalid gotos. The outermost ctx is depth 1, with depth 0 being
132 reserved for the main body of the function. */
135 /* True if this parallel directive is nested within another. */
138 /* True if this construct can be cancelled. */
141 /* True if lower_omp_1 should look up lastprivate conditional in parent
143 bool combined_into_simd_safelen1
;
145 /* True if there is nested scan context with inclusive clause. */
148 /* True if there is nested scan context with exclusive clause. */
151 /* True in the second simd loop of for simd with inscan reductions. */
152 bool for_simd_scan_phase
;
154 /* True if there is order(concurrent) clause on the construct. */
155 bool order_concurrent
;
157 /* True if there is bind clause on the construct (i.e. a loop construct). */
161 static splay_tree all_contexts
;
162 static int taskreg_nesting_level
;
163 static int target_nesting_level
;
164 static bitmap task_shared_vars
;
165 static bitmap global_nonaddressable_vars
;
166 static vec
<omp_context
*> taskreg_contexts
;
168 static void scan_omp (gimple_seq
*, omp_context
*);
169 static tree
scan_omp_1_op (tree
*, int *, void *);
171 #define WALK_SUBSTMTS \
175 case GIMPLE_EH_FILTER: \
176 case GIMPLE_TRANSACTION: \
177 /* The sub-statements for these should be walked. */ \
178 *handled_ops_p = false; \
181 /* Return true if CTX corresponds to an oacc parallel region. */
184 is_oacc_parallel (omp_context
*ctx
)
186 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
187 return ((outer_type
== GIMPLE_OMP_TARGET
)
188 && (gimple_omp_target_kind (ctx
->stmt
)
189 == GF_OMP_TARGET_KIND_OACC_PARALLEL
));
192 /* Return true if CTX corresponds to an oacc kernels region. */
195 is_oacc_kernels (omp_context
*ctx
)
197 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
198 return ((outer_type
== GIMPLE_OMP_TARGET
)
199 && (gimple_omp_target_kind (ctx
->stmt
)
200 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
203 /* If DECL is the artificial dummy VAR_DECL created for non-static
204 data member privatization, return the underlying "this" parameter,
205 otherwise return NULL. */
/* NOTE(review): this extraction is incomplete -- the return-type line,
   the leading VAR_P guard, the early "return NULL_TREE" statements and
   the remaining switch cases are not visible here.  Fragments kept
   byte-for-byte; consult the complete upstream source before editing.  */
208 omp_member_access_dummy_var (tree decl
)
/* Bail out unless DECL looks like the compiler-generated dummy:
   artificial, ignored for debug, carrying a DECL_VALUE_EXPR, and one
   the language hooks say to disregard the value expression for.  */
211 || !DECL_ARTIFICIAL (decl
)
212 || !DECL_IGNORED_P (decl
)
213 || !DECL_HAS_VALUE_EXPR_P (decl
)
214 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
/* The value expr of such a dummy is a COMPONENT_REF off "this".  */
217 tree v
= DECL_VALUE_EXPR (decl
);
218 if (TREE_CODE (v
) != COMPONENT_REF
)
/* Strip the access expression down to its base object; only the
   POINTER_PLUS_EXPR case survives in this extraction.  */
222 switch (TREE_CODE (v
))
228 case POINTER_PLUS_EXPR
:
229 v
= TREE_OPERAND (v
, 0);
/* Accept V only if it is an artificial pointer local to the current
   function -- presumably the "this" parameter; TODO confirm against
   the full source.  */
232 if (DECL_CONTEXT (v
) == current_function_decl
233 && DECL_ARTIFICIAL (v
)
234 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
242 /* Helper for unshare_and_remap, called through walk_tree. */
/* NOTE(review): incomplete extraction -- the return-type line, the
   comparison of *TP against pair[0], the *walk_subtrees updates and
   the return statements are not visible here.  The fragments only show
   that DATA is a two-element tree array and that a match is replaced
   by an unshared copy of pair[1].  */
245 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
/* DATA carries {from, to} as a two-element array of trees.  */
247 tree
*pair
= (tree
*) data
;
/* On a match, splice in an unshared copy of the replacement tree.  */
250 *tp
= unshare_expr (pair
[1]);
/* Types and decls get their subtrees skipped; the controlling
   statement for this branch is not visible in this extraction.  */
253 else if (IS_TYPE_OR_DECL_P (*tp
))
258 /* Return unshare_expr (X) with all occurrences of FROM
262 unshare_and_remap (tree x
, tree from
, tree to
)
264 tree pair
[2] = { from
, to
};
265 x
= unshare_expr (x
);
266 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
270 /* Convenience function for calling scan_omp_1_op on tree operands. */
/* NOTE(review): incomplete extraction -- the return-type line, braces,
   and the statement that stores CTX into the walk_stmt_info (so the
   callback can see the context) are not visible here.  */
273 scan_omp_op (tree
*tp
, omp_context
*ctx
)
275 struct walk_stmt_info wi
;
/* Zero the walker state before configuring it.  */
277 memset (&wi
, 0, sizeof (wi
));
279 wi
.want_locations
= true;
281 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
284 static void lower_omp (gimple_seq
*, omp_context
*);
285 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
286 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
288 /* Return true if CTX is for an omp parallel. */
291 is_parallel_ctx (omp_context
*ctx
)
293 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
297 /* Return true if CTX is for an omp task. */
300 is_task_ctx (omp_context
*ctx
)
302 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
306 /* Return true if CTX is for an omp taskloop. */
309 is_taskloop_ctx (omp_context
*ctx
)
311 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
312 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
316 /* Return true if CTX is for a host omp teams. */
319 is_host_teams_ctx (omp_context
*ctx
)
321 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
322 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
325 /* Return true if CTX is for an omp parallel or omp task or host omp teams
326 (the last one is strictly not a task region in OpenMP speak, but we
327 need to treat it similarly). */
330 is_taskreg_ctx (omp_context
*ctx
)
332 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
335 /* Return true if EXPR is variable sized. */
338 is_variable_sized (const_tree expr
)
340 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
343 /* Lookup variables. The "maybe" form
344 allows for the variable form to not have been entered, otherwise we
345 assert that the variable must have been entered. */
348 lookup_decl (tree var
, omp_context
*ctx
)
350 tree
*n
= ctx
->cb
.decl_map
->get (var
);
355 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
357 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
358 return n
? *n
: NULL_TREE
;
362 lookup_field (tree var
, omp_context
*ctx
)
365 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
366 return (tree
) n
->value
;
370 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
373 n
= splay_tree_lookup (ctx
->sfield_map
374 ? ctx
->sfield_map
: ctx
->field_map
, key
);
375 return (tree
) n
->value
;
379 lookup_sfield (tree var
, omp_context
*ctx
)
381 return lookup_sfield ((splay_tree_key
) var
, ctx
);
385 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
388 n
= splay_tree_lookup (ctx
->field_map
, key
);
389 return n
? (tree
) n
->value
: NULL_TREE
;
393 maybe_lookup_field (tree var
, omp_context
*ctx
)
395 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
398 /* Return true if DECL should be copied by pointer. SHARED_CTX is
399 the parallel context if DECL is to be shared. */
402 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
404 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
405 || TYPE_ATOMIC (TREE_TYPE (decl
)))
408 /* We can only use copy-in/copy-out semantics for shared variables
409 when we know the value is not accessible from an outer scope. */
412 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
414 /* ??? Trivially accessible from anywhere. But why would we even
415 be passing an address in this case? Should we simply assert
416 this to be false, or should we have a cleanup pass that removes
417 these from the list of mappings? */
418 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
421 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
422 without analyzing the expression whether or not its location
423 is accessible to anyone else. In the case of nested parallel
424 regions it certainly may be. */
425 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
428 /* Do not use copy-in/copy-out for variables that have their
430 if (is_global_var (decl
))
432 /* For file scope vars, track whether we've seen them as
433 non-addressable initially and in that case, keep the same
434 answer for the duration of the pass, even when they are made
435 addressable later on e.g. through reduction expansion. Global
436 variables which weren't addressable before the pass will not
437 have their privatized copies address taken. See PR91216. */
438 if (!TREE_ADDRESSABLE (decl
))
440 if (!global_nonaddressable_vars
)
441 global_nonaddressable_vars
= BITMAP_ALLOC (NULL
);
442 bitmap_set_bit (global_nonaddressable_vars
, DECL_UID (decl
));
444 else if (!global_nonaddressable_vars
445 || !bitmap_bit_p (global_nonaddressable_vars
,
449 else if (TREE_ADDRESSABLE (decl
))
452 /* lower_send_shared_vars only uses copy-in, but not copy-out
454 if (TREE_READONLY (decl
)
455 || ((TREE_CODE (decl
) == RESULT_DECL
456 || TREE_CODE (decl
) == PARM_DECL
)
457 && DECL_BY_REFERENCE (decl
)))
460 /* Disallow copy-in/out in nested parallel if
461 decl is shared in outer parallel, otherwise
462 each thread could store the shared variable
463 in its own copy-in location, making the
464 variable no longer really shared. */
465 if (shared_ctx
->is_nested
)
469 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
470 if (is_taskreg_ctx (up
) && maybe_lookup_decl (decl
, up
))
477 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
478 c
; c
= OMP_CLAUSE_CHAIN (c
))
479 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
480 && OMP_CLAUSE_DECL (c
) == decl
)
484 goto maybe_mark_addressable_and_ret
;
488 /* For tasks avoid using copy-in/out. As tasks can be
489 deferred or executed in different thread, when GOMP_task
490 returns, the task hasn't necessarily terminated. */
491 if (is_task_ctx (shared_ctx
))
494 maybe_mark_addressable_and_ret
:
495 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
496 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
498 /* Taking address of OUTER in lower_send_shared_vars
499 might need regimplification of everything that uses the
501 if (!task_shared_vars
)
502 task_shared_vars
= BITMAP_ALLOC (NULL
);
503 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
504 TREE_ADDRESSABLE (outer
) = 1;
513 /* Construct a new automatic decl similar to VAR. */
/* NOTE(review): incomplete extraction -- the return-type line, braces
   and the final "return copy;" are not visible here.  Fragments kept
   byte-for-byte.  */
516 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
518 tree copy
= copy_var_decl (var
, name
, type
);
/* The copy lives in the current (child) function and is chained onto
   the context's list of block-local variables.  */
520 DECL_CONTEXT (copy
) = current_function_decl
;
521 DECL_CHAIN (copy
) = ctx
->block_vars
;
522 /* If VAR is listed in task_shared_vars, it means it wasn't
523 originally addressable and is just because task needs to take
524 its address. But we don't need to take address of privatizations
526 if (TREE_ADDRESSABLE (var
)
527 && ((task_shared_vars
528 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
529 || (global_nonaddressable_vars
530 && bitmap_bit_p (global_nonaddressable_vars
, DECL_UID (var
)))))
531 TREE_ADDRESSABLE (copy
) = 0;
532 ctx
->block_vars
= copy
;
538 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
540 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
543 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
546 omp_build_component_ref (tree obj
, tree field
)
548 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
549 if (TREE_THIS_VOLATILE (field
))
550 TREE_THIS_VOLATILE (ret
) |= 1;
551 if (TREE_READONLY (field
))
552 TREE_READONLY (ret
) |= 1;
556 /* Build tree nodes to access the field for VAR on the receiver side. */
/* NOTE(review): incomplete extraction -- the return-type line, the test
   that replaces FIELD with the remapped field when the lookup succeeds,
   the BY_REF guard around the extra dereference, and the final
   "return x;" are not visible here.  */
559 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
561 tree x
, field
= lookup_field (var
, ctx
);
563 /* If the receiver record type was remapped in the child function,
564 remap the field into the new record type. */
565 x
= maybe_lookup_field (field
, ctx
);
/* Dereference the receiver decl and select the field; the load is
   marked non-trapping.  */
569 x
= build_simple_mem_ref (ctx
->receiver_decl
);
570 TREE_THIS_NOTRAP (x
) = 1;
571 x
= omp_build_component_ref (x
, field
);
/* Presumably only when BY_REF: follow the pointer stored in the
   field -- the guard is not visible in this extraction.  */
574 x
= build_simple_mem_ref (x
);
575 TREE_THIS_NOTRAP (x
) = 1;
581 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
582 of a parallel, this is a component reference; for workshare constructs
583 this is some variable. */
586 build_outer_var_ref (tree var
, omp_context
*ctx
,
587 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
590 omp_context
*outer
= ctx
->outer
;
591 while (outer
&& gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
592 outer
= outer
->outer
;
594 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
596 else if (is_variable_sized (var
))
598 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
599 x
= build_outer_var_ref (x
, ctx
, code
);
600 x
= build_simple_mem_ref (x
);
602 else if (is_taskreg_ctx (ctx
))
604 bool by_ref
= use_pointer_for_field (var
, NULL
);
605 x
= build_receiver_ref (var
, by_ref
, ctx
);
607 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
608 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
610 || (code
== OMP_CLAUSE_PRIVATE
611 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
612 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
613 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
615 /* #pragma omp simd isn't a worksharing construct, and can reference
616 even private vars in its linear etc. clauses.
617 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
618 to private vars in all worksharing constructs. */
620 if (outer
&& is_taskreg_ctx (outer
))
621 x
= lookup_decl (var
, outer
);
623 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
627 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
631 = splay_tree_lookup (outer
->field_map
,
632 (splay_tree_key
) &DECL_UID (var
));
635 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
638 x
= lookup_decl (var
, outer
);
642 tree field
= (tree
) n
->value
;
643 /* If the receiver record type was remapped in the child function,
644 remap the field into the new record type. */
645 x
= maybe_lookup_field (field
, outer
);
649 x
= build_simple_mem_ref (outer
->receiver_decl
);
650 x
= omp_build_component_ref (x
, field
);
651 if (use_pointer_for_field (var
, outer
))
652 x
= build_simple_mem_ref (x
);
657 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_GRID_BODY
)
659 outer
= outer
->outer
;
661 && gimple_code (outer
->stmt
) != GIMPLE_OMP_GRID_BODY
);
663 x
= lookup_decl (var
, outer
);
665 else if (omp_is_reference (var
))
666 /* This can happen with orphaned constructs. If var is reference, it is
667 possible it is shared and as such valid. */
669 else if (omp_member_access_dummy_var (var
))
676 tree t
= omp_member_access_dummy_var (var
);
679 x
= DECL_VALUE_EXPR (var
);
680 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
682 x
= unshare_and_remap (x
, t
, o
);
684 x
= unshare_expr (x
);
688 if (omp_is_reference (var
))
689 x
= build_simple_mem_ref (x
);
694 /* Build tree nodes to access the field for VAR on the sender side. */
697 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
699 tree field
= lookup_sfield (key
, ctx
);
700 return omp_build_component_ref (ctx
->sender_decl
, field
);
704 build_sender_ref (tree var
, omp_context
*ctx
)
706 return build_sender_ref ((splay_tree_key
) var
, ctx
);
709 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
710 BASE_POINTERS_RESTRICT, declare the field with restrict. */
713 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
715 tree field
, type
, sfield
= NULL_TREE
;
716 splay_tree_key key
= (splay_tree_key
) var
;
720 key
= (splay_tree_key
) &DECL_UID (var
);
721 gcc_checking_assert (key
!= (splay_tree_key
) var
);
723 gcc_assert ((mask
& 1) == 0
724 || !splay_tree_lookup (ctx
->field_map
, key
));
725 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
726 || !splay_tree_lookup (ctx
->sfield_map
, key
));
727 gcc_assert ((mask
& 3) == 3
728 || !is_gimple_omp_oacc (ctx
->stmt
));
730 type
= TREE_TYPE (var
);
731 /* Prevent redeclaring the var in the split-off function with a restrict
732 pointer type. Note that we only clear type itself, restrict qualifiers in
733 the pointed-to type will be ignored by points-to analysis. */
734 if (POINTER_TYPE_P (type
)
735 && TYPE_RESTRICT (type
))
736 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
740 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
741 type
= build_pointer_type (build_pointer_type (type
));
744 type
= build_pointer_type (type
);
745 else if ((mask
& 3) == 1 && omp_is_reference (var
))
746 type
= TREE_TYPE (type
);
748 field
= build_decl (DECL_SOURCE_LOCATION (var
),
749 FIELD_DECL
, DECL_NAME (var
), type
);
751 /* Remember what variable this field was created for. This does have a
752 side effect of making dwarf2out ignore this member, so for helpful
753 debugging we clear it later in delete_omp_context. */
754 DECL_ABSTRACT_ORIGIN (field
) = var
;
755 if (type
== TREE_TYPE (var
))
757 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
758 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
759 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
762 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
766 insert_field_into_struct (ctx
->record_type
, field
);
767 if (ctx
->srecord_type
)
769 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
770 FIELD_DECL
, DECL_NAME (var
), type
);
771 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
772 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
773 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
774 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
775 insert_field_into_struct (ctx
->srecord_type
, sfield
);
780 if (ctx
->srecord_type
== NULL_TREE
)
784 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
785 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
786 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
788 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
789 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
790 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
791 insert_field_into_struct (ctx
->srecord_type
, sfield
);
792 splay_tree_insert (ctx
->sfield_map
,
793 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
794 (splay_tree_value
) sfield
);
798 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
799 : ctx
->srecord_type
, field
);
803 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
804 if ((mask
& 2) && ctx
->sfield_map
)
805 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
809 install_var_local (tree var
, omp_context
*ctx
)
811 tree new_var
= omp_copy_decl_1 (var
, ctx
);
812 insert_decl_map (&ctx
->cb
, var
, new_var
);
816 /* Adjust the replacement for DECL in CTX for the new context. This means
817 copying the DECL_VALUE_EXPR, and fixing up the type. */
/* NOTE(review): incomplete extraction -- the return-type line, braces and
   the local declarations (new_decl, size) are not visible here.
   Fragments kept byte-for-byte.  */
820 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
824 new_decl
= lookup_decl (decl
, ctx
);
/* Remap the type in case it refers to sizes or decls of the old fn.  */
826 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
/* Carry over a remapped DECL_VALUE_EXPR for variable-sized decls or
   private debug decls.  */
828 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
829 && DECL_HAS_VALUE_EXPR_P (decl
))
831 tree ve
= DECL_VALUE_EXPR (decl
);
832 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
833 SET_DECL_VALUE_EXPR (new_decl
, ve
);
834 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
/* For non-constant sizes, remap DECL_SIZE and DECL_SIZE_UNIT, falling
   back to the type's size when remapping yields an error.  */
837 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
839 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
840 if (size
== error_mark_node
)
841 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
842 DECL_SIZE (new_decl
) = size
;
844 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
845 if (size
== error_mark_node
)
846 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
847 DECL_SIZE_UNIT (new_decl
) = size
;
851 /* The callback for remap_decl. Search all containing contexts for a
852 mapping of the variable; this avoids having to duplicate the splay
853 tree ahead of time. We know a mapping doesn't already exist in the
854 given context. Create new mappings to implement default semantics. */
857 omp_copy_decl (tree var
, copy_body_data
*cb
)
859 omp_context
*ctx
= (omp_context
*) cb
;
862 if (TREE_CODE (var
) == LABEL_DECL
)
864 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
866 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
867 DECL_CONTEXT (new_var
) = current_function_decl
;
868 insert_decl_map (&ctx
->cb
, var
, new_var
);
872 while (!is_taskreg_ctx (ctx
))
877 new_var
= maybe_lookup_decl (var
, ctx
);
882 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
885 return error_mark_node
;
888 /* Create a new context, with OUTER_CTX being the surrounding context. */
891 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
893 omp_context
*ctx
= XCNEW (omp_context
);
895 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
896 (splay_tree_value
) ctx
);
901 ctx
->outer
= outer_ctx
;
902 ctx
->cb
= outer_ctx
->cb
;
903 ctx
->cb
.block
= NULL
;
904 ctx
->depth
= outer_ctx
->depth
+ 1;
908 ctx
->cb
.src_fn
= current_function_decl
;
909 ctx
->cb
.dst_fn
= current_function_decl
;
910 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
911 gcc_checking_assert (ctx
->cb
.src_node
);
912 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
913 ctx
->cb
.src_cfun
= cfun
;
914 ctx
->cb
.copy_decl
= omp_copy_decl
;
915 ctx
->cb
.eh_lp_nr
= 0;
916 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
917 ctx
->cb
.adjust_array_error_bounds
= true;
918 ctx
->cb
.dont_remap_vla_if_no_change
= true;
922 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
927 static gimple_seq
maybe_catch_exception (gimple_seq
);
929 /* Finalize task copyfn. */
932 finalize_task_copyfn (gomp_task
*task_stmt
)
934 struct function
*child_cfun
;
936 gimple_seq seq
= NULL
, new_seq
;
939 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
940 if (child_fn
== NULL_TREE
)
943 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
944 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
946 push_cfun (child_cfun
);
947 bind
= gimplify_body (child_fn
, false);
948 gimple_seq_add_stmt (&seq
, bind
);
949 new_seq
= maybe_catch_exception (seq
);
952 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
954 gimple_seq_add_stmt (&seq
, bind
);
956 gimple_set_body (child_fn
, seq
);
959 /* Inform the callgraph about the new function. */
960 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
961 node
->parallelized_function
= 1;
962 cgraph_node::add_new_function (child_fn
, false);
965 /* Destroy a omp_context data structures. Called through the splay tree
966 value delete callback. */
/* NOTE(review): incomplete extraction -- the return-type line, braces,
   the NULL guards around the splay-tree deletions and the local tree
   declarations are not visible here.  Fragments kept byte-for-byte.  */
969 delete_omp_context (splay_tree_value value
)
971 omp_context
*ctx
= (omp_context
*) value
;
/* Tear down the decl remapping table and the field maps.  */
973 delete ctx
->cb
.decl_map
;
976 splay_tree_delete (ctx
->field_map
);
978 splay_tree_delete (ctx
->sfield_map
);
980 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
981 it produces corrupt debug information. */
982 if (ctx
->record_type
)
985 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
986 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
988 if (ctx
->srecord_type
)
991 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
992 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
/* Task contexts own a copy function that must be finalized now.  */
995 if (is_task_ctx (ctx
))
996 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
/* Release task-reduction bookkeeping, if any was created.  */
998 if (ctx
->task_reduction_map
)
1000 ctx
->task_reductions
.release ();
1001 delete ctx
->task_reduction_map
;
1004 delete ctx
->lastprivate_conditional_map
;
1009 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1013 fixup_child_record_type (omp_context
*ctx
)
1015 tree f
, type
= ctx
->record_type
;
1017 if (!ctx
->receiver_decl
)
1019 /* ??? It isn't sufficient to just call remap_type here, because
1020 variably_modified_type_p doesn't work the way we expect for
1021 record types. Testing each field for whether it needs remapping
1022 and creating a new record by hand works, however. */
1023 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
1024 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
1028 tree name
, new_fields
= NULL
;
1030 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1031 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1032 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1033 TYPE_DECL
, name
, type
);
1034 TYPE_NAME (type
) = name
;
1036 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1038 tree new_f
= copy_node (f
);
1039 DECL_CONTEXT (new_f
) = type
;
1040 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1041 DECL_CHAIN (new_f
) = new_fields
;
1042 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1043 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1045 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1049 /* Arrange to be able to look up the receiver field
1050 given the sender field. */
1051 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1052 (splay_tree_value
) new_f
);
1054 TYPE_FIELDS (type
) = nreverse (new_fields
);
1058 /* In a target region we never modify any of the pointers in *.omp_data_i,
1059 so attempt to help the optimizers. */
1060 if (is_gimple_omp_offloaded (ctx
->stmt
))
1061 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1063 TREE_TYPE (ctx
->receiver_decl
)
1064 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1067 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1068 specified by CLAUSES. */
1071 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1074 bool scan_array_reductions
= false;
1076 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1080 switch (OMP_CLAUSE_CODE (c
))
1082 case OMP_CLAUSE_PRIVATE
:
1083 decl
= OMP_CLAUSE_DECL (c
);
1084 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1086 else if (!is_variable_sized (decl
))
1087 install_var_local (decl
, ctx
);
1090 case OMP_CLAUSE_SHARED
:
1091 decl
= OMP_CLAUSE_DECL (c
);
1092 /* Ignore shared directives in teams construct inside of
1093 target construct. */
1094 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1095 && !is_host_teams_ctx (ctx
))
1097 /* Global variables don't need to be copied,
1098 the receiver side will use them directly. */
1099 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1100 if (is_global_var (odecl
))
1102 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1105 gcc_assert (is_taskreg_ctx (ctx
));
1106 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1107 || !is_variable_sized (decl
));
1108 /* Global variables don't need to be copied,
1109 the receiver side will use them directly. */
1110 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1112 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1114 use_pointer_for_field (decl
, ctx
);
1117 by_ref
= use_pointer_for_field (decl
, NULL
);
1118 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1119 || TREE_ADDRESSABLE (decl
)
1121 || omp_is_reference (decl
))
1123 by_ref
= use_pointer_for_field (decl
, ctx
);
1124 install_var_field (decl
, by_ref
, 3, ctx
);
1125 install_var_local (decl
, ctx
);
1128 /* We don't need to copy const scalar vars back. */
1129 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1132 case OMP_CLAUSE_REDUCTION
:
1133 case OMP_CLAUSE_IN_REDUCTION
:
1134 decl
= OMP_CLAUSE_DECL (c
);
1135 if (TREE_CODE (decl
) == MEM_REF
)
1137 tree t
= TREE_OPERAND (decl
, 0);
1138 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1139 t
= TREE_OPERAND (t
, 0);
1140 if (TREE_CODE (t
) == INDIRECT_REF
1141 || TREE_CODE (t
) == ADDR_EXPR
)
1142 t
= TREE_OPERAND (t
, 0);
1143 install_var_local (t
, ctx
);
1144 if (is_taskreg_ctx (ctx
)
1145 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1146 || (is_task_ctx (ctx
)
1147 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1148 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1149 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1150 == POINTER_TYPE
)))))
1151 && !is_variable_sized (t
)
1152 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1153 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1154 && !is_task_ctx (ctx
))))
1156 by_ref
= use_pointer_for_field (t
, NULL
);
1157 if (is_task_ctx (ctx
)
1158 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1159 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1161 install_var_field (t
, false, 1, ctx
);
1162 install_var_field (t
, by_ref
, 2, ctx
);
1165 install_var_field (t
, by_ref
, 3, ctx
);
1169 if (is_task_ctx (ctx
)
1170 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1171 && OMP_CLAUSE_REDUCTION_TASK (c
)
1172 && is_parallel_ctx (ctx
)))
1174 /* Global variables don't need to be copied,
1175 the receiver side will use them directly. */
1176 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1178 by_ref
= use_pointer_for_field (decl
, ctx
);
1179 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1180 install_var_field (decl
, by_ref
, 3, ctx
);
1182 install_var_local (decl
, ctx
);
1185 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1186 && OMP_CLAUSE_REDUCTION_TASK (c
))
1188 install_var_local (decl
, ctx
);
1193 case OMP_CLAUSE_LASTPRIVATE
:
1194 /* Let the corresponding firstprivate clause create
1196 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1200 case OMP_CLAUSE_FIRSTPRIVATE
:
1201 case OMP_CLAUSE_LINEAR
:
1202 decl
= OMP_CLAUSE_DECL (c
);
1204 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1205 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1206 && is_gimple_omp_offloaded (ctx
->stmt
))
1208 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1209 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1210 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1211 install_var_field (decl
, true, 3, ctx
);
1213 install_var_field (decl
, false, 3, ctx
);
1215 if (is_variable_sized (decl
))
1217 if (is_task_ctx (ctx
))
1218 install_var_field (decl
, false, 1, ctx
);
1221 else if (is_taskreg_ctx (ctx
))
1224 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1225 by_ref
= use_pointer_for_field (decl
, NULL
);
1227 if (is_task_ctx (ctx
)
1228 && (global
|| by_ref
|| omp_is_reference (decl
)))
1230 install_var_field (decl
, false, 1, ctx
);
1232 install_var_field (decl
, by_ref
, 2, ctx
);
1235 install_var_field (decl
, by_ref
, 3, ctx
);
1237 install_var_local (decl
, ctx
);
1240 case OMP_CLAUSE_USE_DEVICE_PTR
:
1241 decl
= OMP_CLAUSE_DECL (c
);
1242 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1243 install_var_field (decl
, true, 3, ctx
);
1245 install_var_field (decl
, false, 3, ctx
);
1246 if (DECL_SIZE (decl
)
1247 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1249 tree decl2
= DECL_VALUE_EXPR (decl
);
1250 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1251 decl2
= TREE_OPERAND (decl2
, 0);
1252 gcc_assert (DECL_P (decl2
));
1253 install_var_local (decl2
, ctx
);
1255 install_var_local (decl
, ctx
);
1258 case OMP_CLAUSE_IS_DEVICE_PTR
:
1259 decl
= OMP_CLAUSE_DECL (c
);
1262 case OMP_CLAUSE__LOOPTEMP_
:
1263 case OMP_CLAUSE__REDUCTEMP_
:
1264 gcc_assert (is_taskreg_ctx (ctx
));
1265 decl
= OMP_CLAUSE_DECL (c
);
1266 install_var_field (decl
, false, 3, ctx
);
1267 install_var_local (decl
, ctx
);
1270 case OMP_CLAUSE_COPYPRIVATE
:
1271 case OMP_CLAUSE_COPYIN
:
1272 decl
= OMP_CLAUSE_DECL (c
);
1273 by_ref
= use_pointer_for_field (decl
, NULL
);
1274 install_var_field (decl
, by_ref
, 3, ctx
);
1277 case OMP_CLAUSE_FINAL
:
1279 case OMP_CLAUSE_NUM_THREADS
:
1280 case OMP_CLAUSE_NUM_TEAMS
:
1281 case OMP_CLAUSE_THREAD_LIMIT
:
1282 case OMP_CLAUSE_DEVICE
:
1283 case OMP_CLAUSE_SCHEDULE
:
1284 case OMP_CLAUSE_DIST_SCHEDULE
:
1285 case OMP_CLAUSE_DEPEND
:
1286 case OMP_CLAUSE_PRIORITY
:
1287 case OMP_CLAUSE_GRAINSIZE
:
1288 case OMP_CLAUSE_NUM_TASKS
:
1289 case OMP_CLAUSE_NUM_GANGS
:
1290 case OMP_CLAUSE_NUM_WORKERS
:
1291 case OMP_CLAUSE_VECTOR_LENGTH
:
1293 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1297 case OMP_CLAUSE_FROM
:
1298 case OMP_CLAUSE_MAP
:
1300 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1301 decl
= OMP_CLAUSE_DECL (c
);
1302 /* Global variables with "omp declare target" attribute
1303 don't need to be copied, the receiver side will use them
1304 directly. However, global variables with "omp declare target link"
1305 attribute need to be copied. Or when ALWAYS modifier is used. */
1306 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1308 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1309 && (OMP_CLAUSE_MAP_KIND (c
)
1310 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1311 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1312 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1313 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1314 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1315 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1316 && varpool_node::get_create (decl
)->offloadable
1317 && !lookup_attribute ("omp declare target link",
1318 DECL_ATTRIBUTES (decl
)))
1320 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1321 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1323 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1324 not offloaded; there is nothing to map for those. */
1325 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1326 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1327 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1330 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1331 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1332 || (OMP_CLAUSE_MAP_KIND (c
)
1333 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1335 if (TREE_CODE (decl
) == COMPONENT_REF
1336 || (TREE_CODE (decl
) == INDIRECT_REF
1337 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1338 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1339 == REFERENCE_TYPE
)))
1341 if (DECL_SIZE (decl
)
1342 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1344 tree decl2
= DECL_VALUE_EXPR (decl
);
1345 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1346 decl2
= TREE_OPERAND (decl2
, 0);
1347 gcc_assert (DECL_P (decl2
));
1348 install_var_local (decl2
, ctx
);
1350 install_var_local (decl
, ctx
);
1355 if (DECL_SIZE (decl
)
1356 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1358 tree decl2
= DECL_VALUE_EXPR (decl
);
1359 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1360 decl2
= TREE_OPERAND (decl2
, 0);
1361 gcc_assert (DECL_P (decl2
));
1362 install_var_field (decl2
, true, 3, ctx
);
1363 install_var_local (decl2
, ctx
);
1364 install_var_local (decl
, ctx
);
1368 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1369 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1370 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1371 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1372 install_var_field (decl
, true, 7, ctx
);
1374 install_var_field (decl
, true, 3, ctx
);
1375 if (is_gimple_omp_offloaded (ctx
->stmt
)
1376 && !OMP_CLAUSE_MAP_IN_REDUCTION (c
))
1377 install_var_local (decl
, ctx
);
1382 tree base
= get_base_address (decl
);
1383 tree nc
= OMP_CLAUSE_CHAIN (c
);
1386 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1387 && OMP_CLAUSE_DECL (nc
) == base
1388 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1389 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1391 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1392 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1398 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1399 decl
= OMP_CLAUSE_DECL (c
);
1401 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1402 (splay_tree_key
) decl
));
1404 = build_decl (OMP_CLAUSE_LOCATION (c
),
1405 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1406 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1407 insert_field_into_struct (ctx
->record_type
, field
);
1408 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1409 (splay_tree_value
) field
);
1414 case OMP_CLAUSE__GRIDDIM_
:
1417 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c
), ctx
->outer
);
1418 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c
), ctx
->outer
);
1422 case OMP_CLAUSE_ORDER
:
1423 ctx
->order_concurrent
= true;
1426 case OMP_CLAUSE_BIND
:
1430 case OMP_CLAUSE_NOWAIT
:
1431 case OMP_CLAUSE_ORDERED
:
1432 case OMP_CLAUSE_COLLAPSE
:
1433 case OMP_CLAUSE_UNTIED
:
1434 case OMP_CLAUSE_MERGEABLE
:
1435 case OMP_CLAUSE_PROC_BIND
:
1436 case OMP_CLAUSE_SAFELEN
:
1437 case OMP_CLAUSE_SIMDLEN
:
1438 case OMP_CLAUSE_THREADS
:
1439 case OMP_CLAUSE_SIMD
:
1440 case OMP_CLAUSE_NOGROUP
:
1441 case OMP_CLAUSE_DEFAULTMAP
:
1442 case OMP_CLAUSE_ASYNC
:
1443 case OMP_CLAUSE_WAIT
:
1444 case OMP_CLAUSE_GANG
:
1445 case OMP_CLAUSE_WORKER
:
1446 case OMP_CLAUSE_VECTOR
:
1447 case OMP_CLAUSE_INDEPENDENT
:
1448 case OMP_CLAUSE_AUTO
:
1449 case OMP_CLAUSE_SEQ
:
1450 case OMP_CLAUSE_TILE
:
1451 case OMP_CLAUSE__SIMT_
:
1452 case OMP_CLAUSE_DEFAULT
:
1453 case OMP_CLAUSE_NONTEMPORAL
:
1454 case OMP_CLAUSE_IF_PRESENT
:
1455 case OMP_CLAUSE_FINALIZE
:
1456 case OMP_CLAUSE_TASK_REDUCTION
:
1459 case OMP_CLAUSE_ALIGNED
:
1460 decl
= OMP_CLAUSE_DECL (c
);
1461 if (is_global_var (decl
)
1462 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1463 install_var_local (decl
, ctx
);
1466 case OMP_CLAUSE__CONDTEMP_
:
1467 decl
= OMP_CLAUSE_DECL (c
);
1468 if (is_parallel_ctx (ctx
))
1470 install_var_field (decl
, false, 3, ctx
);
1471 install_var_local (decl
, ctx
);
1473 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1474 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1475 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1476 install_var_local (decl
, ctx
);
1479 case OMP_CLAUSE__CACHE_
:
1485 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1487 switch (OMP_CLAUSE_CODE (c
))
1489 case OMP_CLAUSE_LASTPRIVATE
:
1490 /* Let the corresponding firstprivate clause create
1492 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1493 scan_array_reductions
= true;
1494 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1498 case OMP_CLAUSE_FIRSTPRIVATE
:
1499 case OMP_CLAUSE_PRIVATE
:
1500 case OMP_CLAUSE_LINEAR
:
1501 case OMP_CLAUSE_IS_DEVICE_PTR
:
1502 decl
= OMP_CLAUSE_DECL (c
);
1503 if (is_variable_sized (decl
))
1505 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1506 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1507 && is_gimple_omp_offloaded (ctx
->stmt
))
1509 tree decl2
= DECL_VALUE_EXPR (decl
);
1510 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1511 decl2
= TREE_OPERAND (decl2
, 0);
1512 gcc_assert (DECL_P (decl2
));
1513 install_var_local (decl2
, ctx
);
1514 fixup_remapped_decl (decl2
, ctx
, false);
1516 install_var_local (decl
, ctx
);
1518 fixup_remapped_decl (decl
, ctx
,
1519 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1520 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1521 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1522 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1523 scan_array_reductions
= true;
1526 case OMP_CLAUSE_REDUCTION
:
1527 case OMP_CLAUSE_IN_REDUCTION
:
1528 decl
= OMP_CLAUSE_DECL (c
);
1529 if (TREE_CODE (decl
) != MEM_REF
)
1531 if (is_variable_sized (decl
))
1532 install_var_local (decl
, ctx
);
1533 fixup_remapped_decl (decl
, ctx
, false);
1535 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1536 scan_array_reductions
= true;
1539 case OMP_CLAUSE_TASK_REDUCTION
:
1540 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1541 scan_array_reductions
= true;
1544 case OMP_CLAUSE_SHARED
:
1545 /* Ignore shared directives in teams construct inside of
1546 target construct. */
1547 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1548 && !is_host_teams_ctx (ctx
))
1550 decl
= OMP_CLAUSE_DECL (c
);
1551 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1553 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1555 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1558 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1559 install_var_field (decl
, by_ref
, 11, ctx
);
1562 fixup_remapped_decl (decl
, ctx
, false);
1565 case OMP_CLAUSE_MAP
:
1566 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1568 decl
= OMP_CLAUSE_DECL (c
);
1570 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1571 && (OMP_CLAUSE_MAP_KIND (c
)
1572 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1573 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1574 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1575 && varpool_node::get_create (decl
)->offloadable
)
1579 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1580 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1581 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1582 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1584 tree new_decl
= lookup_decl (decl
, ctx
);
1585 TREE_TYPE (new_decl
)
1586 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1588 else if (DECL_SIZE (decl
)
1589 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1591 tree decl2
= DECL_VALUE_EXPR (decl
);
1592 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1593 decl2
= TREE_OPERAND (decl2
, 0);
1594 gcc_assert (DECL_P (decl2
));
1595 fixup_remapped_decl (decl2
, ctx
, false);
1596 fixup_remapped_decl (decl
, ctx
, true);
1599 fixup_remapped_decl (decl
, ctx
, false);
1603 case OMP_CLAUSE_COPYPRIVATE
:
1604 case OMP_CLAUSE_COPYIN
:
1605 case OMP_CLAUSE_DEFAULT
:
1607 case OMP_CLAUSE_NUM_THREADS
:
1608 case OMP_CLAUSE_NUM_TEAMS
:
1609 case OMP_CLAUSE_THREAD_LIMIT
:
1610 case OMP_CLAUSE_DEVICE
:
1611 case OMP_CLAUSE_SCHEDULE
:
1612 case OMP_CLAUSE_DIST_SCHEDULE
:
1613 case OMP_CLAUSE_NOWAIT
:
1614 case OMP_CLAUSE_ORDERED
:
1615 case OMP_CLAUSE_COLLAPSE
:
1616 case OMP_CLAUSE_UNTIED
:
1617 case OMP_CLAUSE_FINAL
:
1618 case OMP_CLAUSE_MERGEABLE
:
1619 case OMP_CLAUSE_PROC_BIND
:
1620 case OMP_CLAUSE_SAFELEN
:
1621 case OMP_CLAUSE_SIMDLEN
:
1622 case OMP_CLAUSE_ALIGNED
:
1623 case OMP_CLAUSE_DEPEND
:
1624 case OMP_CLAUSE__LOOPTEMP_
:
1625 case OMP_CLAUSE__REDUCTEMP_
:
1627 case OMP_CLAUSE_FROM
:
1628 case OMP_CLAUSE_PRIORITY
:
1629 case OMP_CLAUSE_GRAINSIZE
:
1630 case OMP_CLAUSE_NUM_TASKS
:
1631 case OMP_CLAUSE_THREADS
:
1632 case OMP_CLAUSE_SIMD
:
1633 case OMP_CLAUSE_NOGROUP
:
1634 case OMP_CLAUSE_DEFAULTMAP
:
1635 case OMP_CLAUSE_ORDER
:
1636 case OMP_CLAUSE_BIND
:
1637 case OMP_CLAUSE_USE_DEVICE_PTR
:
1638 case OMP_CLAUSE_NONTEMPORAL
:
1639 case OMP_CLAUSE_ASYNC
:
1640 case OMP_CLAUSE_WAIT
:
1641 case OMP_CLAUSE_NUM_GANGS
:
1642 case OMP_CLAUSE_NUM_WORKERS
:
1643 case OMP_CLAUSE_VECTOR_LENGTH
:
1644 case OMP_CLAUSE_GANG
:
1645 case OMP_CLAUSE_WORKER
:
1646 case OMP_CLAUSE_VECTOR
:
1647 case OMP_CLAUSE_INDEPENDENT
:
1648 case OMP_CLAUSE_AUTO
:
1649 case OMP_CLAUSE_SEQ
:
1650 case OMP_CLAUSE_TILE
:
1651 case OMP_CLAUSE__GRIDDIM_
:
1652 case OMP_CLAUSE__SIMT_
:
1653 case OMP_CLAUSE_IF_PRESENT
:
1654 case OMP_CLAUSE_FINALIZE
:
1655 case OMP_CLAUSE__CONDTEMP_
:
1658 case OMP_CLAUSE__CACHE_
:
1664 gcc_checking_assert (!scan_array_reductions
1665 || !is_gimple_omp_oacc (ctx
->stmt
));
1666 if (scan_array_reductions
)
1668 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1669 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1670 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1671 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1672 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1674 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), ctx
);
1675 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
1677 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1678 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1679 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1680 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1681 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1682 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1686 /* Create a new name for omp child function. Returns an identifier. */
1689 create_omp_child_function_name (bool task_copy
)
1691 return clone_function_name_numbered (current_function_decl
,
1692 task_copy
? "_omp_cpyfn" : "_omp_fn");
1695 /* Return true if CTX may belong to offloaded code: either if current function
1696 is offloaded, or any enclosing context corresponds to a target region. */
1699 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1701 if (cgraph_node::get (current_function_decl
)->offloadable
)
1703 for (; ctx
; ctx
= ctx
->outer
)
1704 if (is_gimple_omp_offloaded (ctx
->stmt
))
1709 /* Build a decl for the omp child function. It'll not contain a body
1710 yet, just the bare decl. */
1713 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1715 tree decl
, type
, name
, t
;
1717 name
= create_omp_child_function_name (task_copy
);
1719 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1720 ptr_type_node
, NULL_TREE
);
1722 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1724 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1726 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1729 ctx
->cb
.dst_fn
= decl
;
1731 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1733 TREE_STATIC (decl
) = 1;
1734 TREE_USED (decl
) = 1;
1735 DECL_ARTIFICIAL (decl
) = 1;
1736 DECL_IGNORED_P (decl
) = 0;
1737 TREE_PUBLIC (decl
) = 0;
1738 DECL_UNINLINABLE (decl
) = 1;
1739 DECL_EXTERNAL (decl
) = 0;
1740 DECL_CONTEXT (decl
) = NULL_TREE
;
1741 DECL_INITIAL (decl
) = make_node (BLOCK
);
1742 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1743 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1744 /* Remove omp declare simd attribute from the new attributes. */
1745 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1747 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1750 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1751 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1752 *p
= TREE_CHAIN (*p
);
1755 tree chain
= TREE_CHAIN (*p
);
1756 *p
= copy_node (*p
);
1757 p
= &TREE_CHAIN (*p
);
1761 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1762 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1763 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
1764 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
1765 DECL_FUNCTION_VERSIONED (decl
)
1766 = DECL_FUNCTION_VERSIONED (current_function_decl
);
1768 if (omp_maybe_offloaded_ctx (ctx
))
1770 cgraph_node::get_create (decl
)->offloadable
= 1;
1771 if (ENABLE_OFFLOADING
)
1772 g
->have_offload
= true;
1775 if (cgraph_node::get_create (decl
)->offloadable
1776 && !lookup_attribute ("omp declare target",
1777 DECL_ATTRIBUTES (current_function_decl
)))
1779 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1780 ? "omp target entrypoint"
1781 : "omp declare target");
1782 DECL_ATTRIBUTES (decl
)
1783 = tree_cons (get_identifier (target_attr
),
1784 NULL_TREE
, DECL_ATTRIBUTES (decl
));
1787 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1788 RESULT_DECL
, NULL_TREE
, void_type_node
);
1789 DECL_ARTIFICIAL (t
) = 1;
1790 DECL_IGNORED_P (t
) = 1;
1791 DECL_CONTEXT (t
) = decl
;
1792 DECL_RESULT (decl
) = t
;
1794 tree data_name
= get_identifier (".omp_data_i");
1795 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
1797 DECL_ARTIFICIAL (t
) = 1;
1798 DECL_NAMELESS (t
) = 1;
1799 DECL_ARG_TYPE (t
) = ptr_type_node
;
1800 DECL_CONTEXT (t
) = current_function_decl
;
1802 TREE_READONLY (t
) = 1;
1803 DECL_ARGUMENTS (decl
) = t
;
1805 ctx
->receiver_decl
= t
;
1808 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1809 PARM_DECL
, get_identifier (".omp_data_o"),
1811 DECL_ARTIFICIAL (t
) = 1;
1812 DECL_NAMELESS (t
) = 1;
1813 DECL_ARG_TYPE (t
) = ptr_type_node
;
1814 DECL_CONTEXT (t
) = current_function_decl
;
1816 TREE_ADDRESSABLE (t
) = 1;
1817 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1818 DECL_ARGUMENTS (decl
) = t
;
1821 /* Allocate memory for the function structure. The call to
1822 allocate_struct_function clobbers CFUN, so we need to restore
1824 push_struct_function (decl
);
1825 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
1826 init_tree_ssa (cfun
);
1830 /* Callback for walk_gimple_seq. Check if combined parallel
1831 contains gimple_omp_for_combined_into_p OMP_FOR. */
1834 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
1835 bool *handled_ops_p
,
1836 struct walk_stmt_info
*wi
)
1838 gimple
*stmt
= gsi_stmt (*gsi_p
);
1840 *handled_ops_p
= true;
1841 switch (gimple_code (stmt
))
1845 case GIMPLE_OMP_FOR
:
1846 if (gimple_omp_for_combined_into_p (stmt
)
1847 && gimple_omp_for_kind (stmt
)
1848 == *(const enum gf_mask
*) (wi
->info
))
1851 return integer_zero_node
;
1860 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1863 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
1864 omp_context
*outer_ctx
)
1866 struct walk_stmt_info wi
;
1868 memset (&wi
, 0, sizeof (wi
));
1870 wi
.info
= (void *) &msk
;
1871 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
1872 if (wi
.info
!= (void *) &msk
)
1874 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
1875 struct omp_for_data fd
;
1876 omp_extract_for_data (for_stmt
, &fd
, NULL
);
1877 /* We need two temporaries with fd.loop.v type (istart/iend)
1878 and then (fd.collapse - 1) temporaries with the same
1879 type for count2 ... countN-1 vars if not constant. */
1880 size_t count
= 2, i
;
1881 tree type
= fd
.iter_type
;
1883 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
1885 count
+= fd
.collapse
- 1;
1886 /* If there are lastprivate clauses on the inner
1887 GIMPLE_OMP_FOR, add one more temporaries for the total number
1888 of iterations (product of count1 ... countN-1). */
1889 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
1890 OMP_CLAUSE_LASTPRIVATE
))
1892 else if (msk
== GF_OMP_FOR_KIND_FOR
1893 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1894 OMP_CLAUSE_LASTPRIVATE
))
1897 for (i
= 0; i
< count
; i
++)
1899 tree temp
= create_tmp_var (type
);
1900 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
1901 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1902 OMP_CLAUSE_DECL (c
) = temp
;
1903 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1904 gimple_omp_taskreg_set_clauses (stmt
, c
);
1907 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
1908 && omp_find_clause (gimple_omp_task_clauses (stmt
),
1909 OMP_CLAUSE_REDUCTION
))
1911 tree type
= build_pointer_type (pointer_sized_int_node
);
1912 tree temp
= create_tmp_var (type
);
1913 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1914 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1915 OMP_CLAUSE_DECL (c
) = temp
;
1916 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
1917 gimple_omp_task_set_clauses (stmt
, c
);
1921 /* Scan an OpenMP parallel directive. */
1924 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1928 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
1930 /* Ignore parallel directives with empty bodies, unless there
1931 are copyin clauses. */
1933 && empty_body_p (gimple_omp_body (stmt
))
1934 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1935 OMP_CLAUSE_COPYIN
) == NULL
)
1937 gsi_replace (gsi
, gimple_build_nop (), false);
1941 if (gimple_omp_parallel_combined_p (stmt
))
1942 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
1943 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1944 OMP_CLAUSE_REDUCTION
);
1945 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
1946 if (OMP_CLAUSE_REDUCTION_TASK (c
))
1948 tree type
= build_pointer_type (pointer_sized_int_node
);
1949 tree temp
= create_tmp_var (type
);
1950 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1952 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1953 OMP_CLAUSE_DECL (c
) = temp
;
1954 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
1955 gimple_omp_parallel_set_clauses (stmt
, c
);
1958 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
1961 ctx
= new_omp_context (stmt
, outer_ctx
);
1962 taskreg_contexts
.safe_push (ctx
);
1963 if (taskreg_nesting_level
> 1)
1964 ctx
->is_nested
= true;
1965 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1966 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1967 name
= create_tmp_var_name (".omp_data_s");
1968 name
= build_decl (gimple_location (stmt
),
1969 TYPE_DECL
, name
, ctx
->record_type
);
1970 DECL_ARTIFICIAL (name
) = 1;
1971 DECL_NAMELESS (name
) = 1;
1972 TYPE_NAME (ctx
->record_type
) = name
;
1973 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1974 if (!gimple_omp_parallel_grid_phony (stmt
))
1976 create_omp_child_function (ctx
, false);
1977 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1980 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
1981 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
1983 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
1984 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
1987 /* Scan an OpenMP task directive. */
1990 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1994 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
1996 /* Ignore task directives with empty bodies, unless they have depend
1999 && gimple_omp_body (stmt
)
2000 && empty_body_p (gimple_omp_body (stmt
))
2001 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
2003 gsi_replace (gsi
, gimple_build_nop (), false);
2007 if (gimple_omp_task_taskloop_p (stmt
))
2008 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
2010 ctx
= new_omp_context (stmt
, outer_ctx
);
2012 if (gimple_omp_task_taskwait_p (stmt
))
2014 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2018 taskreg_contexts
.safe_push (ctx
);
2019 if (taskreg_nesting_level
> 1)
2020 ctx
->is_nested
= true;
2021 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2022 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2023 name
= create_tmp_var_name (".omp_data_s");
2024 name
= build_decl (gimple_location (stmt
),
2025 TYPE_DECL
, name
, ctx
->record_type
);
2026 DECL_ARTIFICIAL (name
) = 1;
2027 DECL_NAMELESS (name
) = 1;
2028 TYPE_NAME (ctx
->record_type
) = name
;
2029 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2030 create_omp_child_function (ctx
, false);
2031 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2033 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2035 if (ctx
->srecord_type
)
2037 name
= create_tmp_var_name (".omp_data_a");
2038 name
= build_decl (gimple_location (stmt
),
2039 TYPE_DECL
, name
, ctx
->srecord_type
);
2040 DECL_ARTIFICIAL (name
) = 1;
2041 DECL_NAMELESS (name
) = 1;
2042 TYPE_NAME (ctx
->srecord_type
) = name
;
2043 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2044 create_omp_child_function (ctx
, true);
2047 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2049 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2051 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2052 t
= build_int_cst (long_integer_type_node
, 0);
2053 gimple_omp_task_set_arg_size (stmt
, t
);
2054 t
= build_int_cst (long_integer_type_node
, 1);
2055 gimple_omp_task_set_arg_align (stmt
, t
);
2059 /* Helper function for finish_taskreg_scan, called through walk_tree.
2060 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2061 tree, replace it in the expression. */
2064 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2068 omp_context
*ctx
= (omp_context
*) data
;
2069 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
2072 if (DECL_HAS_VALUE_EXPR_P (t
))
2073 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2078 else if (IS_TYPE_OR_DECL_P (*tp
))
2083 /* If any decls have been made addressable during scan_omp,
2084 adjust their fields if needed, and layout record types
2085 of parallel/task constructs. */
2088 finish_taskreg_scan (omp_context
*ctx
)
2090 if (ctx
->record_type
== NULL_TREE
)
2093 /* If any task_shared_vars were needed, verify all
2094 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2095 statements if use_pointer_for_field hasn't changed
2096 because of that. If it did, update field types now. */
2097 if (task_shared_vars
)
2101 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2102 c
; c
= OMP_CLAUSE_CHAIN (c
))
2103 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2104 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2106 tree decl
= OMP_CLAUSE_DECL (c
);
2108 /* Global variables don't need to be copied,
2109 the receiver side will use them directly. */
2110 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2112 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
2113 || !use_pointer_for_field (decl
, ctx
))
2115 tree field
= lookup_field (decl
, ctx
);
2116 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2117 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2119 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2120 TREE_THIS_VOLATILE (field
) = 0;
2121 DECL_USER_ALIGN (field
) = 0;
2122 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2123 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2124 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
2125 if (ctx
->srecord_type
)
2127 tree sfield
= lookup_sfield (decl
, ctx
);
2128 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2129 TREE_THIS_VOLATILE (sfield
) = 0;
2130 DECL_USER_ALIGN (sfield
) = 0;
2131 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2132 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2133 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
2138 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2140 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2141 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2144 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2145 expects to find it at the start of data. */
2146 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2147 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2151 *p
= DECL_CHAIN (*p
);
2155 p
= &DECL_CHAIN (*p
);
2156 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2157 TYPE_FIELDS (ctx
->record_type
) = f
;
2159 layout_type (ctx
->record_type
);
2160 fixup_child_record_type (ctx
);
2162 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2164 layout_type (ctx
->record_type
);
2165 fixup_child_record_type (ctx
);
2169 location_t loc
= gimple_location (ctx
->stmt
);
2170 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2171 /* Move VLA fields to the end. */
2172 p
= &TYPE_FIELDS (ctx
->record_type
);
2174 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2175 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2178 *p
= TREE_CHAIN (*p
);
2179 TREE_CHAIN (*q
) = NULL_TREE
;
2180 q
= &TREE_CHAIN (*q
);
2183 p
= &DECL_CHAIN (*p
);
2185 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2187 /* Move fields corresponding to first and second _looptemp_
2188 clause first. There are filled by GOMP_taskloop
2189 and thus need to be in specific positions. */
2190 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2191 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2192 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2193 OMP_CLAUSE__LOOPTEMP_
);
2194 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2195 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2196 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2197 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
2198 p
= &TYPE_FIELDS (ctx
->record_type
);
2200 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2201 *p
= DECL_CHAIN (*p
);
2203 p
= &DECL_CHAIN (*p
);
2204 DECL_CHAIN (f1
) = f2
;
2207 DECL_CHAIN (f2
) = f3
;
2208 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2211 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2212 TYPE_FIELDS (ctx
->record_type
) = f1
;
2213 if (ctx
->srecord_type
)
2215 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2216 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2218 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2219 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2221 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2222 *p
= DECL_CHAIN (*p
);
2224 p
= &DECL_CHAIN (*p
);
2225 DECL_CHAIN (f1
) = f2
;
2226 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2229 DECL_CHAIN (f2
) = f3
;
2230 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2233 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2234 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2237 layout_type (ctx
->record_type
);
2238 fixup_child_record_type (ctx
);
2239 if (ctx
->srecord_type
)
2240 layout_type (ctx
->srecord_type
);
2241 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2242 TYPE_SIZE_UNIT (ctx
->record_type
));
2243 if (TREE_CODE (t
) != INTEGER_CST
)
2245 t
= unshare_expr (t
);
2246 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2248 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2249 t
= build_int_cst (long_integer_type_node
,
2250 TYPE_ALIGN_UNIT (ctx
->record_type
));
2251 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2255 /* Find the enclosing offload context. */
2257 static omp_context
*
2258 enclosing_target_ctx (omp_context
*ctx
)
2260 for (; ctx
; ctx
= ctx
->outer
)
2261 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2267 /* Return true if ctx is part of an oacc kernels region. */
2270 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2272 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2274 gimple
*stmt
= ctx
->stmt
;
2275 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2276 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2283 /* Check the parallelism clauses inside a kernels regions.
2284 Until kernels handling moves to use the same loop indirection
2285 scheme as parallel, we need to do this checking early. */
2288 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2290 bool checking
= true;
2291 unsigned outer_mask
= 0;
2292 unsigned this_mask
= 0;
2293 bool has_seq
= false, has_auto
= false;
2296 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2300 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2302 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2305 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2307 switch (OMP_CLAUSE_CODE (c
))
2309 case OMP_CLAUSE_GANG
:
2310 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2312 case OMP_CLAUSE_WORKER
:
2313 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2315 case OMP_CLAUSE_VECTOR
:
2316 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2318 case OMP_CLAUSE_SEQ
:
2321 case OMP_CLAUSE_AUTO
:
2331 if (has_seq
&& (this_mask
|| has_auto
))
2332 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2333 " OpenACC loop specifiers");
2334 else if (has_auto
&& this_mask
)
2335 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2336 " OpenACC loop specifiers");
2338 if (this_mask
& outer_mask
)
2339 error_at (gimple_location (stmt
), "inner loop uses same"
2340 " OpenACC parallelism as containing loop");
2343 return outer_mask
| this_mask
;
2346 /* Scan a GIMPLE_OMP_FOR. */
2348 static omp_context
*
2349 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2353 tree clauses
= gimple_omp_for_clauses (stmt
);
2355 ctx
= new_omp_context (stmt
, outer_ctx
);
2357 if (is_gimple_omp_oacc (stmt
))
2359 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2361 if (!tgt
|| is_oacc_parallel (tgt
))
2362 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2364 char const *check
= NULL
;
2366 switch (OMP_CLAUSE_CODE (c
))
2368 case OMP_CLAUSE_GANG
:
2372 case OMP_CLAUSE_WORKER
:
2376 case OMP_CLAUSE_VECTOR
:
2384 if (check
&& OMP_CLAUSE_OPERAND (c
, 0))
2385 error_at (gimple_location (stmt
),
2386 "argument not permitted on %qs clause in"
2387 " OpenACC %<parallel%>", check
);
2390 if (tgt
&& is_oacc_kernels (tgt
))
2392 /* Strip out reductions, as they are not handled yet. */
2393 tree
*prev_ptr
= &clauses
;
2395 while (tree probe
= *prev_ptr
)
2397 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2399 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2400 *prev_ptr
= *next_ptr
;
2402 prev_ptr
= next_ptr
;
2405 gimple_omp_for_set_clauses (stmt
, clauses
);
2406 check_oacc_kernel_gwv (stmt
, ctx
);
2410 scan_sharing_clauses (clauses
, ctx
);
2412 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2413 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2415 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2416 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2417 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2418 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2420 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2424 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2427 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2428 omp_context
*outer_ctx
)
2430 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2431 gsi_replace (gsi
, bind
, false);
2432 gimple_seq seq
= NULL
;
2433 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2434 tree cond
= create_tmp_var_raw (integer_type_node
);
2435 DECL_CONTEXT (cond
) = current_function_decl
;
2436 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2437 gimple_bind_set_vars (bind
, cond
);
2438 gimple_call_set_lhs (g
, cond
);
2439 gimple_seq_add_stmt (&seq
, g
);
2440 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2441 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2442 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2443 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2444 gimple_seq_add_stmt (&seq
, g
);
2445 g
= gimple_build_label (lab1
);
2446 gimple_seq_add_stmt (&seq
, g
);
2447 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2448 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2449 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2450 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2451 gimple_omp_for_set_clauses (new_stmt
, clause
);
2452 gimple_seq_add_stmt (&seq
, new_stmt
);
2453 g
= gimple_build_goto (lab3
);
2454 gimple_seq_add_stmt (&seq
, g
);
2455 g
= gimple_build_label (lab2
);
2456 gimple_seq_add_stmt (&seq
, g
);
2457 gimple_seq_add_stmt (&seq
, stmt
);
2458 g
= gimple_build_label (lab3
);
2459 gimple_seq_add_stmt (&seq
, g
);
2460 gimple_bind_set_body (bind
, seq
);
2462 scan_omp_for (new_stmt
, outer_ctx
);
2463 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2466 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2467 struct walk_stmt_info
*);
2468 static omp_context
*maybe_lookup_ctx (gimple
*);
2470 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2471 for scan phase loop. */
2474 scan_omp_simd_scan (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2475 omp_context
*outer_ctx
)
2477 /* The only change between inclusive and exclusive scan will be
2478 within the first simd loop, so just use inclusive in the
2479 worksharing loop. */
2480 outer_ctx
->scan_inclusive
= true;
2481 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_INCLUSIVE
);
2482 OMP_CLAUSE_DECL (c
) = integer_zero_node
;
2484 gomp_scan
*input_stmt
= gimple_build_omp_scan (NULL
, NULL_TREE
);
2485 gomp_scan
*scan_stmt
= gimple_build_omp_scan (NULL
, c
);
2486 gsi_replace (gsi
, input_stmt
, false);
2487 gimple_seq input_body
= NULL
;
2488 gimple_seq_add_stmt (&input_body
, stmt
);
2489 gsi_insert_after (gsi
, scan_stmt
, GSI_NEW_STMT
);
2491 gimple_stmt_iterator input1_gsi
= gsi_none ();
2492 struct walk_stmt_info wi
;
2493 memset (&wi
, 0, sizeof (wi
));
2495 wi
.info
= (void *) &input1_gsi
;
2496 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), omp_find_scan
, NULL
, &wi
);
2497 gcc_assert (!gsi_end_p (input1_gsi
));
2499 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
2500 gsi_next (&input1_gsi
);
2501 gimple
*scan_stmt1
= gsi_stmt (input1_gsi
);
2502 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
2503 c
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (scan_stmt1
));
2504 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2505 std::swap (input_stmt1
, scan_stmt1
);
2507 gimple_seq input_body1
= gimple_omp_body (input_stmt1
);
2508 gimple_omp_set_body (input_stmt1
, NULL
);
2510 gimple_seq scan_body
= copy_gimple_seq_and_replace_locals (stmt
);
2511 gomp_for
*new_stmt
= as_a
<gomp_for
*> (scan_body
);
2513 gimple_omp_set_body (input_stmt1
, input_body1
);
2514 gimple_omp_set_body (scan_stmt1
, NULL
);
2516 gimple_stmt_iterator input2_gsi
= gsi_none ();
2517 memset (&wi
, 0, sizeof (wi
));
2519 wi
.info
= (void *) &input2_gsi
;
2520 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt
), omp_find_scan
,
2522 gcc_assert (!gsi_end_p (input2_gsi
));
2524 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
2525 gsi_next (&input2_gsi
);
2526 gimple
*scan_stmt2
= gsi_stmt (input2_gsi
);
2527 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
2528 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2529 std::swap (input_stmt2
, scan_stmt2
);
2531 gimple_omp_set_body (input_stmt2
, NULL
);
2533 gimple_omp_set_body (input_stmt
, input_body
);
2534 gimple_omp_set_body (scan_stmt
, scan_body
);
2536 omp_context
*ctx
= new_omp_context (input_stmt
, outer_ctx
);
2537 scan_omp (gimple_omp_body_ptr (input_stmt
), ctx
);
2539 ctx
= new_omp_context (scan_stmt
, outer_ctx
);
2540 scan_omp (gimple_omp_body_ptr (scan_stmt
), ctx
);
2542 maybe_lookup_ctx (new_stmt
)->for_simd_scan_phase
= true;
2545 /* Scan an OpenMP sections directive. */
2548 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2552 ctx
= new_omp_context (stmt
, outer_ctx
);
2553 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2554 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2557 /* Scan an OpenMP single directive. */
2560 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2565 ctx
= new_omp_context (stmt
, outer_ctx
);
2566 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2567 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2568 name
= create_tmp_var_name (".omp_copy_s");
2569 name
= build_decl (gimple_location (stmt
),
2570 TYPE_DECL
, name
, ctx
->record_type
);
2571 TYPE_NAME (ctx
->record_type
) = name
;
2573 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2574 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2576 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2577 ctx
->record_type
= NULL
;
2579 layout_type (ctx
->record_type
);
2582 /* Scan a GIMPLE_OMP_TARGET. */
2585 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
2589 bool offloaded
= is_gimple_omp_offloaded (stmt
);
2590 tree clauses
= gimple_omp_target_clauses (stmt
);
2592 ctx
= new_omp_context (stmt
, outer_ctx
);
2593 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2594 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2595 name
= create_tmp_var_name (".omp_data_t");
2596 name
= build_decl (gimple_location (stmt
),
2597 TYPE_DECL
, name
, ctx
->record_type
);
2598 DECL_ARTIFICIAL (name
) = 1;
2599 DECL_NAMELESS (name
) = 1;
2600 TYPE_NAME (ctx
->record_type
) = name
;
2601 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2605 create_omp_child_function (ctx
, false);
2606 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2609 scan_sharing_clauses (clauses
, ctx
);
2610 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2612 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2613 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2616 TYPE_FIELDS (ctx
->record_type
)
2617 = nreverse (TYPE_FIELDS (ctx
->record_type
));
2620 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
2621 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
2623 field
= DECL_CHAIN (field
))
2624 gcc_assert (DECL_ALIGN (field
) == align
);
2626 layout_type (ctx
->record_type
);
2628 fixup_child_record_type (ctx
);
2632 /* Scan an OpenMP teams directive. */
2635 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
2637 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
2639 if (!gimple_omp_teams_host (stmt
))
2641 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2642 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2645 taskreg_contexts
.safe_push (ctx
);
2646 gcc_assert (taskreg_nesting_level
== 1);
2647 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2648 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2649 tree name
= create_tmp_var_name (".omp_data_s");
2650 name
= build_decl (gimple_location (stmt
),
2651 TYPE_DECL
, name
, ctx
->record_type
);
2652 DECL_ARTIFICIAL (name
) = 1;
2653 DECL_NAMELESS (name
) = 1;
2654 TYPE_NAME (ctx
->record_type
) = name
;
2655 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2656 create_omp_child_function (ctx
, false);
2657 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2659 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2660 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2662 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2663 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2666 /* Check nesting restrictions. */
2668 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
2672 if (ctx
&& gimple_code (ctx
->stmt
) == GIMPLE_OMP_GRID_BODY
)
2673 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2674 the original copy of its contents. */
2677 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2678 inside an OpenACC CTX. */
2679 if (!(is_gimple_omp (stmt
)
2680 && is_gimple_omp_oacc (stmt
))
2681 /* Except for atomic codes that we share with OpenMP. */
2682 && !(gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2683 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2685 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2687 error_at (gimple_location (stmt
),
2688 "non-OpenACC construct inside of OpenACC routine");
2692 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
2693 if (is_gimple_omp (octx
->stmt
)
2694 && is_gimple_omp_oacc (octx
->stmt
))
2696 error_at (gimple_location (stmt
),
2697 "non-OpenACC construct inside of OpenACC region");
2704 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
2706 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
2708 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
2709 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
2713 if (ctx
->order_concurrent
2714 && (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
2715 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2716 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2718 error_at (gimple_location (stmt
),
2719 "OpenMP constructs other than %<parallel%>, %<loop%>"
2720 " or %<simd%> may not be nested inside a region with"
2721 " the %<order(concurrent)%> clause");
2724 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
2726 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2727 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2729 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
2730 && (ctx
->outer
== NULL
2731 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
2732 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
2733 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
2734 != GF_OMP_FOR_KIND_FOR
)
2735 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
2737 error_at (gimple_location (stmt
),
2738 "%<ordered simd threads%> must be closely "
2739 "nested inside of %<for simd%> region");
2745 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2746 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
2747 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
2749 else if (gimple_code (stmt
) == GIMPLE_OMP_FOR
2750 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
2752 error_at (gimple_location (stmt
),
2753 "OpenMP constructs other than "
2754 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2755 "not be nested inside %<simd%> region");
2758 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2760 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
2761 || (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
2762 && gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
2763 && omp_find_clause (gimple_omp_for_clauses (stmt
),
2764 OMP_CLAUSE_BIND
) == NULL_TREE
))
2765 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
2767 error_at (gimple_location (stmt
),
2768 "only %<distribute%>, %<parallel%> or %<loop%> "
2769 "regions are allowed to be strictly nested inside "
2770 "%<teams%> region");
2774 else if (ctx
->order_concurrent
2775 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
2776 && (gimple_code (stmt
) != GIMPLE_OMP_FOR
2777 || gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_SIMD
)
2778 && gimple_code (stmt
) != GIMPLE_OMP_SCAN
)
2781 error_at (gimple_location (stmt
),
2782 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2783 "%<simd%> may not be nested inside a %<loop%> region");
2785 error_at (gimple_location (stmt
),
2786 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2787 "%<simd%> may not be nested inside a region with "
2788 "the %<order(concurrent)%> clause");
2792 switch (gimple_code (stmt
))
2794 case GIMPLE_OMP_FOR
:
2795 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
)
2797 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
2799 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
2801 error_at (gimple_location (stmt
),
2802 "%<distribute%> region must be strictly nested "
2803 "inside %<teams%> construct");
2808 /* We split taskloop into task and nested taskloop in it. */
2809 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
2811 /* For now, hope this will change and loop bind(parallel) will not
2812 be allowed in lots of contexts. */
2813 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
2814 && omp_find_clause (gimple_omp_for_clauses (stmt
), OMP_CLAUSE_BIND
))
2816 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
2821 switch (gimple_code (ctx
->stmt
))
2823 case GIMPLE_OMP_FOR
:
2824 ok
= (gimple_omp_for_kind (ctx
->stmt
)
2825 == GF_OMP_FOR_KIND_OACC_LOOP
);
2828 case GIMPLE_OMP_TARGET
:
2829 switch (gimple_omp_target_kind (ctx
->stmt
))
2831 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
2832 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
2843 else if (oacc_get_fn_attrib (current_function_decl
))
2847 error_at (gimple_location (stmt
),
2848 "OpenACC loop directive must be associated with"
2849 " an OpenACC compute region");
2855 if (is_gimple_call (stmt
)
2856 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2857 == BUILT_IN_GOMP_CANCEL
2858 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2859 == BUILT_IN_GOMP_CANCELLATION_POINT
))
2861 const char *bad
= NULL
;
2862 const char *kind
= NULL
;
2863 const char *construct
2864 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2865 == BUILT_IN_GOMP_CANCEL
)
2867 : "cancellation point";
2870 error_at (gimple_location (stmt
), "orphaned %qs construct",
2874 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
2875 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
2879 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
2881 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2882 == BUILT_IN_GOMP_CANCEL
2883 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2884 ctx
->cancellable
= true;
2888 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2889 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
2891 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2892 == BUILT_IN_GOMP_CANCEL
2893 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2895 ctx
->cancellable
= true;
2896 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2898 warning_at (gimple_location (stmt
), 0,
2899 "%<cancel for%> inside "
2900 "%<nowait%> for construct");
2901 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2902 OMP_CLAUSE_ORDERED
))
2903 warning_at (gimple_location (stmt
), 0,
2904 "%<cancel for%> inside "
2905 "%<ordered%> for construct");
2910 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
2911 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
2913 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2914 == BUILT_IN_GOMP_CANCEL
2915 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2917 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
2919 ctx
->cancellable
= true;
2920 if (omp_find_clause (gimple_omp_sections_clauses
2923 warning_at (gimple_location (stmt
), 0,
2924 "%<cancel sections%> inside "
2925 "%<nowait%> sections construct");
2929 gcc_assert (ctx
->outer
2930 && gimple_code (ctx
->outer
->stmt
)
2931 == GIMPLE_OMP_SECTIONS
);
2932 ctx
->outer
->cancellable
= true;
2933 if (omp_find_clause (gimple_omp_sections_clauses
2936 warning_at (gimple_location (stmt
), 0,
2937 "%<cancel sections%> inside "
2938 "%<nowait%> sections construct");
2944 if (!is_task_ctx (ctx
)
2945 && (!is_taskloop_ctx (ctx
)
2946 || ctx
->outer
== NULL
2947 || !is_task_ctx (ctx
->outer
)))
2951 for (omp_context
*octx
= ctx
->outer
;
2952 octx
; octx
= octx
->outer
)
2954 switch (gimple_code (octx
->stmt
))
2956 case GIMPLE_OMP_TASKGROUP
:
2958 case GIMPLE_OMP_TARGET
:
2959 if (gimple_omp_target_kind (octx
->stmt
)
2960 != GF_OMP_TARGET_KIND_REGION
)
2963 case GIMPLE_OMP_PARALLEL
:
2964 case GIMPLE_OMP_TEAMS
:
2965 error_at (gimple_location (stmt
),
2966 "%<%s taskgroup%> construct not closely "
2967 "nested inside of %<taskgroup%> region",
2970 case GIMPLE_OMP_TASK
:
2971 if (gimple_omp_task_taskloop_p (octx
->stmt
)
2973 && is_taskloop_ctx (octx
->outer
))
2976 = gimple_omp_for_clauses (octx
->outer
->stmt
);
2977 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
2986 ctx
->cancellable
= true;
2991 error_at (gimple_location (stmt
), "invalid arguments");
2996 error_at (gimple_location (stmt
),
2997 "%<%s %s%> construct not closely nested inside of %qs",
2998 construct
, kind
, bad
);
3003 case GIMPLE_OMP_SECTIONS
:
3004 case GIMPLE_OMP_SINGLE
:
3005 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3006 switch (gimple_code (ctx
->stmt
))
3008 case GIMPLE_OMP_FOR
:
3009 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3010 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3013 case GIMPLE_OMP_SECTIONS
:
3014 case GIMPLE_OMP_SINGLE
:
3015 case GIMPLE_OMP_ORDERED
:
3016 case GIMPLE_OMP_MASTER
:
3017 case GIMPLE_OMP_TASK
:
3018 case GIMPLE_OMP_CRITICAL
:
3019 if (is_gimple_call (stmt
))
3021 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3022 != BUILT_IN_GOMP_BARRIER
)
3024 error_at (gimple_location (stmt
),
3025 "barrier region may not be closely nested inside "
3026 "of work-sharing, %<loop%>, %<critical%>, "
3027 "%<ordered%>, %<master%>, explicit %<task%> or "
3028 "%<taskloop%> region");
3031 error_at (gimple_location (stmt
),
3032 "work-sharing region may not be closely nested inside "
3033 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3034 "%<master%>, explicit %<task%> or %<taskloop%> region");
3036 case GIMPLE_OMP_PARALLEL
:
3037 case GIMPLE_OMP_TEAMS
:
3039 case GIMPLE_OMP_TARGET
:
3040 if (gimple_omp_target_kind (ctx
->stmt
)
3041 == GF_OMP_TARGET_KIND_REGION
)
3048 case GIMPLE_OMP_MASTER
:
3049 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3050 switch (gimple_code (ctx
->stmt
))
3052 case GIMPLE_OMP_FOR
:
3053 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3054 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3057 case GIMPLE_OMP_SECTIONS
:
3058 case GIMPLE_OMP_SINGLE
:
3059 case GIMPLE_OMP_TASK
:
3060 error_at (gimple_location (stmt
),
3061 "%<master%> region may not be closely nested inside "
3062 "of work-sharing, %<loop%>, explicit %<task%> or "
3063 "%<taskloop%> region");
3065 case GIMPLE_OMP_PARALLEL
:
3066 case GIMPLE_OMP_TEAMS
:
3068 case GIMPLE_OMP_TARGET
:
3069 if (gimple_omp_target_kind (ctx
->stmt
)
3070 == GF_OMP_TARGET_KIND_REGION
)
3077 case GIMPLE_OMP_TASK
:
3078 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3079 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3080 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3081 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3083 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3084 error_at (OMP_CLAUSE_LOCATION (c
),
3085 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3086 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3090 case GIMPLE_OMP_ORDERED
:
3091 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3092 c
; c
= OMP_CLAUSE_CHAIN (c
))
3094 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
3096 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3097 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3100 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3101 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
3102 || kind
== OMP_CLAUSE_DEPEND_SINK
)
3105 /* Look for containing ordered(N) loop. */
3107 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3109 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3110 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3112 error_at (OMP_CLAUSE_LOCATION (c
),
3113 "%<ordered%> construct with %<depend%> clause "
3114 "must be closely nested inside an %<ordered%> "
3118 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
3120 error_at (OMP_CLAUSE_LOCATION (c
),
3121 "%<ordered%> construct with %<depend%> clause "
3122 "must be closely nested inside a loop with "
3123 "%<ordered%> clause with a parameter");
3129 error_at (OMP_CLAUSE_LOCATION (c
),
3130 "invalid depend kind in omp %<ordered%> %<depend%>");
3134 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3135 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3137 /* ordered simd must be closely nested inside of simd region,
3138 and simd region must not encounter constructs other than
3139 ordered simd, therefore ordered simd may be either orphaned,
3140 or ctx->stmt must be simd. The latter case is handled already
3144 error_at (gimple_location (stmt
),
3145 "%<ordered%> %<simd%> must be closely nested inside "
3150 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3151 switch (gimple_code (ctx
->stmt
))
3153 case GIMPLE_OMP_CRITICAL
:
3154 case GIMPLE_OMP_TASK
:
3155 case GIMPLE_OMP_ORDERED
:
3156 ordered_in_taskloop
:
3157 error_at (gimple_location (stmt
),
3158 "%<ordered%> region may not be closely nested inside "
3159 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3160 "%<taskloop%> region");
3162 case GIMPLE_OMP_FOR
:
3163 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3164 goto ordered_in_taskloop
;
3166 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3167 OMP_CLAUSE_ORDERED
);
3170 error_at (gimple_location (stmt
),
3171 "%<ordered%> region must be closely nested inside "
3172 "a loop region with an %<ordered%> clause");
3175 if (OMP_CLAUSE_ORDERED_EXPR (o
) != NULL_TREE
3176 && omp_find_clause (c
, OMP_CLAUSE_DEPEND
) == NULL_TREE
)
3178 error_at (gimple_location (stmt
),
3179 "%<ordered%> region without %<depend%> clause may "
3180 "not be closely nested inside a loop region with "
3181 "an %<ordered%> clause with a parameter");
3185 case GIMPLE_OMP_TARGET
:
3186 if (gimple_omp_target_kind (ctx
->stmt
)
3187 != GF_OMP_TARGET_KIND_REGION
)
3190 case GIMPLE_OMP_PARALLEL
:
3191 case GIMPLE_OMP_TEAMS
:
3192 error_at (gimple_location (stmt
),
3193 "%<ordered%> region must be closely nested inside "
3194 "a loop region with an %<ordered%> clause");
3200 case GIMPLE_OMP_CRITICAL
:
3203 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3204 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3205 if (gomp_critical
*other_crit
3206 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3207 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3209 error_at (gimple_location (stmt
),
3210 "%<critical%> region may not be nested inside "
3211 "a %<critical%> region with the same name");
3216 case GIMPLE_OMP_TEAMS
:
3219 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3220 || (gimple_omp_target_kind (ctx
->stmt
)
3221 != GF_OMP_TARGET_KIND_REGION
))
3223 /* Teams construct can appear either strictly nested inside of
3224 target construct with no intervening stmts, or can be encountered
3225 only by initial task (so must not appear inside any OpenMP
3227 error_at (gimple_location (stmt
),
3228 "%<teams%> construct must be closely nested inside of "
3229 "%<target%> construct or not nested in any OpenMP "
3234 case GIMPLE_OMP_TARGET
:
3235 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3236 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3237 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3238 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3240 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3241 error_at (OMP_CLAUSE_LOCATION (c
),
3242 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3243 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3246 if (is_gimple_omp_offloaded (stmt
)
3247 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3249 error_at (gimple_location (stmt
),
3250 "OpenACC region inside of OpenACC routine, nested "
3251 "parallelism not supported yet");
3254 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3256 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3258 if (is_gimple_omp (stmt
)
3259 && is_gimple_omp_oacc (stmt
)
3260 && is_gimple_omp (ctx
->stmt
))
3262 error_at (gimple_location (stmt
),
3263 "OpenACC construct inside of non-OpenACC region");
3269 const char *stmt_name
, *ctx_stmt_name
;
3270 switch (gimple_omp_target_kind (stmt
))
3272 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3273 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3274 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3275 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3276 stmt_name
= "target enter data"; break;
3277 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3278 stmt_name
= "target exit data"; break;
3279 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3280 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3281 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3282 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3283 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
3284 stmt_name
= "enter/exit data"; break;
3285 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3286 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3288 default: gcc_unreachable ();
3290 switch (gimple_omp_target_kind (ctx
->stmt
))
3292 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3293 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3294 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3295 ctx_stmt_name
= "parallel"; break;
3296 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3297 ctx_stmt_name
= "kernels"; break;
3298 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3299 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3300 ctx_stmt_name
= "host_data"; break;
3301 default: gcc_unreachable ();
3304 /* OpenACC/OpenMP mismatch? */
3305 if (is_gimple_omp_oacc (stmt
)
3306 != is_gimple_omp_oacc (ctx
->stmt
))
3308 error_at (gimple_location (stmt
),
3309 "%s %qs construct inside of %s %qs region",
3310 (is_gimple_omp_oacc (stmt
)
3311 ? "OpenACC" : "OpenMP"), stmt_name
,
3312 (is_gimple_omp_oacc (ctx
->stmt
)
3313 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3316 if (is_gimple_omp_offloaded (ctx
->stmt
))
3318 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3319 if (is_gimple_omp_oacc (ctx
->stmt
))
3321 error_at (gimple_location (stmt
),
3322 "%qs construct inside of %qs region",
3323 stmt_name
, ctx_stmt_name
);
3328 warning_at (gimple_location (stmt
), 0,
3329 "%qs construct inside of %qs region",
3330 stmt_name
, ctx_stmt_name
);
3342 /* Helper function scan_omp.
3344 Callback for walk_tree or operators in walk_gimple_stmt used to
3345 scan for OMP directives in TP. */
3348 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3350 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3351 omp_context
*ctx
= (omp_context
*) wi
->info
;
3354 switch (TREE_CODE (t
))
3362 tree repl
= remap_decl (t
, &ctx
->cb
);
3363 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3369 if (ctx
&& TYPE_P (t
))
3370 *tp
= remap_type (t
, &ctx
->cb
);
3371 else if (!DECL_P (t
))
3376 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3377 if (tem
!= TREE_TYPE (t
))
3379 if (TREE_CODE (t
) == INTEGER_CST
)
3380 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3382 TREE_TYPE (t
) = tem
;
3392 /* Return true if FNDECL is a setjmp or a longjmp. */
3395 setjmp_or_longjmp_p (const_tree fndecl
)
3397 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3398 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
3401 tree declname
= DECL_NAME (fndecl
);
3403 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3404 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3405 || !TREE_PUBLIC (fndecl
))
3408 const char *name
= IDENTIFIER_POINTER (declname
);
3409 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
3412 /* Return true if FNDECL is an omp_* runtime API call. */
3415 omp_runtime_api_call (const_tree fndecl
)
3417 tree declname
= DECL_NAME (fndecl
);
3419 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3420 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3421 || !TREE_PUBLIC (fndecl
))
3424 const char *name
= IDENTIFIER_POINTER (declname
);
3425 if (strncmp (name
, "omp_", 4) != 0)
3428 static const char *omp_runtime_apis
[] =
3430 /* This array has 3 sections. First omp_* calls that don't
3431 have any suffixes. */
3433 "target_associate_ptr",
3434 "target_disassociate_ptr",
3436 "target_is_present",
3438 "target_memcpy_rect",
3440 /* Now omp_* calls that are available as omp_* and omp_*_. */
3443 "destroy_nest_lock",
3446 "get_affinity_format",
3448 "get_default_device",
3450 "get_initial_device",
3452 "get_max_active_levels",
3453 "get_max_task_priority",
3461 "get_partition_num_places",
3473 "is_initial_device",
3475 "pause_resource_all",
3476 "set_affinity_format",
3484 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3485 "get_ancestor_thread_num",
3486 "get_partition_place_nums",
3487 "get_place_num_procs",
3488 "get_place_proc_ids",
3491 "set_default_device",
3493 "set_max_active_levels",
3500 for (unsigned i
= 0; i
< ARRAY_SIZE (omp_runtime_apis
); i
++)
3502 if (omp_runtime_apis
[i
] == NULL
)
3507 size_t len
= strlen (omp_runtime_apis
[i
]);
3508 if (strncmp (name
+ 4, omp_runtime_apis
[i
], len
) == 0
3509 && (name
[4 + len
] == '\0'
3511 && name
[4 + len
] == '_'
3512 && (name
[4 + len
+ 1] == '\0'
3514 && strcmp (name
+ 4 + len
+ 1, "8_") == 0)))))
3520 /* Helper function for scan_omp.
3522 Callback for walk_gimple_stmt used to scan for OMP directives in
3523 the current statement in GSI. */
3526 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3527 struct walk_stmt_info
*wi
)
3529 gimple
*stmt
= gsi_stmt (*gsi
);
3530 omp_context
*ctx
= (omp_context
*) wi
->info
;
3532 if (gimple_has_location (stmt
))
3533 input_location
= gimple_location (stmt
);
3535 /* Check the nesting restrictions. */
3536 bool remove
= false;
3537 if (is_gimple_omp (stmt
))
3538 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3539 else if (is_gimple_call (stmt
))
3541 tree fndecl
= gimple_call_fndecl (stmt
);
3545 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3546 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3547 && setjmp_or_longjmp_p (fndecl
)
3551 error_at (gimple_location (stmt
),
3552 "setjmp/longjmp inside %<simd%> construct");
3554 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3555 switch (DECL_FUNCTION_CODE (fndecl
))
3557 case BUILT_IN_GOMP_BARRIER
:
3558 case BUILT_IN_GOMP_CANCEL
:
3559 case BUILT_IN_GOMP_CANCELLATION_POINT
:
3560 case BUILT_IN_GOMP_TASKYIELD
:
3561 case BUILT_IN_GOMP_TASKWAIT
:
3562 case BUILT_IN_GOMP_TASKGROUP_START
:
3563 case BUILT_IN_GOMP_TASKGROUP_END
:
3564 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3571 omp_context
*octx
= ctx
;
3572 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
&& ctx
->outer
)
3574 if (octx
->order_concurrent
&& omp_runtime_api_call (fndecl
))
3577 error_at (gimple_location (stmt
),
3578 "OpenMP runtime API call %qD in a region with "
3579 "%<order(concurrent)%> clause", fndecl
);
3586 stmt
= gimple_build_nop ();
3587 gsi_replace (gsi
, stmt
, false);
3590 *handled_ops_p
= true;
3592 switch (gimple_code (stmt
))
3594 case GIMPLE_OMP_PARALLEL
:
3595 taskreg_nesting_level
++;
3596 scan_omp_parallel (gsi
, ctx
);
3597 taskreg_nesting_level
--;
3600 case GIMPLE_OMP_TASK
:
3601 taskreg_nesting_level
++;
3602 scan_omp_task (gsi
, ctx
);
3603 taskreg_nesting_level
--;
3606 case GIMPLE_OMP_FOR
:
3607 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3608 == GF_OMP_FOR_KIND_SIMD
)
3609 && gimple_omp_for_combined_into_p (stmt
)
3610 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SCAN
)
3612 tree clauses
= gimple_omp_for_clauses (as_a
<gomp_for
*> (stmt
));
3613 tree c
= omp_find_clause (clauses
, OMP_CLAUSE_REDUCTION
);
3614 if (c
&& OMP_CLAUSE_REDUCTION_INSCAN (c
) && !seen_error ())
3616 scan_omp_simd_scan (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3620 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3621 == GF_OMP_FOR_KIND_SIMD
)
3622 && omp_maybe_offloaded_ctx (ctx
)
3623 && omp_max_simt_vf ())
3624 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3626 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
3629 case GIMPLE_OMP_SECTIONS
:
3630 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
3633 case GIMPLE_OMP_SINGLE
:
3634 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
3637 case GIMPLE_OMP_SCAN
:
3638 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
3640 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
3641 ctx
->scan_inclusive
= true;
3642 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
3643 ctx
->scan_exclusive
= true;
3646 case GIMPLE_OMP_SECTION
:
3647 case GIMPLE_OMP_MASTER
:
3648 case GIMPLE_OMP_ORDERED
:
3649 case GIMPLE_OMP_CRITICAL
:
3650 case GIMPLE_OMP_GRID_BODY
:
3651 ctx
= new_omp_context (stmt
, ctx
);
3652 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3655 case GIMPLE_OMP_TASKGROUP
:
3656 ctx
= new_omp_context (stmt
, ctx
);
3657 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
3658 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3661 case GIMPLE_OMP_TARGET
:
3662 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3665 case GIMPLE_OMP_TEAMS
:
3666 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
3668 taskreg_nesting_level
++;
3669 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3670 taskreg_nesting_level
--;
3673 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3680 *handled_ops_p
= false;
3682 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
3684 var
= DECL_CHAIN (var
))
3685 insert_decl_map (&ctx
->cb
, var
, var
);
3689 *handled_ops_p
= false;
3697 /* Scan all the statements starting at the current statement. CTX
3698 contains context information about the OMP directives and
3699 clauses found during the scan. */
3702 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
3704 location_t saved_location
;
3705 struct walk_stmt_info wi
;
3707 memset (&wi
, 0, sizeof (wi
));
3709 wi
.want_locations
= true;
3711 saved_location
= input_location
;
3712 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
3713 input_location
= saved_location
;
3716 /* Re-gimplification and code generation routines. */
3718 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3719 of BIND if in a method. */
3722 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
3724 if (DECL_ARGUMENTS (current_function_decl
)
3725 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
3726 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
3729 tree vars
= gimple_bind_vars (bind
);
3730 for (tree
*pvar
= &vars
; *pvar
; )
3731 if (omp_member_access_dummy_var (*pvar
))
3732 *pvar
= DECL_CHAIN (*pvar
);
3734 pvar
= &DECL_CHAIN (*pvar
);
3735 gimple_bind_set_vars (bind
, vars
);
3739 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3740 block and its subblocks. */
3743 remove_member_access_dummy_vars (tree block
)
3745 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
3746 if (omp_member_access_dummy_var (*pvar
))
3747 *pvar
= DECL_CHAIN (*pvar
);
3749 pvar
= &DECL_CHAIN (*pvar
);
3751 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
3752 remove_member_access_dummy_vars (block
);
3755 /* If a context was created for STMT when it was scanned, return it. */
3757 static omp_context
*
3758 maybe_lookup_ctx (gimple
*stmt
)
3761 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
3762 return n
? (omp_context
*) n
->value
: NULL
;
3766 /* Find the mapping for DECL in CTX or the immediately enclosing
3767 context that has a mapping for DECL.
3769 If CTX is a nested parallel directive, we may have to use the decl
3770 mappings created in CTX's parent context. Suppose that we have the
3771 following parallel nesting (variable UIDs showed for clarity):
3774 #omp parallel shared(iD.1562) -> outer parallel
3775 iD.1562 = iD.1562 + 1;
3777 #omp parallel shared (iD.1562) -> inner parallel
3778 iD.1562 = iD.1562 - 1;
3780 Each parallel structure will create a distinct .omp_data_s structure
3781 for copying iD.1562 in/out of the directive:
3783 outer parallel .omp_data_s.1.i -> iD.1562
3784 inner parallel .omp_data_s.2.i -> iD.1562
3786 A shared variable mapping will produce a copy-out operation before
3787 the parallel directive and a copy-in operation after it. So, in
3788 this case we would have:
3791 .omp_data_o.1.i = iD.1562;
3792 #omp parallel shared(iD.1562) -> outer parallel
3793 .omp_data_i.1 = &.omp_data_o.1
3794 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3796 .omp_data_o.2.i = iD.1562; -> **
3797 #omp parallel shared(iD.1562) -> inner parallel
3798 .omp_data_i.2 = &.omp_data_o.2
3799 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3802 ** This is a problem. The symbol iD.1562 cannot be referenced
3803 inside the body of the outer parallel region. But since we are
3804 emitting this copy operation while expanding the inner parallel
3805 directive, we need to access the CTX structure of the outer
3806 parallel directive to get the correct mapping:
3808 .omp_data_o.2.i = .omp_data_i.1->i
3810 Since there may be other workshare or parallel directives enclosing
3811 the parallel directive, it may be necessary to walk up the context
3812 parent chain. This is not a problem in general because nested
3813 parallelism happens only rarely. */
3816 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3821 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3822 t
= maybe_lookup_decl (decl
, up
);
3824 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
3826 return t
? t
: decl
;
3830 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3831 in outer contexts. */
3834 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3839 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3840 t
= maybe_lookup_decl (decl
, up
);
3842 return t
? t
: decl
;
3846 /* Construct the initialization value for reduction operation OP. */
3849 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
3858 case TRUTH_ORIF_EXPR
:
3859 case TRUTH_XOR_EXPR
:
3861 return build_zero_cst (type
);
3864 case TRUTH_AND_EXPR
:
3865 case TRUTH_ANDIF_EXPR
:
3867 return fold_convert_loc (loc
, type
, integer_one_node
);
3870 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
3873 if (SCALAR_FLOAT_TYPE_P (type
))
3875 REAL_VALUE_TYPE max
, min
;
3876 if (HONOR_INFINITIES (type
))
3879 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
3882 real_maxval (&min
, 1, TYPE_MODE (type
));
3883 return build_real (type
, min
);
3885 else if (POINTER_TYPE_P (type
))
3888 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3889 return wide_int_to_tree (type
, min
);
3893 gcc_assert (INTEGRAL_TYPE_P (type
));
3894 return TYPE_MIN_VALUE (type
);
3898 if (SCALAR_FLOAT_TYPE_P (type
))
3900 REAL_VALUE_TYPE max
;
3901 if (HONOR_INFINITIES (type
))
3904 real_maxval (&max
, 0, TYPE_MODE (type
));
3905 return build_real (type
, max
);
3907 else if (POINTER_TYPE_P (type
))
3910 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3911 return wide_int_to_tree (type
, max
);
3915 gcc_assert (INTEGRAL_TYPE_P (type
));
3916 return TYPE_MAX_VALUE (type
);
3924 /* Construct the initialization value for reduction CLAUSE. */
3927 omp_reduction_init (tree clause
, tree type
)
3929 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
3930 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
3933 /* Return alignment to be assumed for var in CLAUSE, which should be
3934 OMP_CLAUSE_ALIGNED. */
3937 omp_clause_aligned_alignment (tree clause
)
3939 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
3940 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
3942 /* Otherwise return implementation defined alignment. */
3943 unsigned int al
= 1;
3944 opt_scalar_mode mode_iter
;
3945 auto_vector_sizes sizes
;
3946 targetm
.vectorize
.autovectorize_vector_sizes (&sizes
, true);
3948 for (unsigned int i
= 0; i
< sizes
.length (); ++i
)
3949 vs
= ordered_max (vs
, sizes
[i
]);
3950 static enum mode_class classes
[]
3951 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
3952 for (int i
= 0; i
< 4; i
+= 2)
3953 /* The for loop above dictates that we only walk through scalar classes. */
3954 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
3956 scalar_mode mode
= mode_iter
.require ();
3957 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
3958 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
3960 while (maybe_ne (vs
, 0U)
3961 && known_lt (GET_MODE_SIZE (vmode
), vs
)
3962 && GET_MODE_2XWIDER_MODE (vmode
).exists ())
3963 vmode
= GET_MODE_2XWIDER_MODE (vmode
).require ();
3965 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
3966 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
3968 poly_uint64 nelts
= exact_div (GET_MODE_SIZE (vmode
),
3969 GET_MODE_SIZE (mode
));
3970 type
= build_vector_type (type
, nelts
);
3971 if (TYPE_MODE (type
) != vmode
)
3973 if (TYPE_ALIGN_UNIT (type
) > al
)
3974 al
= TYPE_ALIGN_UNIT (type
);
3976 return build_int_cst (integer_type_node
, al
);
3980 /* This structure is part of the interface between lower_rec_simd_input_clauses
3981 and lower_rec_input_clauses. */
3983 class omplow_simd_context
{
3985 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3989 vec
<tree
, va_heap
> simt_eargs
;
3990 gimple_seq simt_dlist
;
3991 poly_uint64_pod max_vf
;
3995 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3999 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
4000 omplow_simd_context
*sctx
, tree
&ivar
,
4001 tree
&lvar
, tree
*rvar
= NULL
,
4004 if (known_eq (sctx
->max_vf
, 0U))
4006 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
4007 if (maybe_gt (sctx
->max_vf
, 1U))
4009 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4010 OMP_CLAUSE_SAFELEN
);
4013 poly_uint64 safe_len
;
4014 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4015 || maybe_lt (safe_len
, 1U))
4018 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
4021 if (maybe_gt (sctx
->max_vf
, 1U))
4023 sctx
->idx
= create_tmp_var (unsigned_type_node
);
4024 sctx
->lane
= create_tmp_var (unsigned_type_node
);
4027 if (known_eq (sctx
->max_vf
, 1U))
4032 if (is_gimple_reg (new_var
))
4034 ivar
= lvar
= new_var
;
4037 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
4038 ivar
= lvar
= create_tmp_var (type
);
4039 TREE_ADDRESSABLE (ivar
) = 1;
4040 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
4041 NULL
, DECL_ATTRIBUTES (ivar
));
4042 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
4043 tree clobber
= build_constructor (type
, NULL
);
4044 TREE_THIS_VOLATILE (clobber
) = 1;
4045 gimple
*g
= gimple_build_assign (ivar
, clobber
);
4046 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
4050 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
4051 tree avar
= create_tmp_var_raw (atype
);
4052 if (TREE_ADDRESSABLE (new_var
))
4053 TREE_ADDRESSABLE (avar
) = 1;
4054 DECL_ATTRIBUTES (avar
)
4055 = tree_cons (get_identifier ("omp simd array"), NULL
,
4056 DECL_ATTRIBUTES (avar
));
4057 gimple_add_tmp_var (avar
);
4059 if (rvar
&& !ctx
->for_simd_scan_phase
)
4061 /* For inscan reductions, create another array temporary,
4062 which will hold the reduced value. */
4063 iavar
= create_tmp_var_raw (atype
);
4064 if (TREE_ADDRESSABLE (new_var
))
4065 TREE_ADDRESSABLE (iavar
) = 1;
4066 DECL_ATTRIBUTES (iavar
)
4067 = tree_cons (get_identifier ("omp simd array"), NULL
,
4068 tree_cons (get_identifier ("omp simd inscan"), NULL
,
4069 DECL_ATTRIBUTES (iavar
)));
4070 gimple_add_tmp_var (iavar
);
4071 ctx
->cb
.decl_map
->put (avar
, iavar
);
4072 if (sctx
->lastlane
== NULL_TREE
)
4073 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
4074 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
4075 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
4076 TREE_THIS_NOTRAP (*rvar
) = 1;
4078 if (ctx
->scan_exclusive
)
4080 /* And for exclusive scan yet another one, which will
4081 hold the value during the scan phase. */
4082 tree savar
= create_tmp_var_raw (atype
);
4083 if (TREE_ADDRESSABLE (new_var
))
4084 TREE_ADDRESSABLE (savar
) = 1;
4085 DECL_ATTRIBUTES (savar
)
4086 = tree_cons (get_identifier ("omp simd array"), NULL
,
4087 tree_cons (get_identifier ("omp simd inscan "
4089 DECL_ATTRIBUTES (savar
)));
4090 gimple_add_tmp_var (savar
);
4091 ctx
->cb
.decl_map
->put (iavar
, savar
);
4092 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
4093 sctx
->idx
, NULL_TREE
, NULL_TREE
);
4094 TREE_THIS_NOTRAP (*rvar2
) = 1;
4097 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
4098 NULL_TREE
, NULL_TREE
);
4099 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
4100 NULL_TREE
, NULL_TREE
);
4101 TREE_THIS_NOTRAP (ivar
) = 1;
4102 TREE_THIS_NOTRAP (lvar
) = 1;
4104 if (DECL_P (new_var
))
4106 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4107 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4112 /* Helper function of lower_rec_input_clauses. For a reference
4113 in simd reduction, add an underlying variable it will reference. */
4116 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
4118 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
4119 if (TREE_CONSTANT (z
))
4121 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
4122 get_name (new_vard
));
4123 gimple_add_tmp_var (z
);
4124 TREE_ADDRESSABLE (z
) = 1;
4125 z
= build_fold_addr_expr_loc (loc
, z
);
4126 gimplify_assign (new_vard
, z
, ilist
);
4130 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4131 code to emit (type) (tskred_temp[idx]). */
4134 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
4137 unsigned HOST_WIDE_INT sz
4138 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
4139 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
4140 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
4142 tree v
= create_tmp_var (pointer_sized_int_node
);
4143 gimple
*g
= gimple_build_assign (v
, r
);
4144 gimple_seq_add_stmt (ilist
, g
);
4145 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
4147 v
= create_tmp_var (type
);
4148 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
4149 gimple_seq_add_stmt (ilist
, g
);
4154 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4155 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4156 private variables. Initialization statements go in ILIST, while calls
4157 to destructors go in DLIST. */
4160 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
4161 omp_context
*ctx
, struct omp_for_data
*fd
)
4163 tree c
, copyin_seq
, x
, ptr
;
4164 bool copyin_by_ref
= false;
4165 bool lastprivate_firstprivate
= false;
4166 bool reduction_omp_orig_ref
= false;
4168 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4169 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
4170 omplow_simd_context sctx
= omplow_simd_context ();
4171 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
4172 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
4173 gimple_seq llist
[4] = { };
4174 tree nonconst_simd_if
= NULL_TREE
;
4177 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
4179 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4180 with data sharing clauses referencing variable sized vars. That
4181 is unnecessarily hard to support and very unlikely to result in
4182 vectorized code anyway. */
4184 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4185 switch (OMP_CLAUSE_CODE (c
))
4187 case OMP_CLAUSE_LINEAR
:
4188 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4191 case OMP_CLAUSE_PRIVATE
:
4192 case OMP_CLAUSE_FIRSTPRIVATE
:
4193 case OMP_CLAUSE_LASTPRIVATE
:
4194 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
4196 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4198 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4199 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4203 case OMP_CLAUSE_REDUCTION
:
4204 case OMP_CLAUSE_IN_REDUCTION
:
4205 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4206 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4208 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4210 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4211 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4216 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4218 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4219 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4221 case OMP_CLAUSE_SIMDLEN
:
4222 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4225 case OMP_CLAUSE__CONDTEMP_
:
4226 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4234 /* Add a placeholder for simduid. */
4235 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
4236 sctx
.simt_eargs
.safe_push (NULL_TREE
);
4238 unsigned task_reduction_cnt
= 0;
4239 unsigned task_reduction_cntorig
= 0;
4240 unsigned task_reduction_cnt_full
= 0;
4241 unsigned task_reduction_cntorig_full
= 0;
4242 unsigned task_reduction_other_cnt
= 0;
4243 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
4244 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
4245 /* Do all the fixed sized types in the first pass, and the variable sized
4246 types in the second pass. This makes sure that the scalar arguments to
4247 the variable sized types are processed before we use them in the
4248 variable sized operations. For task reductions we use 4 passes, in the
4249 first two we ignore them, in the third one gather arguments for
4250 GOMP_task_reduction_remap call and in the last pass actually handle
4251 the task reductions. */
4252 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
4255 if (pass
== 2 && task_reduction_cnt
)
4258 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
4259 + task_reduction_cntorig
);
4260 tskred_avar
= create_tmp_var_raw (tskred_atype
);
4261 gimple_add_tmp_var (tskred_avar
);
4262 TREE_ADDRESSABLE (tskred_avar
) = 1;
4263 task_reduction_cnt_full
= task_reduction_cnt
;
4264 task_reduction_cntorig_full
= task_reduction_cntorig
;
4266 else if (pass
== 3 && task_reduction_cnt
)
4268 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
4270 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
4271 size_int (task_reduction_cntorig
),
4272 build_fold_addr_expr (tskred_avar
));
4273 gimple_seq_add_stmt (ilist
, g
);
4275 if (pass
== 3 && task_reduction_other_cnt
)
4277 /* For reduction clauses, build
4278 tskred_base = (void *) tskred_temp[2]
4279 + omp_get_thread_num () * tskred_temp[1]
4280 or if tskred_temp[1] is known to be constant, that constant
4281 directly. This is the start of the private reduction copy block
4282 for the current thread. */
4283 tree v
= create_tmp_var (integer_type_node
);
4284 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
4285 gimple
*g
= gimple_build_call (x
, 0);
4286 gimple_call_set_lhs (g
, v
);
4287 gimple_seq_add_stmt (ilist
, g
);
4288 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
4289 tskred_temp
= OMP_CLAUSE_DECL (c
);
4290 if (is_taskreg_ctx (ctx
))
4291 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
4292 tree v2
= create_tmp_var (sizetype
);
4293 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
4294 gimple_seq_add_stmt (ilist
, g
);
4295 if (ctx
->task_reductions
[0])
4296 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
4298 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
4299 tree v3
= create_tmp_var (sizetype
);
4300 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
4301 gimple_seq_add_stmt (ilist
, g
);
4302 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
4303 tskred_base
= create_tmp_var (ptr_type_node
);
4304 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
4305 gimple_seq_add_stmt (ilist
, g
);
4307 task_reduction_cnt
= 0;
4308 task_reduction_cntorig
= 0;
4309 task_reduction_other_cnt
= 0;
4310 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4312 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
4315 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4316 bool task_reduction_p
= false;
4317 bool task_reduction_needs_orig_p
= false;
4318 tree cond
= NULL_TREE
;
4322 case OMP_CLAUSE_PRIVATE
:
4323 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
4326 case OMP_CLAUSE_SHARED
:
4327 /* Ignore shared directives in teams construct inside
4328 of target construct. */
4329 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4330 && !is_host_teams_ctx (ctx
))
4332 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
4334 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
4335 || is_global_var (OMP_CLAUSE_DECL (c
)));
4338 case OMP_CLAUSE_FIRSTPRIVATE
:
4339 case OMP_CLAUSE_COPYIN
:
4341 case OMP_CLAUSE_LINEAR
:
4342 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
4343 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4344 lastprivate_firstprivate
= true;
4346 case OMP_CLAUSE_REDUCTION
:
4347 case OMP_CLAUSE_IN_REDUCTION
:
4348 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
4350 task_reduction_p
= true;
4351 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
4353 task_reduction_other_cnt
++;
4358 task_reduction_cnt
++;
4359 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4361 var
= OMP_CLAUSE_DECL (c
);
4362 /* If var is a global variable that isn't privatized
4363 in outer contexts, we don't need to look up the
4364 original address, it is always the address of the
4365 global variable itself. */
4367 || omp_is_reference (var
)
4369 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4371 task_reduction_needs_orig_p
= true;
4372 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4373 task_reduction_cntorig
++;
4377 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4378 reduction_omp_orig_ref
= true;
4380 case OMP_CLAUSE__REDUCTEMP_
:
4381 if (!is_taskreg_ctx (ctx
))
4384 case OMP_CLAUSE__LOOPTEMP_
:
4385 /* Handle _looptemp_/_reductemp_ clauses only on
4390 case OMP_CLAUSE_LASTPRIVATE
:
4391 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4393 lastprivate_firstprivate
= true;
4394 if (pass
!= 0 || is_taskloop_ctx (ctx
))
4397 /* Even without corresponding firstprivate, if
4398 decl is Fortran allocatable, it needs outer var
4401 && lang_hooks
.decls
.omp_private_outer_ref
4402 (OMP_CLAUSE_DECL (c
)))
4403 lastprivate_firstprivate
= true;
4405 case OMP_CLAUSE_ALIGNED
:
4408 var
= OMP_CLAUSE_DECL (c
);
4409 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
4410 && !is_global_var (var
))
4412 new_var
= maybe_lookup_decl (var
, ctx
);
4413 if (new_var
== NULL_TREE
)
4414 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4415 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4416 tree alarg
= omp_clause_aligned_alignment (c
);
4417 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4418 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
4419 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4420 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4421 gimplify_and_add (x
, ilist
);
4423 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
4424 && is_global_var (var
))
4426 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
4427 new_var
= lookup_decl (var
, ctx
);
4428 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4429 t
= build_fold_addr_expr_loc (clause_loc
, t
);
4430 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4431 tree alarg
= omp_clause_aligned_alignment (c
);
4432 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4433 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
4434 t
= fold_convert_loc (clause_loc
, ptype
, t
);
4435 x
= create_tmp_var (ptype
);
4436 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
4437 gimplify_and_add (t
, ilist
);
4438 t
= build_simple_mem_ref_loc (clause_loc
, x
);
4439 SET_DECL_VALUE_EXPR (new_var
, t
);
4440 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4443 case OMP_CLAUSE__CONDTEMP_
:
4444 if (is_parallel_ctx (ctx
)
4445 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
4452 if (task_reduction_p
!= (pass
>= 2))
4455 new_var
= var
= OMP_CLAUSE_DECL (c
);
4456 if ((c_kind
== OMP_CLAUSE_REDUCTION
4457 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4458 && TREE_CODE (var
) == MEM_REF
)
4460 var
= TREE_OPERAND (var
, 0);
4461 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
4462 var
= TREE_OPERAND (var
, 0);
4463 if (TREE_CODE (var
) == INDIRECT_REF
4464 || TREE_CODE (var
) == ADDR_EXPR
)
4465 var
= TREE_OPERAND (var
, 0);
4466 if (is_variable_sized (var
))
4468 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
4469 var
= DECL_VALUE_EXPR (var
);
4470 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
4471 var
= TREE_OPERAND (var
, 0);
4472 gcc_assert (DECL_P (var
));
4476 if (c_kind
!= OMP_CLAUSE_COPYIN
)
4477 new_var
= lookup_decl (var
, ctx
);
4479 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
4484 /* C/C++ array section reductions. */
4485 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4486 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4487 && var
!= OMP_CLAUSE_DECL (c
))
4492 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
4493 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
4495 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
4497 tree b
= TREE_OPERAND (orig_var
, 1);
4498 b
= maybe_lookup_decl (b
, ctx
);
4501 b
= TREE_OPERAND (orig_var
, 1);
4502 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
4504 if (integer_zerop (bias
))
4508 bias
= fold_convert_loc (clause_loc
,
4509 TREE_TYPE (b
), bias
);
4510 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4511 TREE_TYPE (b
), b
, bias
);
4513 orig_var
= TREE_OPERAND (orig_var
, 0);
4517 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4518 if (is_global_var (out
)
4519 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
4520 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
4521 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
4526 bool by_ref
= use_pointer_for_field (var
, NULL
);
4527 x
= build_receiver_ref (var
, by_ref
, ctx
);
4528 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
4529 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
4531 x
= build_fold_addr_expr (x
);
4533 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
4534 x
= build_simple_mem_ref (x
);
4535 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
4537 if (var
== TREE_OPERAND (orig_var
, 0))
4538 x
= build_fold_addr_expr (x
);
4540 bias
= fold_convert (sizetype
, bias
);
4541 x
= fold_convert (ptr_type_node
, x
);
4542 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
4543 TREE_TYPE (x
), x
, bias
);
4544 unsigned cnt
= task_reduction_cnt
- 1;
4545 if (!task_reduction_needs_orig_p
)
4546 cnt
+= (task_reduction_cntorig_full
4547 - task_reduction_cntorig
);
4549 cnt
= task_reduction_cntorig
- 1;
4550 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4551 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4552 gimplify_assign (r
, x
, ilist
);
4556 if (TREE_CODE (orig_var
) == INDIRECT_REF
4557 || TREE_CODE (orig_var
) == ADDR_EXPR
)
4558 orig_var
= TREE_OPERAND (orig_var
, 0);
4559 tree d
= OMP_CLAUSE_DECL (c
);
4560 tree type
= TREE_TYPE (d
);
4561 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
4562 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
4563 const char *name
= get_name (orig_var
);
4566 tree xv
= create_tmp_var (ptr_type_node
);
4567 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4569 unsigned cnt
= task_reduction_cnt
- 1;
4570 if (!task_reduction_needs_orig_p
)
4571 cnt
+= (task_reduction_cntorig_full
4572 - task_reduction_cntorig
);
4574 cnt
= task_reduction_cntorig
- 1;
4575 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4576 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4578 gimple
*g
= gimple_build_assign (xv
, x
);
4579 gimple_seq_add_stmt (ilist
, g
);
4583 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4585 if (ctx
->task_reductions
[1 + idx
])
4586 off
= fold_convert (sizetype
,
4587 ctx
->task_reductions
[1 + idx
]);
4589 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4591 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
4593 gimple_seq_add_stmt (ilist
, g
);
4595 x
= fold_convert (build_pointer_type (boolean_type_node
),
4597 if (TREE_CONSTANT (v
))
4598 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
4599 TYPE_SIZE_UNIT (type
));
4602 tree t
= maybe_lookup_decl (v
, ctx
);
4606 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4607 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
4609 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4611 build_int_cst (TREE_TYPE (v
), 1));
4612 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4614 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4615 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4617 cond
= create_tmp_var (TREE_TYPE (x
));
4618 gimplify_assign (cond
, x
, ilist
);
4621 else if (TREE_CONSTANT (v
))
4623 x
= create_tmp_var_raw (type
, name
);
4624 gimple_add_tmp_var (x
);
4625 TREE_ADDRESSABLE (x
) = 1;
4626 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4631 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4632 tree t
= maybe_lookup_decl (v
, ctx
);
4636 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4637 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
4638 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4640 build_int_cst (TREE_TYPE (v
), 1));
4641 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4643 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4644 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
4645 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
4648 tree ptype
= build_pointer_type (TREE_TYPE (type
));
4649 x
= fold_convert_loc (clause_loc
, ptype
, x
);
4650 tree y
= create_tmp_var (ptype
, name
);
4651 gimplify_assign (y
, x
, ilist
);
4655 if (!integer_zerop (bias
))
4657 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4659 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4661 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
4662 pointer_sized_int_node
, yb
, bias
);
4663 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
4664 yb
= create_tmp_var (ptype
, name
);
4665 gimplify_assign (yb
, x
, ilist
);
4669 d
= TREE_OPERAND (d
, 0);
4670 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
4671 d
= TREE_OPERAND (d
, 0);
4672 if (TREE_CODE (d
) == ADDR_EXPR
)
4674 if (orig_var
!= var
)
4676 gcc_assert (is_variable_sized (orig_var
));
4677 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
4679 gimplify_assign (new_var
, x
, ilist
);
4680 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
4681 tree t
= build_fold_indirect_ref (new_var
);
4682 DECL_IGNORED_P (new_var
) = 0;
4683 TREE_THIS_NOTRAP (t
) = 1;
4684 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
4685 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
4689 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
4690 build_int_cst (ptype
, 0));
4691 SET_DECL_VALUE_EXPR (new_var
, x
);
4692 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4697 gcc_assert (orig_var
== var
);
4698 if (TREE_CODE (d
) == INDIRECT_REF
)
4700 x
= create_tmp_var (ptype
, name
);
4701 TREE_ADDRESSABLE (x
) = 1;
4702 gimplify_assign (x
, yb
, ilist
);
4703 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4705 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4706 gimplify_assign (new_var
, x
, ilist
);
4708 /* GOMP_taskgroup_reduction_register memsets the whole
4709 array to zero. If the initializer is zero, we don't
4710 need to initialize it again, just mark it as ever
4711 used unconditionally, i.e. cond = true. */
4713 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
4714 && initializer_zerop (omp_reduction_init (c
,
4717 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
4719 gimple_seq_add_stmt (ilist
, g
);
4722 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4726 if (!is_parallel_ctx (ctx
))
4728 tree condv
= create_tmp_var (boolean_type_node
);
4729 g
= gimple_build_assign (condv
,
4730 build_simple_mem_ref (cond
));
4731 gimple_seq_add_stmt (ilist
, g
);
4732 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
4733 g
= gimple_build_cond (NE_EXPR
, condv
,
4734 boolean_false_node
, end
, lab1
);
4735 gimple_seq_add_stmt (ilist
, g
);
4736 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
4738 g
= gimple_build_assign (build_simple_mem_ref (cond
),
4740 gimple_seq_add_stmt (ilist
, g
);
4743 tree y1
= create_tmp_var (ptype
);
4744 gimplify_assign (y1
, y
, ilist
);
4745 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
4746 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
4747 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
4748 if (task_reduction_needs_orig_p
)
4750 y3
= create_tmp_var (ptype
);
4752 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4753 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4754 size_int (task_reduction_cnt_full
4755 + task_reduction_cntorig
- 1),
4756 NULL_TREE
, NULL_TREE
);
4759 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4760 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
4763 gimplify_assign (y3
, ref
, ilist
);
4765 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
4769 y2
= create_tmp_var (ptype
);
4770 gimplify_assign (y2
, y
, ilist
);
4772 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4774 tree ref
= build_outer_var_ref (var
, ctx
);
4775 /* For ref build_outer_var_ref already performs this. */
4776 if (TREE_CODE (d
) == INDIRECT_REF
)
4777 gcc_assert (omp_is_reference (var
));
4778 else if (TREE_CODE (d
) == ADDR_EXPR
)
4779 ref
= build_fold_addr_expr (ref
);
4780 else if (omp_is_reference (var
))
4781 ref
= build_fold_addr_expr (ref
);
4782 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
4783 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
4784 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4786 y3
= create_tmp_var (ptype
);
4787 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
4791 y4
= create_tmp_var (ptype
);
4792 gimplify_assign (y4
, ref
, dlist
);
4796 tree i
= create_tmp_var (TREE_TYPE (v
));
4797 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
4798 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4799 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
4802 i2
= create_tmp_var (TREE_TYPE (v
));
4803 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
4804 body2
= create_artificial_label (UNKNOWN_LOCATION
);
4805 end2
= create_artificial_label (UNKNOWN_LOCATION
);
4806 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
4808 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4810 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
4811 tree decl_placeholder
4812 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
4813 SET_DECL_VALUE_EXPR (decl_placeholder
,
4814 build_simple_mem_ref (y1
));
4815 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
4816 SET_DECL_VALUE_EXPR (placeholder
,
4817 y3
? build_simple_mem_ref (y3
)
4819 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
4820 x
= lang_hooks
.decls
.omp_clause_default_ctor
4821 (c
, build_simple_mem_ref (y1
),
4822 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
4824 gimplify_and_add (x
, ilist
);
4825 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4827 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4828 lower_omp (&tseq
, ctx
);
4829 gimple_seq_add_seq (ilist
, tseq
);
4831 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4834 SET_DECL_VALUE_EXPR (decl_placeholder
,
4835 build_simple_mem_ref (y2
));
4836 SET_DECL_VALUE_EXPR (placeholder
,
4837 build_simple_mem_ref (y4
));
4838 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4839 lower_omp (&tseq
, ctx
);
4840 gimple_seq_add_seq (dlist
, tseq
);
4841 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4843 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4844 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
4847 x
= lang_hooks
.decls
.omp_clause_dtor
4848 (c
, build_simple_mem_ref (y2
));
4850 gimplify_and_add (x
, dlist
);
4855 x
= omp_reduction_init (c
, TREE_TYPE (type
));
4856 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
4858 /* reduction(-:var) sums up the partial results, so it
4859 acts identically to reduction(+:var). */
4860 if (code
== MINUS_EXPR
)
4863 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
4866 x
= build2 (code
, TREE_TYPE (type
),
4867 build_simple_mem_ref (y4
),
4868 build_simple_mem_ref (y2
));
4869 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
4873 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
4874 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4875 gimple_seq_add_stmt (ilist
, g
);
4878 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
4879 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4880 gimple_seq_add_stmt (ilist
, g
);
4882 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
4883 build_int_cst (TREE_TYPE (i
), 1));
4884 gimple_seq_add_stmt (ilist
, g
);
4885 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
4886 gimple_seq_add_stmt (ilist
, g
);
4887 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
4890 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
4891 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4892 gimple_seq_add_stmt (dlist
, g
);
4895 g
= gimple_build_assign
4896 (y4
, POINTER_PLUS_EXPR
, y4
,
4897 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4898 gimple_seq_add_stmt (dlist
, g
);
4900 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
4901 build_int_cst (TREE_TYPE (i2
), 1));
4902 gimple_seq_add_stmt (dlist
, g
);
4903 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
4904 gimple_seq_add_stmt (dlist
, g
);
4905 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
4911 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4915 bool by_ref
= use_pointer_for_field (var
, ctx
);
4916 x
= build_receiver_ref (var
, by_ref
, ctx
);
4918 if (!omp_is_reference (var
))
4919 x
= build_fold_addr_expr (x
);
4920 x
= fold_convert (ptr_type_node
, x
);
4921 unsigned cnt
= task_reduction_cnt
- 1;
4922 if (!task_reduction_needs_orig_p
)
4923 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
4925 cnt
= task_reduction_cntorig
- 1;
4926 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4927 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4928 gimplify_assign (r
, x
, ilist
);
4933 tree type
= TREE_TYPE (new_var
);
4934 if (!omp_is_reference (var
))
4935 type
= build_pointer_type (type
);
4936 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4938 unsigned cnt
= task_reduction_cnt
- 1;
4939 if (!task_reduction_needs_orig_p
)
4940 cnt
+= (task_reduction_cntorig_full
4941 - task_reduction_cntorig
);
4943 cnt
= task_reduction_cntorig
- 1;
4944 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4945 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4949 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4951 if (ctx
->task_reductions
[1 + idx
])
4952 off
= fold_convert (sizetype
,
4953 ctx
->task_reductions
[1 + idx
]);
4955 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4957 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
4960 x
= fold_convert (type
, x
);
4962 if (omp_is_reference (var
))
4964 gimplify_assign (new_var
, x
, ilist
);
4966 new_var
= build_simple_mem_ref (new_var
);
4970 t
= create_tmp_var (type
);
4971 gimplify_assign (t
, x
, ilist
);
4972 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
4973 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4975 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
4976 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
4977 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4978 cond
= create_tmp_var (TREE_TYPE (t
));
4979 gimplify_assign (cond
, t
, ilist
);
4981 else if (is_variable_sized (var
))
4983 /* For variable sized types, we need to allocate the
4984 actual storage here. Call alloca and store the
4985 result in the pointer decl that we created elsewhere. */
4989 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
4994 ptr
= DECL_VALUE_EXPR (new_var
);
4995 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
4996 ptr
= TREE_OPERAND (ptr
, 0);
4997 gcc_assert (DECL_P (ptr
));
4998 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
5000 /* void *tmp = __builtin_alloca */
5001 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5002 stmt
= gimple_build_call (atmp
, 2, x
,
5003 size_int (DECL_ALIGN (var
)));
5004 tmp
= create_tmp_var_raw (ptr_type_node
);
5005 gimple_add_tmp_var (tmp
);
5006 gimple_call_set_lhs (stmt
, tmp
);
5008 gimple_seq_add_stmt (ilist
, stmt
);
5010 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
5011 gimplify_assign (ptr
, x
, ilist
);
5014 else if (omp_is_reference (var
)
5015 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
5016 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
5018 /* For references that are being privatized for Fortran,
5019 allocate new backing storage for the new pointer
5020 variable. This allows us to avoid changing all the
5021 code that expects a pointer to something that expects
5022 a direct variable. */
5026 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
5027 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
5029 x
= build_receiver_ref (var
, false, ctx
);
5030 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5032 else if (TREE_CONSTANT (x
))
5034 /* For reduction in SIMD loop, defer adding the
5035 initialization of the reference, because if we decide
5036 to use SIMD array for it, the initilization could cause
5037 expansion ICE. Ditto for other privatization clauses. */
5042 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
5044 gimple_add_tmp_var (x
);
5045 TREE_ADDRESSABLE (x
) = 1;
5046 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5052 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5053 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
5054 tree al
= size_int (TYPE_ALIGN (rtype
));
5055 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
5060 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5061 gimplify_assign (new_var
, x
, ilist
);
5064 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5066 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5067 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5068 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5076 switch (OMP_CLAUSE_CODE (c
))
5078 case OMP_CLAUSE_SHARED
:
5079 /* Ignore shared directives in teams construct inside
5080 target construct. */
5081 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5082 && !is_host_teams_ctx (ctx
))
5084 /* Shared global vars are just accessed directly. */
5085 if (is_global_var (new_var
))
5087 /* For taskloop firstprivate/lastprivate, represented
5088 as firstprivate and shared clause on the task, new_var
5089 is the firstprivate var. */
5090 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5092 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5093 needs to be delayed until after fixup_child_record_type so
5094 that we get the correct type during the dereference. */
5095 by_ref
= use_pointer_for_field (var
, ctx
);
5096 x
= build_receiver_ref (var
, by_ref
, ctx
);
5097 SET_DECL_VALUE_EXPR (new_var
, x
);
5098 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5100 /* ??? If VAR is not passed by reference, and the variable
5101 hasn't been initialized yet, then we'll get a warning for
5102 the store into the omp_data_s structure. Ideally, we'd be
5103 able to notice this and not store anything at all, but
5104 we're generating code too early. Suppress the warning. */
5106 TREE_NO_WARNING (var
) = 1;
5109 case OMP_CLAUSE__CONDTEMP_
:
5110 if (is_parallel_ctx (ctx
))
5112 x
= build_receiver_ref (var
, false, ctx
);
5113 SET_DECL_VALUE_EXPR (new_var
, x
);
5114 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5116 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5118 x
= build_zero_cst (TREE_TYPE (var
));
5123 case OMP_CLAUSE_LASTPRIVATE
:
5124 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5128 case OMP_CLAUSE_PRIVATE
:
5129 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5130 x
= build_outer_var_ref (var
, ctx
);
5131 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5133 if (is_task_ctx (ctx
))
5134 x
= build_receiver_ref (var
, false, ctx
);
5136 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5142 nx
= lang_hooks
.decls
.omp_clause_default_ctor
5143 (c
, unshare_expr (new_var
), x
);
5146 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5147 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5148 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5149 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5150 || (gimple_omp_for_index (ctx
->stmt
, 0)
5152 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5153 || omp_is_reference (var
))
5154 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5157 if (omp_is_reference (var
))
5159 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5160 tree new_vard
= TREE_OPERAND (new_var
, 0);
5161 gcc_assert (DECL_P (new_vard
));
5162 SET_DECL_VALUE_EXPR (new_vard
,
5163 build_fold_addr_expr (lvar
));
5164 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5168 x
= lang_hooks
.decls
.omp_clause_default_ctor
5169 (c
, unshare_expr (ivar
), x
);
5170 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
5172 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
5173 unshare_expr (ivar
), x
);
5177 gimplify_and_add (x
, &llist
[0]);
5178 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5179 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5184 gcc_assert (TREE_CODE (v
) == MEM_REF
);
5185 v
= TREE_OPERAND (v
, 0);
5186 gcc_assert (DECL_P (v
));
5188 v
= *ctx
->lastprivate_conditional_map
->get (v
);
5189 tree t
= create_tmp_var (TREE_TYPE (v
));
5190 tree z
= build_zero_cst (TREE_TYPE (v
));
5192 = build_outer_var_ref (var
, ctx
,
5193 OMP_CLAUSE_LASTPRIVATE
);
5194 gimple_seq_add_stmt (dlist
,
5195 gimple_build_assign (t
, z
));
5196 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
5197 tree civar
= DECL_VALUE_EXPR (v
);
5198 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
5199 civar
= unshare_expr (civar
);
5200 TREE_OPERAND (civar
, 1) = sctx
.idx
;
5201 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
5202 unshare_expr (civar
));
5203 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
5204 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
5205 orig_v
, unshare_expr (ivar
)));
5206 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
5208 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
5210 gimple_seq tseq
= NULL
;
5211 gimplify_and_add (x
, &tseq
);
5213 lower_omp (&tseq
, ctx
->outer
);
5214 gimple_seq_add_seq (&llist
[1], tseq
);
5216 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5217 && ctx
->for_simd_scan_phase
)
5219 x
= unshare_expr (ivar
);
5221 = build_outer_var_ref (var
, ctx
,
5222 OMP_CLAUSE_LASTPRIVATE
);
5223 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5225 gimplify_and_add (x
, &llist
[0]);
5229 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5231 gimplify_and_add (y
, &llist
[1]);
5235 if (omp_is_reference (var
))
5237 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5238 tree new_vard
= TREE_OPERAND (new_var
, 0);
5239 gcc_assert (DECL_P (new_vard
));
5240 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5241 x
= TYPE_SIZE_UNIT (type
);
5242 if (TREE_CONSTANT (x
))
5244 x
= create_tmp_var_raw (type
, get_name (var
));
5245 gimple_add_tmp_var (x
);
5246 TREE_ADDRESSABLE (x
) = 1;
5247 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5248 x
= fold_convert_loc (clause_loc
,
5249 TREE_TYPE (new_vard
), x
);
5250 gimplify_assign (new_vard
, x
, ilist
);
5255 gimplify_and_add (nx
, ilist
);
5256 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5258 && ctx
->for_simd_scan_phase
)
5260 tree orig_v
= build_outer_var_ref (var
, ctx
,
5261 OMP_CLAUSE_LASTPRIVATE
);
5262 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
5264 gimplify_and_add (x
, ilist
);
5269 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5271 gimplify_and_add (x
, dlist
);
5274 case OMP_CLAUSE_LINEAR
:
5275 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
5276 goto do_firstprivate
;
5277 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
5280 x
= build_outer_var_ref (var
, ctx
);
5283 case OMP_CLAUSE_FIRSTPRIVATE
:
5284 if (is_task_ctx (ctx
))
5286 if ((omp_is_reference (var
)
5287 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
5288 || is_variable_sized (var
))
5290 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
5292 || use_pointer_for_field (var
, NULL
))
5294 x
= build_receiver_ref (var
, false, ctx
);
5295 SET_DECL_VALUE_EXPR (new_var
, x
);
5296 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5300 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
5301 && omp_is_reference (var
))
5303 x
= build_outer_var_ref (var
, ctx
);
5304 gcc_assert (TREE_CODE (x
) == MEM_REF
5305 && integer_zerop (TREE_OPERAND (x
, 1)));
5306 x
= TREE_OPERAND (x
, 0);
5307 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5308 (c
, unshare_expr (new_var
), x
);
5309 gimplify_and_add (x
, ilist
);
5313 x
= build_outer_var_ref (var
, ctx
);
5316 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5317 && gimple_omp_for_combined_into_p (ctx
->stmt
))
5319 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5320 tree stept
= TREE_TYPE (t
);
5321 tree ct
= omp_find_clause (clauses
,
5322 OMP_CLAUSE__LOOPTEMP_
);
5324 tree l
= OMP_CLAUSE_DECL (ct
);
5325 tree n1
= fd
->loop
.n1
;
5326 tree step
= fd
->loop
.step
;
5327 tree itype
= TREE_TYPE (l
);
5328 if (POINTER_TYPE_P (itype
))
5329 itype
= signed_type_for (itype
);
5330 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
5331 if (TYPE_UNSIGNED (itype
)
5332 && fd
->loop
.cond_code
== GT_EXPR
)
5333 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
5334 fold_build1 (NEGATE_EXPR
, itype
, l
),
5335 fold_build1 (NEGATE_EXPR
,
5338 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
5339 t
= fold_build2 (MULT_EXPR
, stept
,
5340 fold_convert (stept
, l
), t
);
5342 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
5344 if (omp_is_reference (var
))
5346 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5347 tree new_vard
= TREE_OPERAND (new_var
, 0);
5348 gcc_assert (DECL_P (new_vard
));
5349 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5350 nx
= TYPE_SIZE_UNIT (type
);
5351 if (TREE_CONSTANT (nx
))
5353 nx
= create_tmp_var_raw (type
,
5355 gimple_add_tmp_var (nx
);
5356 TREE_ADDRESSABLE (nx
) = 1;
5357 nx
= build_fold_addr_expr_loc (clause_loc
,
5359 nx
= fold_convert_loc (clause_loc
,
5360 TREE_TYPE (new_vard
),
5362 gimplify_assign (new_vard
, nx
, ilist
);
5366 x
= lang_hooks
.decls
.omp_clause_linear_ctor
5368 gimplify_and_add (x
, ilist
);
5372 if (POINTER_TYPE_P (TREE_TYPE (x
)))
5373 x
= fold_build2 (POINTER_PLUS_EXPR
,
5374 TREE_TYPE (x
), x
, t
);
5376 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5379 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
5380 || TREE_ADDRESSABLE (new_var
)
5381 || omp_is_reference (var
))
5382 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5385 if (omp_is_reference (var
))
5387 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5388 tree new_vard
= TREE_OPERAND (new_var
, 0);
5389 gcc_assert (DECL_P (new_vard
));
5390 SET_DECL_VALUE_EXPR (new_vard
,
5391 build_fold_addr_expr (lvar
));
5392 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5394 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
5396 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
5397 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
5398 gimplify_and_add (x
, ilist
);
5399 gimple_stmt_iterator gsi
5400 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
5402 = gimple_build_assign (unshare_expr (lvar
), iv
);
5403 gsi_insert_before_without_update (&gsi
, g
,
5405 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5406 enum tree_code code
= PLUS_EXPR
;
5407 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
5408 code
= POINTER_PLUS_EXPR
;
5409 g
= gimple_build_assign (iv
, code
, iv
, t
);
5410 gsi_insert_before_without_update (&gsi
, g
,
5414 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5415 (c
, unshare_expr (ivar
), x
);
5416 gimplify_and_add (x
, &llist
[0]);
5417 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5419 gimplify_and_add (x
, &llist
[1]);
5422 if (omp_is_reference (var
))
5424 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5425 tree new_vard
= TREE_OPERAND (new_var
, 0);
5426 gcc_assert (DECL_P (new_vard
));
5427 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5428 nx
= TYPE_SIZE_UNIT (type
);
5429 if (TREE_CONSTANT (nx
))
5431 nx
= create_tmp_var_raw (type
, get_name (var
));
5432 gimple_add_tmp_var (nx
);
5433 TREE_ADDRESSABLE (nx
) = 1;
5434 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
5435 nx
= fold_convert_loc (clause_loc
,
5436 TREE_TYPE (new_vard
), nx
);
5437 gimplify_assign (new_vard
, nx
, ilist
);
5441 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5442 (c
, unshare_expr (new_var
), x
);
5443 gimplify_and_add (x
, ilist
);
5446 case OMP_CLAUSE__LOOPTEMP_
:
5447 case OMP_CLAUSE__REDUCTEMP_
:
5448 gcc_assert (is_taskreg_ctx (ctx
));
5449 x
= build_outer_var_ref (var
, ctx
);
5450 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5451 gimplify_and_add (x
, ilist
);
5454 case OMP_CLAUSE_COPYIN
:
5455 by_ref
= use_pointer_for_field (var
, NULL
);
5456 x
= build_receiver_ref (var
, by_ref
, ctx
);
5457 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
5458 append_to_statement_list (x
, ©in_seq
);
5459 copyin_by_ref
|= by_ref
;
5462 case OMP_CLAUSE_REDUCTION
:
5463 case OMP_CLAUSE_IN_REDUCTION
:
5464 /* OpenACC reductions are initialized using the
5465 GOACC_REDUCTION internal function. */
5466 if (is_gimple_omp_oacc (ctx
->stmt
))
5468 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5470 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5472 tree ptype
= TREE_TYPE (placeholder
);
5475 x
= error_mark_node
;
5476 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
5477 && !task_reduction_needs_orig_p
)
5479 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5481 tree pptype
= build_pointer_type (ptype
);
5482 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5483 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5484 size_int (task_reduction_cnt_full
5485 + task_reduction_cntorig
- 1),
5486 NULL_TREE
, NULL_TREE
);
5490 = *ctx
->task_reduction_map
->get (c
);
5491 x
= task_reduction_read (ilist
, tskred_temp
,
5492 pptype
, 7 + 3 * idx
);
5494 x
= fold_convert (pptype
, x
);
5495 x
= build_simple_mem_ref (x
);
5500 x
= build_outer_var_ref (var
, ctx
);
5502 if (omp_is_reference (var
)
5503 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
5504 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5506 SET_DECL_VALUE_EXPR (placeholder
, x
);
5507 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5508 tree new_vard
= new_var
;
5509 if (omp_is_reference (var
))
5511 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5512 new_vard
= TREE_OPERAND (new_var
, 0);
5513 gcc_assert (DECL_P (new_vard
));
5515 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5517 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5518 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5521 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5525 if (new_vard
== new_var
)
5527 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
5528 SET_DECL_VALUE_EXPR (new_var
, ivar
);
5532 SET_DECL_VALUE_EXPR (new_vard
,
5533 build_fold_addr_expr (ivar
));
5534 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5536 x
= lang_hooks
.decls
.omp_clause_default_ctor
5537 (c
, unshare_expr (ivar
),
5538 build_outer_var_ref (var
, ctx
));
5539 if (rvarp
&& ctx
->for_simd_scan_phase
)
5542 gimplify_and_add (x
, &llist
[0]);
5543 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5545 gimplify_and_add (x
, &llist
[1]);
5552 gimplify_and_add (x
, &llist
[0]);
5554 tree ivar2
= unshare_expr (lvar
);
5555 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5556 x
= lang_hooks
.decls
.omp_clause_default_ctor
5557 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
5558 gimplify_and_add (x
, &llist
[0]);
5562 x
= lang_hooks
.decls
.omp_clause_default_ctor
5563 (c
, unshare_expr (rvar2
),
5564 build_outer_var_ref (var
, ctx
));
5565 gimplify_and_add (x
, &llist
[0]);
5568 /* For types that need construction, add another
5569 private var which will be default constructed
5570 and optionally initialized with
5571 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5572 loop we want to assign this value instead of
5573 constructing and destructing it in each
5575 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
5576 gimple_add_tmp_var (nv
);
5577 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
5581 x
= lang_hooks
.decls
.omp_clause_default_ctor
5582 (c
, nv
, build_outer_var_ref (var
, ctx
));
5583 gimplify_and_add (x
, ilist
);
5585 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5587 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5588 x
= DECL_VALUE_EXPR (new_vard
);
5590 if (new_vard
!= new_var
)
5591 vexpr
= build_fold_addr_expr (nv
);
5592 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5593 lower_omp (&tseq
, ctx
);
5594 SET_DECL_VALUE_EXPR (new_vard
, x
);
5595 gimple_seq_add_seq (ilist
, tseq
);
5596 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5599 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5601 gimplify_and_add (x
, dlist
);
5604 tree ref
= build_outer_var_ref (var
, ctx
);
5605 x
= unshare_expr (ivar
);
5606 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5608 gimplify_and_add (x
, &llist
[0]);
5610 ref
= build_outer_var_ref (var
, ctx
);
5611 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
5613 gimplify_and_add (x
, &llist
[3]);
5615 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5616 if (new_vard
== new_var
)
5617 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5619 SET_DECL_VALUE_EXPR (new_vard
,
5620 build_fold_addr_expr (lvar
));
5622 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5624 gimplify_and_add (x
, &llist
[1]);
5626 tree ivar2
= unshare_expr (lvar
);
5627 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5628 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
5630 gimplify_and_add (x
, &llist
[1]);
5634 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
5636 gimplify_and_add (x
, &llist
[1]);
5641 gimplify_and_add (x
, &llist
[0]);
5642 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5644 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5645 lower_omp (&tseq
, ctx
);
5646 gimple_seq_add_seq (&llist
[0], tseq
);
5648 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5649 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5650 lower_omp (&tseq
, ctx
);
5651 gimple_seq_add_seq (&llist
[1], tseq
);
5652 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5653 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5654 if (new_vard
== new_var
)
5655 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5657 SET_DECL_VALUE_EXPR (new_vard
,
5658 build_fold_addr_expr (lvar
));
5659 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5661 gimplify_and_add (x
, &llist
[1]);
5664 /* If this is a reference to constant size reduction var
5665 with placeholder, we haven't emitted the initializer
5666 for it because it is undesirable if SIMD arrays are used.
5667 But if they aren't used, we need to emit the deferred
5668 initialization now. */
5669 else if (omp_is_reference (var
) && is_simd
)
5670 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5672 tree lab2
= NULL_TREE
;
5676 if (!is_parallel_ctx (ctx
))
5678 tree condv
= create_tmp_var (boolean_type_node
);
5679 tree m
= build_simple_mem_ref (cond
);
5680 g
= gimple_build_assign (condv
, m
);
5681 gimple_seq_add_stmt (ilist
, g
);
5683 = create_artificial_label (UNKNOWN_LOCATION
);
5684 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5685 g
= gimple_build_cond (NE_EXPR
, condv
,
5688 gimple_seq_add_stmt (ilist
, g
);
5689 gimple_seq_add_stmt (ilist
,
5690 gimple_build_label (lab1
));
5692 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5694 gimple_seq_add_stmt (ilist
, g
);
5696 x
= lang_hooks
.decls
.omp_clause_default_ctor
5697 (c
, unshare_expr (new_var
),
5699 : build_outer_var_ref (var
, ctx
));
5701 gimplify_and_add (x
, ilist
);
5703 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5704 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5706 if (ctx
->for_simd_scan_phase
)
5709 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
5711 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
5712 gimple_add_tmp_var (nv
);
5713 ctx
->cb
.decl_map
->put (new_vard
, nv
);
5714 x
= lang_hooks
.decls
.omp_clause_default_ctor
5715 (c
, nv
, build_outer_var_ref (var
, ctx
));
5717 gimplify_and_add (x
, ilist
);
5718 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5720 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5722 if (new_vard
!= new_var
)
5723 vexpr
= build_fold_addr_expr (nv
);
5724 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5725 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5726 lower_omp (&tseq
, ctx
);
5727 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
5728 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
5729 gimple_seq_add_seq (ilist
, tseq
);
5731 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5732 if (is_simd
&& ctx
->scan_exclusive
)
5735 = create_tmp_var_raw (TREE_TYPE (new_var
));
5736 gimple_add_tmp_var (nv2
);
5737 ctx
->cb
.decl_map
->put (nv
, nv2
);
5738 x
= lang_hooks
.decls
.omp_clause_default_ctor
5739 (c
, nv2
, build_outer_var_ref (var
, ctx
));
5740 gimplify_and_add (x
, ilist
);
5741 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5743 gimplify_and_add (x
, dlist
);
5745 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5747 gimplify_and_add (x
, dlist
);
5750 && ctx
->scan_exclusive
5751 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
5753 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
5754 gimple_add_tmp_var (nv2
);
5755 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
5756 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5758 gimplify_and_add (x
, dlist
);
5760 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5764 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5766 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5767 lower_omp (&tseq
, ctx
);
5768 gimple_seq_add_seq (ilist
, tseq
);
5770 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5773 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5774 lower_omp (&tseq
, ctx
);
5775 gimple_seq_add_seq (dlist
, tseq
);
5776 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5778 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5782 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5789 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
5790 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
5791 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5796 tree lab2
= NULL_TREE
;
5797 /* GOMP_taskgroup_reduction_register memsets the whole
5798 array to zero. If the initializer is zero, we don't
5799 need to initialize it again, just mark it as ever
5800 used unconditionally, i.e. cond = true. */
5801 if (initializer_zerop (x
))
5803 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5805 gimple_seq_add_stmt (ilist
, g
);
5810 if (!cond) { cond = true; new_var = x; } */
5811 if (!is_parallel_ctx (ctx
))
5813 tree condv
= create_tmp_var (boolean_type_node
);
5814 tree m
= build_simple_mem_ref (cond
);
5815 g
= gimple_build_assign (condv
, m
);
5816 gimple_seq_add_stmt (ilist
, g
);
5818 = create_artificial_label (UNKNOWN_LOCATION
);
5819 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5820 g
= gimple_build_cond (NE_EXPR
, condv
,
5823 gimple_seq_add_stmt (ilist
, g
);
5824 gimple_seq_add_stmt (ilist
,
5825 gimple_build_label (lab1
));
5827 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5829 gimple_seq_add_stmt (ilist
, g
);
5830 gimplify_assign (new_var
, x
, ilist
);
5832 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5836 /* reduction(-:var) sums up the partial results, so it
5837 acts identically to reduction(+:var). */
5838 if (code
== MINUS_EXPR
)
5841 tree new_vard
= new_var
;
5842 if (is_simd
&& omp_is_reference (var
))
5844 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5845 new_vard
= TREE_OPERAND (new_var
, 0);
5846 gcc_assert (DECL_P (new_vard
));
5848 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5850 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5851 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5854 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5858 if (new_vard
!= new_var
)
5860 SET_DECL_VALUE_EXPR (new_vard
,
5861 build_fold_addr_expr (lvar
));
5862 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5865 tree ref
= build_outer_var_ref (var
, ctx
);
5869 if (ctx
->for_simd_scan_phase
)
5871 gimplify_assign (ivar
, ref
, &llist
[0]);
5872 ref
= build_outer_var_ref (var
, ctx
);
5873 gimplify_assign (ref
, rvar
, &llist
[3]);
5877 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
5882 simt_lane
= create_tmp_var (unsigned_type_node
);
5883 x
= build_call_expr_internal_loc
5884 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
5885 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
5886 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
5887 gimplify_assign (ivar
, x
, &llist
[2]);
5889 x
= build2 (code
, TREE_TYPE (ref
), ref
, ivar
);
5890 ref
= build_outer_var_ref (var
, ctx
);
5891 gimplify_assign (ref
, x
, &llist
[1]);
5896 if (omp_is_reference (var
) && is_simd
)
5897 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5898 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5899 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5901 gimplify_assign (new_var
, x
, ilist
);
5904 tree ref
= build_outer_var_ref (var
, ctx
);
5906 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
5907 ref
= build_outer_var_ref (var
, ctx
);
5908 gimplify_assign (ref
, x
, dlist
);
5921 tree clobber
= build_constructor (TREE_TYPE (tskred_avar
), NULL
);
5922 TREE_THIS_VOLATILE (clobber
) = 1;
5923 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
5926 if (known_eq (sctx
.max_vf
, 1U))
5928 sctx
.is_simt
= false;
5929 if (ctx
->lastprivate_conditional_map
)
5931 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
5933 /* Signal to lower_omp_1 that it should use parent context. */
5934 ctx
->combined_into_simd_safelen1
= true;
5935 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5936 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5937 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5939 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
5940 omp_context
*outer
= ctx
->outer
;
5941 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
5942 outer
= outer
->outer
;
5943 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
5944 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
5945 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
5951 /* When not vectorized, treat lastprivate(conditional:) like
5952 normal lastprivate, as there will be just one simd lane
5953 writing the privatized variable. */
5954 delete ctx
->lastprivate_conditional_map
;
5955 ctx
->lastprivate_conditional_map
= NULL
;
5960 if (nonconst_simd_if
)
5962 if (sctx
.lane
== NULL_TREE
)
5964 sctx
.idx
= create_tmp_var (unsigned_type_node
);
5965 sctx
.lane
= create_tmp_var (unsigned_type_node
);
5967 /* FIXME: For now. */
5968 sctx
.is_simt
= false;
5971 if (sctx
.lane
|| sctx
.is_simt
)
5973 uid
= create_tmp_var (ptr_type_node
, "simduid");
5974 /* Don't want uninit warnings on simduid, it is always uninitialized,
5975 but we use it not for the value, but for the DECL_UID only. */
5976 TREE_NO_WARNING (uid
) = 1;
5977 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
5978 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
5979 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
5980 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
5982 /* Emit calls denoting privatized variables and initializing a pointer to
5983 structure that holds private variables as fields after ompdevlow pass. */
5986 sctx
.simt_eargs
[0] = uid
;
5988 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
5989 gimple_call_set_lhs (g
, uid
);
5990 gimple_seq_add_stmt (ilist
, g
);
5991 sctx
.simt_eargs
.release ();
5993 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
5994 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
5995 gimple_call_set_lhs (g
, simtrec
);
5996 gimple_seq_add_stmt (ilist
, g
);
6000 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6001 2 + (nonconst_simd_if
!= NULL
),
6002 uid
, integer_zero_node
,
6004 gimple_call_set_lhs (g
, sctx
.lane
);
6005 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
6006 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6007 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6008 build_int_cst (unsigned_type_node
, 0));
6009 gimple_seq_add_stmt (ilist
, g
);
6012 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6014 gimple_call_set_lhs (g
, sctx
.lastlane
);
6015 gimple_seq_add_stmt (dlist
, g
);
6016 gimple_seq_add_seq (dlist
, llist
[3]);
6018 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6021 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6022 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6023 gimple_call_set_lhs (g
, simt_vf
);
6024 gimple_seq_add_stmt (dlist
, g
);
6026 tree t
= build_int_cst (unsigned_type_node
, 1);
6027 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6028 gimple_seq_add_stmt (dlist
, g
);
6030 t
= build_int_cst (unsigned_type_node
, 0);
6031 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6032 gimple_seq_add_stmt (dlist
, g
);
6034 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6035 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6036 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6037 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6038 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6040 gimple_seq_add_seq (dlist
, llist
[2]);
6042 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6043 gimple_seq_add_stmt (dlist
, g
);
6045 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6046 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6047 gimple_seq_add_stmt (dlist
, g
);
6049 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
6051 for (int i
= 0; i
< 2; i
++)
6054 tree vf
= create_tmp_var (unsigned_type_node
);
6055 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
6056 gimple_call_set_lhs (g
, vf
);
6057 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
6058 gimple_seq_add_stmt (seq
, g
);
6059 tree t
= build_int_cst (unsigned_type_node
, 0);
6060 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6061 gimple_seq_add_stmt (seq
, g
);
6062 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6063 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6064 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6065 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
6066 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
6067 gimple_seq_add_seq (seq
, llist
[i
]);
6068 t
= build_int_cst (unsigned_type_node
, 1);
6069 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
6070 gimple_seq_add_stmt (seq
, g
);
6071 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
6072 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
6073 gimple_seq_add_stmt (seq
, g
);
6074 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
6079 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
6081 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
6082 gimple_seq_add_stmt (dlist
, g
);
6085 /* The copyin sequence is not to be executed by the main thread, since
6086 that would result in self-copies. Perhaps not visible to scalars,
6087 but it certainly is to C++ operator=. */
6090 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
6092 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
6093 build_int_cst (TREE_TYPE (x
), 0));
6094 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
6095 gimplify_and_add (x
, ilist
);
6098 /* If any copyin variable is passed by reference, we must ensure the
6099 master thread doesn't modify it before it is copied over in all
6100 threads. Similarly for variables in both firstprivate and
6101 lastprivate clauses we need to ensure the lastprivate copying
6102 happens after firstprivate copying in all threads. And similarly
6103 for UDRs if initializer expression refers to omp_orig. */
6104 if (copyin_by_ref
|| lastprivate_firstprivate
6105 || (reduction_omp_orig_ref
6106 && !ctx
->scan_inclusive
6107 && !ctx
->scan_exclusive
))
6109 /* Don't add any barrier for #pragma omp simd or
6110 #pragma omp distribute. */
6111 if (!is_task_ctx (ctx
)
6112 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
6113 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
6114 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
6117 /* If max_vf is non-zero, then we can use only a vectorization factor
6118 up to the max_vf we chose. So stick it into the safelen clause. */
6119 if (maybe_ne (sctx
.max_vf
, 0U))
6121 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
6122 OMP_CLAUSE_SAFELEN
);
6123 poly_uint64 safe_len
;
6125 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
6126 && maybe_gt (safe_len
, sctx
.max_vf
)))
6128 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
6129 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
6131 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6132 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6137 /* Create temporary variables for lastprivate(conditional:) implementation
6138 in context CTX with CLAUSES. */
6141 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
6143 tree iter_type
= NULL_TREE
;
6144 tree cond_ptr
= NULL_TREE
;
6145 tree iter_var
= NULL_TREE
;
6146 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6147 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
6148 tree next
= *clauses
;
6149 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6150 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6151 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6155 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
6157 if (iter_type
== NULL_TREE
)
6159 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
6160 iter_var
= create_tmp_var_raw (iter_type
);
6161 DECL_CONTEXT (iter_var
) = current_function_decl
;
6162 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6163 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6164 ctx
->block_vars
= iter_var
;
6166 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6167 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6168 OMP_CLAUSE_DECL (c3
) = iter_var
;
6169 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
6171 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6173 next
= OMP_CLAUSE_CHAIN (cc
);
6174 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6175 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
6176 ctx
->lastprivate_conditional_map
->put (o
, v
);
6179 if (iter_type
== NULL
)
6181 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
6183 struct omp_for_data fd
;
6184 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
6186 iter_type
= unsigned_type_for (fd
.iter_type
);
6188 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
6189 iter_type
= unsigned_type_node
;
6190 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
6194 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
6195 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6199 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
6200 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
6201 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
6202 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
6203 ctx
->block_vars
= cond_ptr
;
6204 c2
= build_omp_clause (UNKNOWN_LOCATION
,
6205 OMP_CLAUSE__CONDTEMP_
);
6206 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6207 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
6210 iter_var
= create_tmp_var_raw (iter_type
);
6211 DECL_CONTEXT (iter_var
) = current_function_decl
;
6212 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6213 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6214 ctx
->block_vars
= iter_var
;
6216 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6217 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6218 OMP_CLAUSE_DECL (c3
) = iter_var
;
6219 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
6220 OMP_CLAUSE_CHAIN (c2
) = c3
;
6221 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6223 tree v
= create_tmp_var_raw (iter_type
);
6224 DECL_CONTEXT (v
) = current_function_decl
;
6225 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
6226 DECL_CHAIN (v
) = ctx
->block_vars
;
6227 ctx
->block_vars
= v
;
6228 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6229 ctx
->lastprivate_conditional_map
->put (o
, v
);
6234 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6235 both parallel and workshare constructs. PREDICATE may be NULL if it's
6236 always true. BODY_P is the sequence to insert early initialization
6237 if needed, STMT_LIST is where the non-conditional lastprivate handling
6238 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6242 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
6243 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
6246 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
6247 bool par_clauses
= false;
6248 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
6249 unsigned HOST_WIDE_INT conditional_off
= 0;
6250 gimple_seq post_stmt_list
= NULL
;
6252 /* Early exit if there are no lastprivate or linear clauses. */
6253 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
6254 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
6255 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
6256 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
6258 if (clauses
== NULL
)
6260 /* If this was a workshare clause, see if it had been combined
6261 with its parallel. In that case, look for the clauses on the
6262 parallel statement itself. */
6263 if (is_parallel_ctx (ctx
))
6267 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6270 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6271 OMP_CLAUSE_LASTPRIVATE
);
6272 if (clauses
== NULL
)
6277 bool maybe_simt
= false;
6278 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6279 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
6281 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
6282 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
6284 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
6290 tree label_true
, arm1
, arm2
;
6291 enum tree_code pred_code
= TREE_CODE (predicate
);
6293 label
= create_artificial_label (UNKNOWN_LOCATION
);
6294 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
6295 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
6297 arm1
= TREE_OPERAND (predicate
, 0);
6298 arm2
= TREE_OPERAND (predicate
, 1);
6299 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6300 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6305 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6306 arm2
= boolean_false_node
;
6307 pred_code
= NE_EXPR
;
6311 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
6312 c
= fold_convert (integer_type_node
, c
);
6313 simtcond
= create_tmp_var (integer_type_node
);
6314 gimplify_assign (simtcond
, c
, stmt_list
);
6315 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
6317 c
= create_tmp_var (integer_type_node
);
6318 gimple_call_set_lhs (g
, c
);
6319 gimple_seq_add_stmt (stmt_list
, g
);
6320 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
6324 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
6325 gimple_seq_add_stmt (stmt_list
, stmt
);
6326 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
6329 tree cond_ptr
= NULL_TREE
;
6330 for (c
= clauses
; c
;)
6333 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6334 gimple_seq
*this_stmt_list
= stmt_list
;
6335 tree lab2
= NULL_TREE
;
6337 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6338 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6339 && ctx
->lastprivate_conditional_map
6340 && !ctx
->combined_into_simd_safelen1
)
6342 gcc_assert (body_p
);
6345 if (cond_ptr
== NULL_TREE
)
6347 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
6348 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
6350 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
6351 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6352 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
6353 gimplify_assign (v
, build_zero_cst (type
), body_p
);
6354 this_stmt_list
= cstmt_list
;
6356 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
6358 mem
= build2 (MEM_REF
, type
, cond_ptr
,
6359 build_int_cst (TREE_TYPE (cond_ptr
),
6361 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
6364 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
6365 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
6366 tree mem2
= copy_node (mem
);
6367 gimple_seq seq
= NULL
;
6368 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
6369 gimple_seq_add_seq (this_stmt_list
, seq
);
6370 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
6371 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6372 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
6373 gimple_seq_add_stmt (this_stmt_list
, g
);
6374 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
6375 gimplify_assign (mem2
, v
, this_stmt_list
);
6378 && ctx
->combined_into_simd_safelen1
6379 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6380 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6381 && ctx
->lastprivate_conditional_map
)
6382 this_stmt_list
= &post_stmt_list
;
6384 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6385 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6386 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
6388 var
= OMP_CLAUSE_DECL (c
);
6389 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6390 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
6391 && is_taskloop_ctx (ctx
))
6393 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
6394 new_var
= lookup_decl (var
, ctx
->outer
);
6398 new_var
= lookup_decl (var
, ctx
);
6399 /* Avoid uninitialized warnings for lastprivate and
6400 for linear iterators. */
6402 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6403 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
6404 TREE_NO_WARNING (new_var
) = 1;
6407 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
6409 tree val
= DECL_VALUE_EXPR (new_var
);
6410 if (TREE_CODE (val
) == ARRAY_REF
6411 && VAR_P (TREE_OPERAND (val
, 0))
6412 && lookup_attribute ("omp simd array",
6413 DECL_ATTRIBUTES (TREE_OPERAND (val
,
6416 if (lastlane
== NULL
)
6418 lastlane
= create_tmp_var (unsigned_type_node
);
6420 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6422 TREE_OPERAND (val
, 1));
6423 gimple_call_set_lhs (g
, lastlane
);
6424 gimple_seq_add_stmt (this_stmt_list
, g
);
6426 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
6427 TREE_OPERAND (val
, 0), lastlane
,
6428 NULL_TREE
, NULL_TREE
);
6429 TREE_THIS_NOTRAP (new_var
) = 1;
6432 else if (maybe_simt
)
6434 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
6435 ? DECL_VALUE_EXPR (new_var
)
6437 if (simtlast
== NULL
)
6439 simtlast
= create_tmp_var (unsigned_type_node
);
6440 gcall
*g
= gimple_build_call_internal
6441 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
6442 gimple_call_set_lhs (g
, simtlast
);
6443 gimple_seq_add_stmt (this_stmt_list
, g
);
6445 x
= build_call_expr_internal_loc
6446 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
6447 TREE_TYPE (val
), 2, val
, simtlast
);
6448 new_var
= unshare_expr (new_var
);
6449 gimplify_assign (new_var
, x
, this_stmt_list
);
6450 new_var
= unshare_expr (new_var
);
6453 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6454 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
6456 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
6457 gimple_seq_add_seq (this_stmt_list
,
6458 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
6459 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
6461 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6462 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
6464 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
6465 gimple_seq_add_seq (this_stmt_list
,
6466 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
6467 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
6471 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6472 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c
))
6474 gcc_checking_assert (is_taskloop_ctx (ctx
));
6475 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
6477 if (is_global_var (ovar
))
6481 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
6482 if (omp_is_reference (var
))
6483 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6484 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
6485 gimplify_and_add (x
, this_stmt_list
);
6488 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
6492 c
= OMP_CLAUSE_CHAIN (c
);
6493 if (c
== NULL
&& !par_clauses
)
6495 /* If this was a workshare clause, see if it had been combined
6496 with its parallel. In that case, continue looking for the
6497 clauses also on the parallel statement itself. */
6498 if (is_parallel_ctx (ctx
))
6502 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6505 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6506 OMP_CLAUSE_LASTPRIVATE
);
6512 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
6513 gimple_seq_add_seq (stmt_list
, post_stmt_list
);
6516 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6517 (which might be a placeholder). INNER is true if this is an inner
6518 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6519 join markers. Generate the before-loop forking sequence in
6520 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6521 general form of these sequences is
6523 GOACC_REDUCTION_SETUP
6525 GOACC_REDUCTION_INIT
6527 GOACC_REDUCTION_FINI
6529 GOACC_REDUCTION_TEARDOWN. */
6532 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
6533 gcall
*fork
, gcall
*join
, gimple_seq
*fork_seq
,
6534 gimple_seq
*join_seq
, omp_context
*ctx
)
6536 gimple_seq before_fork
= NULL
;
6537 gimple_seq after_fork
= NULL
;
6538 gimple_seq before_join
= NULL
;
6539 gimple_seq after_join
= NULL
;
6540 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
6541 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
6542 unsigned offset
= 0;
6544 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6545 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
6547 tree orig
= OMP_CLAUSE_DECL (c
);
6548 tree var
= maybe_lookup_decl (orig
, ctx
);
6549 tree ref_to_res
= NULL_TREE
;
6550 tree incoming
, outgoing
, v1
, v2
, v3
;
6551 bool is_private
= false;
6553 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
6554 if (rcode
== MINUS_EXPR
)
6556 else if (rcode
== TRUTH_ANDIF_EXPR
)
6557 rcode
= BIT_AND_EXPR
;
6558 else if (rcode
== TRUTH_ORIF_EXPR
)
6559 rcode
= BIT_IOR_EXPR
;
6560 tree op
= build_int_cst (unsigned_type_node
, rcode
);
6565 incoming
= outgoing
= var
;
6569 /* See if an outer construct also reduces this variable. */
6570 omp_context
*outer
= ctx
;
6572 while (omp_context
*probe
= outer
->outer
)
6574 enum gimple_code type
= gimple_code (probe
->stmt
);
6579 case GIMPLE_OMP_FOR
:
6580 cls
= gimple_omp_for_clauses (probe
->stmt
);
6583 case GIMPLE_OMP_TARGET
:
6584 if (gimple_omp_target_kind (probe
->stmt
)
6585 != GF_OMP_TARGET_KIND_OACC_PARALLEL
)
6588 cls
= gimple_omp_target_clauses (probe
->stmt
);
6596 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
6597 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
6598 && orig
== OMP_CLAUSE_DECL (cls
))
6600 incoming
= outgoing
= lookup_decl (orig
, probe
);
6601 goto has_outer_reduction
;
6603 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
6604 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
6605 && orig
== OMP_CLAUSE_DECL (cls
))
6613 /* This is the outermost construct with this reduction,
6614 see if there's a mapping for it. */
6615 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
6616 && maybe_lookup_field (orig
, outer
) && !is_private
)
6618 ref_to_res
= build_receiver_ref (orig
, false, outer
);
6619 if (omp_is_reference (orig
))
6620 ref_to_res
= build_simple_mem_ref (ref_to_res
);
6622 tree type
= TREE_TYPE (var
);
6623 if (POINTER_TYPE_P (type
))
6624 type
= TREE_TYPE (type
);
6627 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
6631 /* Try to look at enclosing contexts for reduction var,
6632 use original if no mapping found. */
6634 omp_context
*c
= ctx
->outer
;
6637 t
= maybe_lookup_decl (orig
, c
);
6640 incoming
= outgoing
= (t
? t
: orig
);
6643 has_outer_reduction
:;
6647 ref_to_res
= integer_zero_node
;
6649 if (omp_is_reference (orig
))
6651 tree type
= TREE_TYPE (var
);
6652 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
6656 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
6657 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
6660 v1
= create_tmp_var (type
, id
);
6661 v2
= create_tmp_var (type
, id
);
6662 v3
= create_tmp_var (type
, id
);
6664 gimplify_assign (v1
, var
, fork_seq
);
6665 gimplify_assign (v2
, var
, fork_seq
);
6666 gimplify_assign (v3
, var
, fork_seq
);
6668 var
= build_simple_mem_ref (var
);
6669 v1
= build_simple_mem_ref (v1
);
6670 v2
= build_simple_mem_ref (v2
);
6671 v3
= build_simple_mem_ref (v3
);
6672 outgoing
= build_simple_mem_ref (outgoing
);
6674 if (!TREE_CONSTANT (incoming
))
6675 incoming
= build_simple_mem_ref (incoming
);
6680 /* Determine position in reduction buffer, which may be used
6681 by target. The parser has ensured that this is not a
6682 variable-sized type. */
6683 fixed_size_mode mode
6684 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
6685 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
6686 offset
= (offset
+ align
- 1) & ~(align
- 1);
6687 tree off
= build_int_cst (sizetype
, offset
);
6688 offset
+= GET_MODE_SIZE (mode
);
6692 init_code
= build_int_cst (integer_type_node
,
6693 IFN_GOACC_REDUCTION_INIT
);
6694 fini_code
= build_int_cst (integer_type_node
,
6695 IFN_GOACC_REDUCTION_FINI
);
6696 setup_code
= build_int_cst (integer_type_node
,
6697 IFN_GOACC_REDUCTION_SETUP
);
6698 teardown_code
= build_int_cst (integer_type_node
,
6699 IFN_GOACC_REDUCTION_TEARDOWN
);
6703 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6704 TREE_TYPE (var
), 6, setup_code
,
6705 unshare_expr (ref_to_res
),
6706 incoming
, level
, op
, off
);
6708 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6709 TREE_TYPE (var
), 6, init_code
,
6710 unshare_expr (ref_to_res
),
6711 v1
, level
, op
, off
);
6713 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6714 TREE_TYPE (var
), 6, fini_code
,
6715 unshare_expr (ref_to_res
),
6716 v2
, level
, op
, off
);
6718 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6719 TREE_TYPE (var
), 6, teardown_code
,
6720 ref_to_res
, v3
, level
, op
, off
);
6722 gimplify_assign (v1
, setup_call
, &before_fork
);
6723 gimplify_assign (v2
, init_call
, &after_fork
);
6724 gimplify_assign (v3
, fini_call
, &before_join
);
6725 gimplify_assign (outgoing
, teardown_call
, &after_join
);
6728 /* Now stitch things together. */
6729 gimple_seq_add_seq (fork_seq
, before_fork
);
6731 gimple_seq_add_stmt (fork_seq
, fork
);
6732 gimple_seq_add_seq (fork_seq
, after_fork
);
6734 gimple_seq_add_seq (join_seq
, before_join
);
6736 gimple_seq_add_stmt (join_seq
, join
);
6737 gimple_seq_add_seq (join_seq
, after_join
);
6740 /* Generate code to implement the REDUCTION clauses, append it
6741 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6742 that should be emitted also inside of the critical section,
6743 in that case clear *CLIST afterwards, otherwise leave it as is
6744 and let the caller emit it itself. */
6747 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
6748 gimple_seq
*clist
, omp_context
*ctx
)
6750 gimple_seq sub_seq
= NULL
;
6755 /* OpenACC loop reductions are handled elsewhere. */
6756 if (is_gimple_omp_oacc (ctx
->stmt
))
6759 /* SIMD reductions are handled in lower_rec_input_clauses. */
6760 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6761 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
6764 /* inscan reductions are handled elsewhere. */
6765 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
6768 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6769 update in that case, otherwise use a lock. */
6770 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
6771 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6772 && !OMP_CLAUSE_REDUCTION_TASK (c
))
6774 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
6775 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6777 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6787 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6789 tree var
, ref
, new_var
, orig_var
;
6790 enum tree_code code
;
6791 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6793 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
6794 || OMP_CLAUSE_REDUCTION_TASK (c
))
6797 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
6798 orig_var
= var
= OMP_CLAUSE_DECL (c
);
6799 if (TREE_CODE (var
) == MEM_REF
)
6801 var
= TREE_OPERAND (var
, 0);
6802 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
6803 var
= TREE_OPERAND (var
, 0);
6804 if (TREE_CODE (var
) == ADDR_EXPR
)
6805 var
= TREE_OPERAND (var
, 0);
6808 /* If this is a pointer or referenced based array
6809 section, the var could be private in the outer
6810 context e.g. on orphaned loop construct. Pretend this
6811 is private variable's outer reference. */
6812 ccode
= OMP_CLAUSE_PRIVATE
;
6813 if (TREE_CODE (var
) == INDIRECT_REF
)
6814 var
= TREE_OPERAND (var
, 0);
6817 if (is_variable_sized (var
))
6819 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
6820 var
= DECL_VALUE_EXPR (var
);
6821 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
6822 var
= TREE_OPERAND (var
, 0);
6823 gcc_assert (DECL_P (var
));
6826 new_var
= lookup_decl (var
, ctx
);
6827 if (var
== OMP_CLAUSE_DECL (c
) && omp_is_reference (var
))
6828 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6829 ref
= build_outer_var_ref (var
, ctx
, ccode
);
6830 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6832 /* reduction(-:var) sums up the partial results, so it acts
6833 identically to reduction(+:var). */
6834 if (code
== MINUS_EXPR
)
6839 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
6841 addr
= save_expr (addr
);
6842 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
6843 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (ref
), ref
, new_var
);
6844 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
6845 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
6846 gimplify_and_add (x
, stmt_seqp
);
6849 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6851 tree d
= OMP_CLAUSE_DECL (c
);
6852 tree type
= TREE_TYPE (d
);
6853 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
6854 tree i
= create_tmp_var (TREE_TYPE (v
));
6855 tree ptype
= build_pointer_type (TREE_TYPE (type
));
6856 tree bias
= TREE_OPERAND (d
, 1);
6857 d
= TREE_OPERAND (d
, 0);
6858 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
6860 tree b
= TREE_OPERAND (d
, 1);
6861 b
= maybe_lookup_decl (b
, ctx
);
6864 b
= TREE_OPERAND (d
, 1);
6865 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
6867 if (integer_zerop (bias
))
6871 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
6872 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
6873 TREE_TYPE (b
), b
, bias
);
6875 d
= TREE_OPERAND (d
, 0);
6877 /* For ref build_outer_var_ref already performs this, so
6878 only new_var needs a dereference. */
6879 if (TREE_CODE (d
) == INDIRECT_REF
)
6881 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6882 gcc_assert (omp_is_reference (var
) && var
== orig_var
);
6884 else if (TREE_CODE (d
) == ADDR_EXPR
)
6886 if (orig_var
== var
)
6888 new_var
= build_fold_addr_expr (new_var
);
6889 ref
= build_fold_addr_expr (ref
);
6894 gcc_assert (orig_var
== var
);
6895 if (omp_is_reference (var
))
6896 ref
= build_fold_addr_expr (ref
);
6900 tree t
= maybe_lookup_decl (v
, ctx
);
6904 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
6905 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
6907 if (!integer_zerop (bias
))
6909 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
6910 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
6911 TREE_TYPE (new_var
), new_var
,
6912 unshare_expr (bias
));
6913 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
6914 TREE_TYPE (ref
), ref
, bias
);
6916 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
6917 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
6918 tree m
= create_tmp_var (ptype
);
6919 gimplify_assign (m
, new_var
, stmt_seqp
);
6921 m
= create_tmp_var (ptype
);
6922 gimplify_assign (m
, ref
, stmt_seqp
);
6924 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
6925 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6926 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6927 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
6928 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6929 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
6930 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6932 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6933 tree decl_placeholder
6934 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
6935 SET_DECL_VALUE_EXPR (placeholder
, out
);
6936 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6937 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
6938 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
6939 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
6940 gimple_seq_add_seq (&sub_seq
,
6941 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
6942 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6943 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
6944 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
6948 x
= build2 (code
, TREE_TYPE (out
), out
, priv
);
6949 out
= unshare_expr (out
);
6950 gimplify_assign (out
, x
, &sub_seq
);
6952 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
6953 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
6954 gimple_seq_add_stmt (&sub_seq
, g
);
6955 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
6956 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
6957 gimple_seq_add_stmt (&sub_seq
, g
);
6958 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
6959 build_int_cst (TREE_TYPE (i
), 1));
6960 gimple_seq_add_stmt (&sub_seq
, g
);
6961 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
6962 gimple_seq_add_stmt (&sub_seq
, g
);
6963 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
6965 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6967 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6969 if (omp_is_reference (var
)
6970 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
6972 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
6973 SET_DECL_VALUE_EXPR (placeholder
, ref
);
6974 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6975 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
6976 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
6977 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6978 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
6982 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
6983 ref
= build_outer_var_ref (var
, ctx
);
6984 gimplify_assign (ref
, x
, &sub_seq
);
6988 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
6990 gimple_seq_add_stmt (stmt_seqp
, stmt
);
6992 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
6996 gimple_seq_add_seq (stmt_seqp
, *clist
);
7000 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
7002 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7006 /* Generate code to implement the COPYPRIVATE clauses. */
7009 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
7014 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7016 tree var
, new_var
, ref
, x
;
7018 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7020 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
7023 var
= OMP_CLAUSE_DECL (c
);
7024 by_ref
= use_pointer_for_field (var
, NULL
);
7026 ref
= build_sender_ref (var
, ctx
);
7027 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
7030 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
7031 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
7033 gimplify_assign (ref
, x
, slist
);
7035 ref
= build_receiver_ref (var
, false, ctx
);
7038 ref
= fold_convert_loc (clause_loc
,
7039 build_pointer_type (TREE_TYPE (new_var
)),
7041 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
7043 if (omp_is_reference (var
))
7045 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
7046 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
7047 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7049 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
7050 gimplify_and_add (x
, rlist
);
7055 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7056 and REDUCTION from the sender (aka parent) side. */
7059 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
7063 int ignored_looptemp
= 0;
7064 bool is_taskloop
= false;
7066 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7067 by GOMP_taskloop. */
7068 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
7070 ignored_looptemp
= 2;
7074 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7076 tree val
, ref
, x
, var
;
7077 bool by_ref
, do_in
= false, do_out
= false;
7078 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7080 switch (OMP_CLAUSE_CODE (c
))
7082 case OMP_CLAUSE_PRIVATE
:
7083 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7086 case OMP_CLAUSE_FIRSTPRIVATE
:
7087 case OMP_CLAUSE_COPYIN
:
7088 case OMP_CLAUSE_LASTPRIVATE
:
7089 case OMP_CLAUSE_IN_REDUCTION
:
7090 case OMP_CLAUSE__REDUCTEMP_
:
7092 case OMP_CLAUSE_REDUCTION
:
7093 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
7096 case OMP_CLAUSE_SHARED
:
7097 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7100 case OMP_CLAUSE__LOOPTEMP_
:
7101 if (ignored_looptemp
)
7111 val
= OMP_CLAUSE_DECL (c
);
7112 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7113 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
7114 && TREE_CODE (val
) == MEM_REF
)
7116 val
= TREE_OPERAND (val
, 0);
7117 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
7118 val
= TREE_OPERAND (val
, 0);
7119 if (TREE_CODE (val
) == INDIRECT_REF
7120 || TREE_CODE (val
) == ADDR_EXPR
)
7121 val
= TREE_OPERAND (val
, 0);
7122 if (is_variable_sized (val
))
7126 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7127 outer taskloop region. */
7128 omp_context
*ctx_for_o
= ctx
;
7130 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
7131 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7132 ctx_for_o
= ctx
->outer
;
7134 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
7136 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
7137 && is_global_var (var
)
7138 && (val
== OMP_CLAUSE_DECL (c
)
7139 || !is_task_ctx (ctx
)
7140 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
7141 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
7142 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
7143 != POINTER_TYPE
)))))
7146 t
= omp_member_access_dummy_var (var
);
7149 var
= DECL_VALUE_EXPR (var
);
7150 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
7152 var
= unshare_and_remap (var
, t
, o
);
7154 var
= unshare_expr (var
);
7157 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
7159 /* Handle taskloop firstprivate/lastprivate, where the
7160 lastprivate on GIMPLE_OMP_TASK is represented as
7161 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7162 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
7163 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
7164 if (use_pointer_for_field (val
, ctx
))
7165 var
= build_fold_addr_expr (var
);
7166 gimplify_assign (x
, var
, ilist
);
7167 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
7171 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7172 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
7173 || val
== OMP_CLAUSE_DECL (c
))
7174 && is_variable_sized (val
))
7176 by_ref
= use_pointer_for_field (val
, NULL
);
7178 switch (OMP_CLAUSE_CODE (c
))
7180 case OMP_CLAUSE_FIRSTPRIVATE
:
7181 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
7183 && is_task_ctx (ctx
))
7184 TREE_NO_WARNING (var
) = 1;
7188 case OMP_CLAUSE_PRIVATE
:
7189 case OMP_CLAUSE_COPYIN
:
7190 case OMP_CLAUSE__LOOPTEMP_
:
7191 case OMP_CLAUSE__REDUCTEMP_
:
7195 case OMP_CLAUSE_LASTPRIVATE
:
7196 if (by_ref
|| omp_is_reference (val
))
7198 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
7205 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
7210 case OMP_CLAUSE_REDUCTION
:
7211 case OMP_CLAUSE_IN_REDUCTION
:
7213 if (val
== OMP_CLAUSE_DECL (c
))
7215 if (is_task_ctx (ctx
))
7216 by_ref
= use_pointer_for_field (val
, ctx
);
7218 do_out
= !(by_ref
|| omp_is_reference (val
));
7221 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
7230 ref
= build_sender_ref (val
, ctx
);
7231 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
7232 gimplify_assign (ref
, x
, ilist
);
7233 if (is_task_ctx (ctx
))
7234 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
7239 ref
= build_sender_ref (val
, ctx
);
7240 gimplify_assign (var
, ref
, olist
);
7245 /* Generate code to implement SHARED from the sender (aka parent)
7246 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7247 list things that got automatically shared. */
7250 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
7252 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
7254 if (ctx
->record_type
== NULL
)
7257 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
7258 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
7260 ovar
= DECL_ABSTRACT_ORIGIN (f
);
7261 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
7264 nvar
= maybe_lookup_decl (ovar
, ctx
);
7265 if (!nvar
|| !DECL_HAS_VALUE_EXPR_P (nvar
))
7268 /* If CTX is a nested parallel directive. Find the immediately
7269 enclosing parallel or workshare construct that contains a
7270 mapping for OVAR. */
7271 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
7273 t
= omp_member_access_dummy_var (var
);
7276 var
= DECL_VALUE_EXPR (var
);
7277 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
7279 var
= unshare_and_remap (var
, t
, o
);
7281 var
= unshare_expr (var
);
7284 if (use_pointer_for_field (ovar
, ctx
))
7286 x
= build_sender_ref (ovar
, ctx
);
7287 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
7288 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
7290 gcc_assert (is_parallel_ctx (ctx
)
7291 && DECL_ARTIFICIAL (ovar
));
7292 /* _condtemp_ clause. */
7293 var
= build_constructor (TREE_TYPE (x
), NULL
);
7296 var
= build_fold_addr_expr (var
);
7297 gimplify_assign (x
, var
, ilist
);
7301 x
= build_sender_ref (ovar
, ctx
);
7302 gimplify_assign (x
, var
, ilist
);
7304 if (!TREE_READONLY (var
)
7305 /* We don't need to receive a new reference to a result
7306 or parm decl. In fact we may not store to it as we will
7307 invalidate any pending RSO and generate wrong gimple
7309 && !((TREE_CODE (var
) == RESULT_DECL
7310 || TREE_CODE (var
) == PARM_DECL
)
7311 && DECL_BY_REFERENCE (var
)))
7313 x
= build_sender_ref (ovar
, ctx
);
7314 gimplify_assign (var
, x
, olist
);
7320 /* Emit an OpenACC head marker call, encapulating the partitioning and
7321 other information that must be processed by the target compiler.
7322 Return the maximum number of dimensions the associated loop might
7323 be partitioned over. */
7326 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
7327 gimple_seq
*seq
, omp_context
*ctx
)
7329 unsigned levels
= 0;
7331 tree gang_static
= NULL_TREE
;
7332 auto_vec
<tree
, 5> args
;
7334 args
.quick_push (build_int_cst
7335 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
7336 args
.quick_push (ddvar
);
7337 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7339 switch (OMP_CLAUSE_CODE (c
))
7341 case OMP_CLAUSE_GANG
:
7342 tag
|= OLF_DIM_GANG
;
7343 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
7344 /* static:* is represented by -1, and we can ignore it, as
7345 scheduling is always static. */
7346 if (gang_static
&& integer_minus_onep (gang_static
))
7347 gang_static
= NULL_TREE
;
7351 case OMP_CLAUSE_WORKER
:
7352 tag
|= OLF_DIM_WORKER
;
7356 case OMP_CLAUSE_VECTOR
:
7357 tag
|= OLF_DIM_VECTOR
;
7361 case OMP_CLAUSE_SEQ
:
7365 case OMP_CLAUSE_AUTO
:
7369 case OMP_CLAUSE_INDEPENDENT
:
7370 tag
|= OLF_INDEPENDENT
;
7373 case OMP_CLAUSE_TILE
:
7384 if (DECL_P (gang_static
))
7385 gang_static
= build_outer_var_ref (gang_static
, ctx
);
7386 tag
|= OLF_GANG_STATIC
;
7389 /* In a parallel region, loops are implicitly INDEPENDENT. */
7390 omp_context
*tgt
= enclosing_target_ctx (ctx
);
7391 if (!tgt
|| is_oacc_parallel (tgt
))
7392 tag
|= OLF_INDEPENDENT
;
7395 /* Tiling could use all 3 levels. */
7399 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7400 Ensure at least one level, or 2 for possible auto
7402 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
7403 << OLF_DIM_BASE
) | OLF_SEQ
));
7405 if (levels
< 1u + maybe_auto
)
7406 levels
= 1u + maybe_auto
;
7409 args
.quick_push (build_int_cst (integer_type_node
, levels
));
7410 args
.quick_push (build_int_cst (integer_type_node
, tag
));
7412 args
.quick_push (gang_static
);
7414 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
7415 gimple_set_location (call
, loc
);
7416 gimple_set_lhs (call
, ddvar
);
7417 gimple_seq_add_stmt (seq
, call
);
7422 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7423 partitioning level of the enclosed region. */
7426 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
7427 tree tofollow
, gimple_seq
*seq
)
7429 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
7430 : IFN_UNIQUE_OACC_TAIL_MARK
);
7431 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
7432 int nargs
= 2 + (tofollow
!= NULL_TREE
);
7433 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
7434 marker
, ddvar
, tofollow
);
7435 gimple_set_location (call
, loc
);
7436 gimple_set_lhs (call
, ddvar
);
7437 gimple_seq_add_stmt (seq
, call
);
7440 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7441 the loop clauses, from which we extract reductions. Initialize
7445 lower_oacc_head_tail (location_t loc
, tree clauses
,
7446 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
7449 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
7450 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
7452 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
7453 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
7454 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
7457 for (unsigned done
= 1; count
; count
--, done
++)
7459 gimple_seq fork_seq
= NULL
;
7460 gimple_seq join_seq
= NULL
;
7462 tree place
= build_int_cst (integer_type_node
, -1);
7463 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7464 fork_kind
, ddvar
, place
);
7465 gimple_set_location (fork
, loc
);
7466 gimple_set_lhs (fork
, ddvar
);
7468 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7469 join_kind
, ddvar
, place
);
7470 gimple_set_location (join
, loc
);
7471 gimple_set_lhs (join
, ddvar
);
7473 /* Mark the beginning of this level sequence. */
7475 lower_oacc_loop_marker (loc
, ddvar
, true,
7476 build_int_cst (integer_type_node
, count
),
7478 lower_oacc_loop_marker (loc
, ddvar
, false,
7479 build_int_cst (integer_type_node
, done
),
7482 lower_oacc_reductions (loc
, clauses
, place
, inner
,
7483 fork
, join
, &fork_seq
, &join_seq
, ctx
);
7485 /* Append this level to head. */
7486 gimple_seq_add_seq (head
, fork_seq
);
7487 /* Prepend it to tail. */
7488 gimple_seq_add_seq (&join_seq
, *tail
);
7494 /* Mark the end of the sequence. */
7495 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
7496 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
7499 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7500 catch handler and return it. This prevents programs from violating the
7501 structured block semantics with throws. */
7504 maybe_catch_exception (gimple_seq body
)
7509 if (!flag_exceptions
)
7512 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
7513 decl
= lang_hooks
.eh_protect_cleanup_actions ();
7515 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
7517 g
= gimple_build_eh_must_not_throw (decl
);
7518 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
7521 return gimple_seq_alloc_with_stmt (g
);
7525 /* Routines to lower OMP directives into OMP-GIMPLE. */
7527 /* If ctx is a worksharing context inside of a cancellable parallel
7528 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7529 and conditional branch to parallel's cancel_label to handle
7530 cancellation in the implicit barrier. */
7533 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
7536 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
7537 if (gimple_omp_return_nowait_p (omp_return
))
7539 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
7540 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
7541 && outer
->cancellable
)
7543 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
7544 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
7545 tree lhs
= create_tmp_var (c_bool_type
);
7546 gimple_omp_return_set_lhs (omp_return
, lhs
);
7547 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
7548 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
7549 fold_convert (c_bool_type
,
7550 boolean_false_node
),
7551 outer
->cancel_label
, fallthru_label
);
7552 gimple_seq_add_stmt (body
, g
);
7553 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
7555 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
7559 /* Find the first task_reduction or reduction clause or return NULL
7560 if there are none. */
7563 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
7564 enum omp_clause_code ccode
)
7568 clauses
= omp_find_clause (clauses
, ccode
);
7569 if (clauses
== NULL_TREE
)
7571 if (ccode
!= OMP_CLAUSE_REDUCTION
7572 || code
== OMP_TASKLOOP
7573 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
7575 clauses
= OMP_CLAUSE_CHAIN (clauses
);
7579 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
7580 gimple_seq
*, gimple_seq
*);
7582 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7583 CTX is the enclosing OMP context for the current statement. */
7586 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7588 tree block
, control
;
7589 gimple_stmt_iterator tgsi
;
7590 gomp_sections
*stmt
;
7592 gbind
*new_stmt
, *bind
;
7593 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
7595 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
7597 push_gimplify_context ();
7603 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
7604 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
7605 tree rtmp
= NULL_TREE
;
7608 tree type
= build_pointer_type (pointer_sized_int_node
);
7609 tree temp
= create_tmp_var (type
);
7610 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
7611 OMP_CLAUSE_DECL (c
) = temp
;
7612 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
7613 gimple_omp_sections_set_clauses (stmt
, c
);
7614 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
7615 gimple_omp_sections_clauses (stmt
),
7616 &ilist
, &tred_dlist
);
7618 rtmp
= make_ssa_name (type
);
7619 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
7622 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
7623 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
7625 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
7626 &ilist
, &dlist
, ctx
, NULL
);
7628 control
= create_tmp_var (unsigned_type_node
, ".section");
7629 gimple_omp_sections_set_control (stmt
, control
);
7631 new_body
= gimple_omp_body (stmt
);
7632 gimple_omp_set_body (stmt
, NULL
);
7633 tgsi
= gsi_start (new_body
);
7634 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
7639 sec_start
= gsi_stmt (tgsi
);
7640 sctx
= maybe_lookup_ctx (sec_start
);
7643 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
7644 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
7645 GSI_CONTINUE_LINKING
);
7646 gimple_omp_set_body (sec_start
, NULL
);
7648 if (gsi_one_before_end_p (tgsi
))
7650 gimple_seq l
= NULL
;
7651 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
7652 &ilist
, &l
, &clist
, ctx
);
7653 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
7654 gimple_omp_section_set_last (sec_start
);
7657 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
7658 GSI_CONTINUE_LINKING
);
7661 block
= make_node (BLOCK
);
7662 bind
= gimple_build_bind (NULL
, new_body
, block
);
7665 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
7669 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
7670 gcall
*g
= gimple_build_call (fndecl
, 0);
7671 gimple_seq_add_stmt (&olist
, g
);
7672 gimple_seq_add_seq (&olist
, clist
);
7673 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
7674 g
= gimple_build_call (fndecl
, 0);
7675 gimple_seq_add_stmt (&olist
, g
);
7678 block
= make_node (BLOCK
);
7679 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
7680 gsi_replace (gsi_p
, new_stmt
, true);
7682 pop_gimplify_context (new_stmt
);
7683 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
7684 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
7685 if (BLOCK_VARS (block
))
7686 TREE_USED (block
) = 1;
7689 gimple_seq_add_seq (&new_body
, ilist
);
7690 gimple_seq_add_stmt (&new_body
, stmt
);
7691 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
7692 gimple_seq_add_stmt (&new_body
, bind
);
7694 t
= gimple_build_omp_continue (control
, control
);
7695 gimple_seq_add_stmt (&new_body
, t
);
7697 gimple_seq_add_seq (&new_body
, olist
);
7698 if (ctx
->cancellable
)
7699 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
7700 gimple_seq_add_seq (&new_body
, dlist
);
7702 new_body
= maybe_catch_exception (new_body
);
7704 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
7705 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
7706 t
= gimple_build_omp_return (nowait
);
7707 gimple_seq_add_stmt (&new_body
, t
);
7708 gimple_seq_add_seq (&new_body
, tred_dlist
);
7709 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
7712 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
7714 gimple_bind_set_body (new_stmt
, new_body
);
7718 /* A subroutine of lower_omp_single. Expand the simple form of
7719 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7721 if (GOMP_single_start ())
7723 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7725 FIXME. It may be better to delay expanding the logic of this until
7726 pass_expand_omp. The expanded logic may make the job more difficult
7727 to a synchronization analysis pass. */
7730 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
7732 location_t loc
= gimple_location (single_stmt
);
7733 tree tlabel
= create_artificial_label (loc
);
7734 tree flabel
= create_artificial_label (loc
);
7735 gimple
*call
, *cond
;
7738 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
7739 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
7740 call
= gimple_build_call (decl
, 0);
7741 gimple_call_set_lhs (call
, lhs
);
7742 gimple_seq_add_stmt (pre_p
, call
);
7744 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
7745 fold_convert_loc (loc
, TREE_TYPE (lhs
),
7748 gimple_seq_add_stmt (pre_p
, cond
);
7749 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
7750 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7751 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
7755 /* A subroutine of lower_omp_single. Expand the simple form of
7756 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7758 #pragma omp single copyprivate (a, b, c)
7760 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7763 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7769 GOMP_single_copy_end (©out);
7780 FIXME. It may be better to delay expanding the logic of this until
7781 pass_expand_omp. The expanded logic may make the job more difficult
7782 to a synchronization analysis pass. */
7785 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
7788 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
7789 gimple_seq copyin_seq
;
7790 location_t loc
= gimple_location (single_stmt
);
7792 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
7794 ptr_type
= build_pointer_type (ctx
->record_type
);
7795 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
7797 l0
= create_artificial_label (loc
);
7798 l1
= create_artificial_label (loc
);
7799 l2
= create_artificial_label (loc
);
7801 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
7802 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
7803 t
= fold_convert_loc (loc
, ptr_type
, t
);
7804 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
7806 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
7807 build_int_cst (ptr_type
, 0));
7808 t
= build3 (COND_EXPR
, void_type_node
, t
,
7809 build_and_jump (&l0
), build_and_jump (&l1
));
7810 gimplify_and_add (t
, pre_p
);
7812 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
7814 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7817 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
7820 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
7821 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
7822 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
7823 gimplify_and_add (t
, pre_p
);
7825 t
= build_and_jump (&l2
);
7826 gimplify_and_add (t
, pre_p
);
7828 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
7830 gimple_seq_add_seq (pre_p
, copyin_seq
);
7832 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
7836 /* Expand code for an OpenMP single directive. */
7839 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7842 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
7844 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
7846 push_gimplify_context ();
7848 block
= make_node (BLOCK
);
7849 bind
= gimple_build_bind (NULL
, NULL
, block
);
7850 gsi_replace (gsi_p
, bind
, true);
7853 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
7854 &bind_body
, &dlist
, ctx
, NULL
);
7855 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
7857 gimple_seq_add_stmt (&bind_body
, single_stmt
);
7859 if (ctx
->record_type
)
7860 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
7862 lower_omp_single_simple (single_stmt
, &bind_body
);
7864 gimple_omp_set_body (single_stmt
, NULL
);
7866 gimple_seq_add_seq (&bind_body
, dlist
);
7868 bind_body
= maybe_catch_exception (bind_body
);
7870 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
7871 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
7872 gimple
*g
= gimple_build_omp_return (nowait
);
7873 gimple_seq_add_stmt (&bind_body_tail
, g
);
7874 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
7875 if (ctx
->record_type
)
7877 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
7878 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
7879 TREE_THIS_VOLATILE (clobber
) = 1;
7880 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
7881 clobber
), GSI_SAME_STMT
);
7883 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
7884 gimple_bind_set_body (bind
, bind_body
);
7886 pop_gimplify_context (bind
);
7888 gimple_bind_append_vars (bind
, ctx
->block_vars
);
7889 BLOCK_VARS (block
) = ctx
->block_vars
;
7890 if (BLOCK_VARS (block
))
7891 TREE_USED (block
) = 1;
7895 /* Expand code for an OpenMP master directive. */
7898 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7900 tree block
, lab
= NULL
, x
, bfn_decl
;
7901 gimple
*stmt
= gsi_stmt (*gsi_p
);
7903 location_t loc
= gimple_location (stmt
);
7906 push_gimplify_context ();
7908 block
= make_node (BLOCK
);
7909 bind
= gimple_build_bind (NULL
, NULL
, block
);
7910 gsi_replace (gsi_p
, bind
, true);
7911 gimple_bind_add_stmt (bind
, stmt
);
7913 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
7914 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
7915 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
7916 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
7918 gimplify_and_add (x
, &tseq
);
7919 gimple_bind_add_seq (bind
, tseq
);
7921 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
7922 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
7923 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
7924 gimple_omp_set_body (stmt
, NULL
);
7926 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
7928 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
7930 pop_gimplify_context (bind
);
7932 gimple_bind_append_vars (bind
, ctx
->block_vars
);
7933 BLOCK_VARS (block
) = ctx
->block_vars
;
7936 /* Helper function for lower_omp_task_reductions. For a specific PASS
7937 find out the current clause it should be processed, or return false
7938 if all have been processed already. */
7941 omp_task_reduction_iterate (int pass
, enum tree_code code
,
7942 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
7943 tree
*type
, tree
*next
)
7945 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
7947 if (ccode
== OMP_CLAUSE_REDUCTION
7948 && code
!= OMP_TASKLOOP
7949 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
7951 *decl
= OMP_CLAUSE_DECL (*c
);
7952 *type
= TREE_TYPE (*decl
);
7953 if (TREE_CODE (*decl
) == MEM_REF
)
7960 if (omp_is_reference (*decl
))
7961 *type
= TREE_TYPE (*type
);
7962 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
7965 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
7974 /* Lower task_reduction and reduction clauses (the latter unless CODE is
7975 OMP_TASKGROUP only with task modifier). Register mapping of those in
7976 START sequence and reducing them and unregister them in the END sequence. */
7979 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
7980 gimple_seq
*start
, gimple_seq
*end
)
7982 enum omp_clause_code ccode
7983 = (code
== OMP_TASKGROUP
7984 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
7985 tree cancellable
= NULL_TREE
;
7986 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
7987 if (clauses
== NULL_TREE
)
7989 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
7991 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
7992 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
7993 && outer
->cancellable
)
7995 cancellable
= error_mark_node
;
7998 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
8001 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
8002 tree
*last
= &TYPE_FIELDS (record_type
);
8006 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8008 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8011 DECL_CHAIN (field
) = ifield
;
8012 last
= &DECL_CHAIN (ifield
);
8013 DECL_CONTEXT (field
) = record_type
;
8014 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8015 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8016 DECL_CONTEXT (ifield
) = record_type
;
8017 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
8018 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
8020 for (int pass
= 0; pass
< 2; pass
++)
8022 tree decl
, type
, next
;
8023 for (tree c
= clauses
;
8024 omp_task_reduction_iterate (pass
, code
, ccode
,
8025 &c
, &decl
, &type
, &next
); c
= next
)
8028 tree new_type
= type
;
8030 new_type
= remap_type (type
, &ctx
->outer
->cb
);
8032 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
8033 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
8035 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
8037 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
8038 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
8039 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
8042 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
8043 DECL_CONTEXT (field
) = record_type
;
8044 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8045 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8047 last
= &DECL_CHAIN (field
);
8049 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
8051 DECL_CONTEXT (bfield
) = record_type
;
8052 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
8053 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
8055 last
= &DECL_CHAIN (bfield
);
8059 layout_type (record_type
);
8061 /* Build up an array which registers with the runtime all the reductions
8062 and deregisters them at the end. Format documented in libgomp/task.c. */
8063 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
8064 tree avar
= create_tmp_var_raw (atype
);
8065 gimple_add_tmp_var (avar
);
8066 TREE_ADDRESSABLE (avar
) = 1;
8067 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
8068 NULL_TREE
, NULL_TREE
);
8069 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
8070 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8071 gimple_seq seq
= NULL
;
8072 tree sz
= fold_convert (pointer_sized_int_node
,
8073 TYPE_SIZE_UNIT (record_type
));
8075 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
8076 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
8077 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
8078 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
8079 ctx
->task_reductions
.create (1 + cnt
);
8080 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
8081 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
8083 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
8084 gimple_seq_add_seq (start
, seq
);
8085 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
8086 NULL_TREE
, NULL_TREE
);
8087 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
8088 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8089 NULL_TREE
, NULL_TREE
);
8090 t
= build_int_cst (pointer_sized_int_node
,
8091 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
8092 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8093 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
8094 NULL_TREE
, NULL_TREE
);
8095 t
= build_int_cst (pointer_sized_int_node
, -1);
8096 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8097 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
8098 NULL_TREE
, NULL_TREE
);
8099 t
= build_int_cst (pointer_sized_int_node
, 0);
8100 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8102 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8103 and for each task reduction checks a bool right after the private variable
8104 within that thread's chunk; if the bool is clear, it hasn't been
8105 initialized and thus isn't going to be reduced nor destructed, otherwise
8106 reduce and destruct it. */
8107 tree idx
= create_tmp_var (size_type_node
);
8108 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
8109 tree num_thr_sz
= create_tmp_var (size_type_node
);
8110 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
8111 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
8112 tree lab3
= NULL_TREE
;
8114 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8116 /* For worksharing constructs, only perform it in the master thread,
8117 with the exception of cancelled implicit barriers - then only handle
8118 the current thread. */
8119 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8120 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8121 tree thr_num
= create_tmp_var (integer_type_node
);
8122 g
= gimple_build_call (t
, 0);
8123 gimple_call_set_lhs (g
, thr_num
);
8124 gimple_seq_add_stmt (end
, g
);
8128 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8129 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8130 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8131 if (code
== OMP_FOR
)
8132 c
= gimple_omp_for_clauses (ctx
->stmt
);
8133 else /* if (code == OMP_SECTIONS) */
8134 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8135 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
8137 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
8139 gimple_seq_add_stmt (end
, g
);
8140 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8141 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
8142 gimple_seq_add_stmt (end
, g
);
8143 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
8144 build_one_cst (TREE_TYPE (idx
)));
8145 gimple_seq_add_stmt (end
, g
);
8146 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
8147 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8149 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
8150 gimple_seq_add_stmt (end
, g
);
8151 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8153 if (code
!= OMP_PARALLEL
)
8155 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
8156 tree num_thr
= create_tmp_var (integer_type_node
);
8157 g
= gimple_build_call (t
, 0);
8158 gimple_call_set_lhs (g
, num_thr
);
8159 gimple_seq_add_stmt (end
, g
);
8160 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
8161 gimple_seq_add_stmt (end
, g
);
8163 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
8167 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
8168 OMP_CLAUSE__REDUCTEMP_
);
8169 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
8170 t
= fold_convert (size_type_node
, t
);
8171 gimplify_assign (num_thr_sz
, t
, end
);
8173 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8174 NULL_TREE
, NULL_TREE
);
8175 tree data
= create_tmp_var (pointer_sized_int_node
);
8176 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
8177 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
8179 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
8180 ptr
= create_tmp_var (build_pointer_type (record_type
));
8182 ptr
= create_tmp_var (ptr_type_node
);
8183 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
8185 tree field
= TYPE_FIELDS (record_type
);
8188 field
= DECL_CHAIN (DECL_CHAIN (field
));
8189 for (int pass
= 0; pass
< 2; pass
++)
8191 tree decl
, type
, next
;
8192 for (tree c
= clauses
;
8193 omp_task_reduction_iterate (pass
, code
, ccode
,
8194 &c
, &decl
, &type
, &next
); c
= next
)
8196 tree var
= decl
, ref
;
8197 if (TREE_CODE (decl
) == MEM_REF
)
8199 var
= TREE_OPERAND (var
, 0);
8200 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
8201 var
= TREE_OPERAND (var
, 0);
8203 if (TREE_CODE (var
) == ADDR_EXPR
)
8204 var
= TREE_OPERAND (var
, 0);
8205 else if (TREE_CODE (var
) == INDIRECT_REF
)
8206 var
= TREE_OPERAND (var
, 0);
8207 tree orig_var
= var
;
8208 if (is_variable_sized (var
))
8210 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
8211 var
= DECL_VALUE_EXPR (var
);
8212 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
8213 var
= TREE_OPERAND (var
, 0);
8214 gcc_assert (DECL_P (var
));
8216 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8217 if (orig_var
!= var
)
8218 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
8219 else if (TREE_CODE (v
) == ADDR_EXPR
)
8220 t
= build_fold_addr_expr (t
);
8221 else if (TREE_CODE (v
) == INDIRECT_REF
)
8222 t
= build_fold_indirect_ref (t
);
8223 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
8225 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
8226 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
8227 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
8229 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
8230 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
8231 fold_convert (size_type_node
,
8232 TREE_OPERAND (decl
, 1)));
8236 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8237 if (!omp_is_reference (decl
))
8238 t
= build_fold_addr_expr (t
);
8240 t
= fold_convert (pointer_sized_int_node
, t
);
8242 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8243 gimple_seq_add_seq (start
, seq
);
8244 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8245 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8246 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8247 t
= unshare_expr (byte_position (field
));
8248 t
= fold_convert (pointer_sized_int_node
, t
);
8249 ctx
->task_reduction_map
->put (c
, cnt
);
8250 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
8253 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8254 gimple_seq_add_seq (start
, seq
);
8255 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8256 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
8257 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8259 tree bfield
= DECL_CHAIN (field
);
8261 if (code
== OMP_PARALLEL
|| code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8262 /* In parallel or worksharing all threads unconditionally
8263 initialize all their task reduction private variables. */
8264 cond
= boolean_true_node
;
8265 else if (TREE_TYPE (ptr
) == ptr_type_node
)
8267 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8268 unshare_expr (byte_position (bfield
)));
8270 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
8271 gimple_seq_add_seq (end
, seq
);
8272 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
8273 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
8274 build_int_cst (pbool
, 0));
8277 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
8278 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
8279 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8280 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8281 tree condv
= create_tmp_var (boolean_type_node
);
8282 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
8283 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
8285 gimple_seq_add_stmt (end
, g
);
8286 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
8287 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
8289 /* If this reduction doesn't need destruction and parallel
8290 has been cancelled, there is nothing to do for this
8291 reduction, so jump around the merge operation. */
8292 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8293 g
= gimple_build_cond (NE_EXPR
, cancellable
,
8294 build_zero_cst (TREE_TYPE (cancellable
)),
8296 gimple_seq_add_stmt (end
, g
);
8297 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8301 if (TREE_TYPE (ptr
) == ptr_type_node
)
8303 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8304 unshare_expr (byte_position (field
)));
8306 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
8307 gimple_seq_add_seq (end
, seq
);
8308 tree pbool
= build_pointer_type (TREE_TYPE (field
));
8309 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
8310 build_int_cst (pbool
, 0));
8313 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
8314 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
8316 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
8317 if (TREE_CODE (decl
) != MEM_REF
&& omp_is_reference (decl
))
8318 ref
= build_simple_mem_ref (ref
);
8319 /* reduction(-:var) sums up the partial results, so it acts
8320 identically to reduction(+:var). */
8321 if (rcode
== MINUS_EXPR
)
8323 if (TREE_CODE (decl
) == MEM_REF
)
8325 tree type
= TREE_TYPE (new_var
);
8326 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8327 tree i
= create_tmp_var (TREE_TYPE (v
));
8328 tree ptype
= build_pointer_type (TREE_TYPE (type
));
8331 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
8332 tree vv
= create_tmp_var (TREE_TYPE (v
));
8333 gimplify_assign (vv
, v
, start
);
8336 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8337 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8338 new_var
= build_fold_addr_expr (new_var
);
8339 new_var
= fold_convert (ptype
, new_var
);
8340 ref
= fold_convert (ptype
, ref
);
8341 tree m
= create_tmp_var (ptype
);
8342 gimplify_assign (m
, new_var
, end
);
8344 m
= create_tmp_var (ptype
);
8345 gimplify_assign (m
, ref
, end
);
8347 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
8348 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
8349 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
8350 gimple_seq_add_stmt (end
, gimple_build_label (body
));
8351 tree priv
= build_simple_mem_ref (new_var
);
8352 tree out
= build_simple_mem_ref (ref
);
8353 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8355 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8356 tree decl_placeholder
8357 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
8358 tree lab6
= NULL_TREE
;
8361 /* If this reduction needs destruction and parallel
8362 has been cancelled, jump around the merge operation
8363 to the destruction. */
8364 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8365 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8366 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8367 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8369 gimple_seq_add_stmt (end
, g
);
8370 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8372 SET_DECL_VALUE_EXPR (placeholder
, out
);
8373 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8374 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
8375 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
8376 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8377 gimple_seq_add_seq (end
,
8378 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8379 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8380 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8382 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8383 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
8386 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8387 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
8390 gimple_seq tseq
= NULL
;
8391 gimplify_stmt (&x
, &tseq
);
8392 gimple_seq_add_seq (end
, tseq
);
8397 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
8398 out
= unshare_expr (out
);
8399 gimplify_assign (out
, x
, end
);
8402 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
8403 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8404 gimple_seq_add_stmt (end
, g
);
8405 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
8406 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8407 gimple_seq_add_stmt (end
, g
);
8408 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
8409 build_int_cst (TREE_TYPE (i
), 1));
8410 gimple_seq_add_stmt (end
, g
);
8411 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
8412 gimple_seq_add_stmt (end
, g
);
8413 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
8415 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8417 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8418 tree oldv
= NULL_TREE
;
8419 tree lab6
= NULL_TREE
;
8422 /* If this reduction needs destruction and parallel
8423 has been cancelled, jump around the merge operation
8424 to the destruction. */
8425 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8426 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8427 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8428 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8430 gimple_seq_add_stmt (end
, g
);
8431 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8433 if (omp_is_reference (decl
)
8434 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
8436 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8437 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8438 tree refv
= create_tmp_var (TREE_TYPE (ref
));
8439 gimplify_assign (refv
, ref
, end
);
8440 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
8441 SET_DECL_VALUE_EXPR (placeholder
, ref
);
8442 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8443 tree d
= maybe_lookup_decl (decl
, ctx
);
8445 if (DECL_HAS_VALUE_EXPR_P (d
))
8446 oldv
= DECL_VALUE_EXPR (d
);
8447 if (omp_is_reference (var
))
8449 tree v
= fold_convert (TREE_TYPE (d
),
8450 build_fold_addr_expr (new_var
));
8451 SET_DECL_VALUE_EXPR (d
, v
);
8454 SET_DECL_VALUE_EXPR (d
, new_var
);
8455 DECL_HAS_VALUE_EXPR_P (d
) = 1;
8456 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8458 SET_DECL_VALUE_EXPR (d
, oldv
);
8461 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
8462 DECL_HAS_VALUE_EXPR_P (d
) = 0;
8464 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8465 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8466 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8467 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8469 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8470 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
8473 gimple_seq tseq
= NULL
;
8474 gimplify_stmt (&x
, &tseq
);
8475 gimple_seq_add_seq (end
, tseq
);
8480 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
8481 ref
= unshare_expr (ref
);
8482 gimplify_assign (ref
, x
, end
);
8484 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8486 field
= DECL_CHAIN (bfield
);
8490 if (code
== OMP_TASKGROUP
)
8492 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
8493 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8494 gimple_seq_add_stmt (start
, g
);
8499 if (code
== OMP_FOR
)
8500 c
= gimple_omp_for_clauses (ctx
->stmt
);
8501 else if (code
== OMP_SECTIONS
)
8502 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8504 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
8505 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
8506 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
8507 build_fold_addr_expr (avar
));
8508 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
8511 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
8512 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
8514 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
8515 gimple_seq_add_stmt (end
, g
);
8516 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
8517 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8519 enum built_in_function bfn
8520 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
8521 t
= builtin_decl_explicit (bfn
);
8522 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
8526 arg
= create_tmp_var (c_bool_type
);
8527 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
8531 arg
= build_int_cst (c_bool_type
, 0);
8532 g
= gimple_build_call (t
, 1, arg
);
8536 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
8537 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8539 gimple_seq_add_stmt (end
, g
);
8540 t
= build_constructor (atype
, NULL
);
8541 TREE_THIS_VOLATILE (t
) = 1;
8542 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
8545 /* Expand code for an OpenMP taskgroup directive. */
8548 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8550 gimple
*stmt
= gsi_stmt (*gsi_p
);
8553 gimple_seq dseq
= NULL
;
8554 tree block
= make_node (BLOCK
);
8556 bind
= gimple_build_bind (NULL
, NULL
, block
);
8557 gsi_replace (gsi_p
, bind
, true);
8558 gimple_bind_add_stmt (bind
, stmt
);
8560 push_gimplify_context ();
8562 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
8564 gimple_bind_add_stmt (bind
, x
);
8566 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
8567 gimple_omp_taskgroup_clauses (stmt
),
8568 gimple_bind_body_ptr (bind
), &dseq
);
8570 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8571 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8572 gimple_omp_set_body (stmt
, NULL
);
8574 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8575 gimple_bind_add_seq (bind
, dseq
);
8577 pop_gimplify_context (bind
);
8579 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8580 BLOCK_VARS (block
) = ctx
->block_vars
;
8584 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8587 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
8590 struct omp_for_data fd
;
8591 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
8594 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
8595 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
8596 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
8600 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
8601 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
8602 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
8603 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
8605 /* Merge depend clauses from multiple adjacent
8606 #pragma omp ordered depend(sink:...) constructs
8607 into one #pragma omp ordered depend(sink:...), so that
8608 we can optimize them together. */
8609 gimple_stmt_iterator gsi
= *gsi_p
;
8611 while (!gsi_end_p (gsi
))
8613 gimple
*stmt
= gsi_stmt (gsi
);
8614 if (is_gimple_debug (stmt
)
8615 || gimple_code (stmt
) == GIMPLE_NOP
)
8620 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
8622 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
8623 c
= gimple_omp_ordered_clauses (ord_stmt2
);
8625 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
8626 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8629 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
8631 gsi_remove (&gsi
, true);
8635 /* Canonicalize sink dependence clauses into one folded clause if
8638 The basic algorithm is to create a sink vector whose first
8639 element is the GCD of all the first elements, and whose remaining
8640 elements are the minimum of the subsequent columns.
8642 We ignore dependence vectors whose first element is zero because
8643 such dependencies are known to be executed by the same thread.
8645 We take into account the direction of the loop, so a minimum
8646 becomes a maximum if the loop is iterating forwards. We also
8647 ignore sink clauses where the loop direction is unknown, or where
8648 the offsets are clearly invalid because they are not a multiple
8649 of the loop increment.
8653 #pragma omp for ordered(2)
8654 for (i=0; i < N; ++i)
8655 for (j=0; j < M; ++j)
8657 #pragma omp ordered \
8658 depend(sink:i-8,j-2) \
8659 depend(sink:i,j-1) \ // Completely ignored because i+0.
8660 depend(sink:i-4,j-3) \
8661 depend(sink:i-6,j-4)
8662 #pragma omp ordered depend(source)
8667 depend(sink:-gcd(8,4,6),-min(2,3,4))
8672 /* FIXME: Computing GCD's where the first element is zero is
8673 non-trivial in the presence of collapsed loops. Do this later. */
8674 if (fd
.collapse
> 1)
8677 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
8679 /* wide_int is not a POD so it must be default-constructed. */
8680 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
8681 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
8683 tree folded_dep
= NULL_TREE
;
8684 /* TRUE if the first dimension's offset is negative. */
8685 bool neg_offset_p
= false;
8687 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
8689 while ((c
= *list_p
) != NULL
)
8691 bool remove
= false;
8693 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
8694 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8695 goto next_ordered_clause
;
8698 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
8699 vec
&& TREE_CODE (vec
) == TREE_LIST
;
8700 vec
= TREE_CHAIN (vec
), ++i
)
8702 gcc_assert (i
< len
);
8704 /* omp_extract_for_data has canonicalized the condition. */
8705 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
8706 || fd
.loops
[i
].cond_code
== GT_EXPR
);
8707 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
8708 bool maybe_lexically_later
= true;
8710 /* While the committee makes up its mind, bail if we have any
8711 non-constant steps. */
8712 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
8713 goto lower_omp_ordered_ret
;
8715 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
8716 if (POINTER_TYPE_P (itype
))
8718 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
8719 TYPE_PRECISION (itype
),
8722 /* Ignore invalid offsets that are not multiples of the step. */
8723 if (!wi::multiple_of_p (wi::abs (offset
),
8724 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
8727 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
8728 "ignoring sink clause with offset that is not "
8729 "a multiple of the loop step");
8731 goto next_ordered_clause
;
8734 /* Calculate the first dimension. The first dimension of
8735 the folded dependency vector is the GCD of the first
8736 elements, while ignoring any first elements whose offset
8740 /* Ignore dependence vectors whose first dimension is 0. */
8744 goto next_ordered_clause
;
8748 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
8750 error_at (OMP_CLAUSE_LOCATION (c
),
8751 "first offset must be in opposite direction "
8752 "of loop iterations");
8753 goto lower_omp_ordered_ret
;
8757 neg_offset_p
= forward
;
8758 /* Initialize the first time around. */
8759 if (folded_dep
== NULL_TREE
)
8762 folded_deps
[0] = offset
;
8765 folded_deps
[0] = wi::gcd (folded_deps
[0],
8769 /* Calculate minimum for the remaining dimensions. */
8772 folded_deps
[len
+ i
- 1] = offset
;
8773 if (folded_dep
== c
)
8774 folded_deps
[i
] = offset
;
8775 else if (maybe_lexically_later
8776 && !wi::eq_p (folded_deps
[i
], offset
))
8778 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
8782 for (j
= 1; j
<= i
; j
++)
8783 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
8786 maybe_lexically_later
= false;
8790 gcc_assert (i
== len
);
8794 next_ordered_clause
:
8796 *list_p
= OMP_CLAUSE_CHAIN (c
);
8798 list_p
= &OMP_CLAUSE_CHAIN (c
);
8804 folded_deps
[0] = -folded_deps
[0];
8806 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
8807 if (POINTER_TYPE_P (itype
))
8810 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
8811 = wide_int_to_tree (itype
, folded_deps
[0]);
8812 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
8813 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
8816 lower_omp_ordered_ret
:
8818 /* Ordered without clauses is #pragma omp threads, while we want
8819 a nop instead if we remove all clauses. */
8820 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
8821 gsi_replace (gsi_p
, gimple_build_nop (), true);
8825 /* Expand code for an OpenMP ordered directive. */
8828 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8831 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
8832 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
8835 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8837 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8840 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
8841 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8842 OMP_CLAUSE_THREADS
);
8844 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8847 /* FIXME: This is needs to be moved to the expansion to verify various
8848 conditions only testable on cfg with dominators computed, and also
8849 all the depend clauses to be merged still might need to be available
8850 for the runtime checks. */
8852 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
8856 push_gimplify_context ();
8858 block
= make_node (BLOCK
);
8859 bind
= gimple_build_bind (NULL
, NULL
, block
);
8860 gsi_replace (gsi_p
, bind
, true);
8861 gimple_bind_add_stmt (bind
, stmt
);
8865 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
8866 build_int_cst (NULL_TREE
, threads
));
8867 cfun
->has_simduid_loops
= true;
8870 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
8872 gimple_bind_add_stmt (bind
, x
);
8874 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
8877 counter
= create_tmp_var (integer_type_node
);
8878 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
8879 gimple_call_set_lhs (g
, counter
);
8880 gimple_bind_add_stmt (bind
, g
);
8882 body
= create_artificial_label (UNKNOWN_LOCATION
);
8883 test
= create_artificial_label (UNKNOWN_LOCATION
);
8884 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
8886 tree simt_pred
= create_tmp_var (integer_type_node
);
8887 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
8888 gimple_call_set_lhs (g
, simt_pred
);
8889 gimple_bind_add_stmt (bind
, g
);
8891 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
8892 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
8893 gimple_bind_add_stmt (bind
, g
);
8895 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
8897 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8898 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8899 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8900 gimple_omp_set_body (stmt
, NULL
);
8904 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
8905 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
8906 gimple_bind_add_stmt (bind
, g
);
8908 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
8909 tree nonneg
= create_tmp_var (integer_type_node
);
8910 gimple_seq tseq
= NULL
;
8911 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
8912 gimple_bind_add_seq (bind
, tseq
);
8914 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
8915 gimple_call_set_lhs (g
, nonneg
);
8916 gimple_bind_add_stmt (bind
, g
);
8918 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
8919 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
8920 gimple_bind_add_stmt (bind
, g
);
8922 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
8925 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
8926 build_int_cst (NULL_TREE
, threads
));
8928 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
8930 gimple_bind_add_stmt (bind
, x
);
8932 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8934 pop_gimplify_context (bind
);
8936 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8937 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8941 /* Expand code for an OpenMP scan directive and the structured block
8942 before the scan directive. */
8945 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8947 gimple
*stmt
= gsi_stmt (*gsi_p
);
8949 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
8950 tree lane
= NULL_TREE
;
8951 gimple_seq before
= NULL
;
8952 omp_context
*octx
= ctx
->outer
;
8954 if (octx
->scan_exclusive
&& !has_clauses
)
8956 gimple_stmt_iterator gsi2
= *gsi_p
;
8958 gimple
*stmt2
= gsi_stmt (gsi2
);
8959 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
8960 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
8961 the one with exclusive clause(s), comes first. */
8963 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
8964 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
8966 gsi_remove (gsi_p
, false);
8967 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
8968 ctx
= maybe_lookup_ctx (stmt2
);
8970 lower_omp_scan (gsi_p
, ctx
);
8975 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
8976 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
8977 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
8978 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
8979 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
8980 && !gimple_omp_for_combined_p (octx
->stmt
));
8981 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
8982 if (is_for_simd
&& octx
->for_simd_scan_phase
)
8985 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
8986 OMP_CLAUSE__SIMDUID_
))
8988 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
8989 lane
= create_tmp_var (unsigned_type_node
);
8990 tree t
= build_int_cst (integer_type_node
,
8992 : octx
->scan_inclusive
? 2 : 3);
8994 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
8995 gimple_call_set_lhs (g
, lane
);
8996 gimple_seq_add_stmt (&before
, g
);
8999 if (is_simd
|| is_for
)
9001 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
9002 c
; c
= OMP_CLAUSE_CHAIN (c
))
9003 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9004 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
9006 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9007 tree var
= OMP_CLAUSE_DECL (c
);
9008 tree new_var
= lookup_decl (var
, octx
);
9010 tree var2
= NULL_TREE
;
9011 tree var3
= NULL_TREE
;
9012 tree var4
= NULL_TREE
;
9013 tree lane0
= NULL_TREE
;
9014 tree new_vard
= new_var
;
9015 if (omp_is_reference (var
))
9017 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
9020 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
9022 val
= DECL_VALUE_EXPR (new_vard
);
9023 if (new_vard
!= new_var
)
9025 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
9026 val
= TREE_OPERAND (val
, 0);
9028 if (TREE_CODE (val
) == ARRAY_REF
9029 && VAR_P (TREE_OPERAND (val
, 0)))
9031 tree v
= TREE_OPERAND (val
, 0);
9032 if (lookup_attribute ("omp simd array",
9033 DECL_ATTRIBUTES (v
)))
9035 val
= unshare_expr (val
);
9036 lane0
= TREE_OPERAND (val
, 1);
9037 TREE_OPERAND (val
, 1) = lane
;
9038 var2
= lookup_decl (v
, octx
);
9039 if (octx
->scan_exclusive
)
9040 var4
= lookup_decl (var2
, octx
);
9042 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9043 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
9046 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
9047 var2
, lane
, NULL_TREE
, NULL_TREE
);
9048 TREE_THIS_NOTRAP (var2
) = 1;
9049 if (octx
->scan_exclusive
)
9051 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
9052 var4
, lane
, NULL_TREE
,
9054 TREE_THIS_NOTRAP (var4
) = 1;
9065 var2
= build_outer_var_ref (var
, octx
);
9066 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9068 var3
= maybe_lookup_decl (new_vard
, octx
);
9069 if (var3
== new_vard
|| var3
== NULL_TREE
)
9071 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
9073 var4
= maybe_lookup_decl (var3
, octx
);
9074 if (var4
== var3
|| var4
== NULL_TREE
)
9076 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
9087 && octx
->scan_exclusive
9089 && var4
== NULL_TREE
)
9090 var4
= create_tmp_var (TREE_TYPE (val
));
9092 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9094 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9099 /* If we've added a separate identity element
9100 variable, copy it over into val. */
9101 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9103 gimplify_and_add (x
, &before
);
9105 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
9107 /* Otherwise, assign to it the identity element. */
9108 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9110 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9111 tree ref
= build_outer_var_ref (var
, octx
);
9112 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9113 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9116 if (new_vard
!= new_var
)
9117 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9118 SET_DECL_VALUE_EXPR (new_vard
, val
);
9120 SET_DECL_VALUE_EXPR (placeholder
, ref
);
9121 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9122 lower_omp (&tseq
, octx
);
9124 SET_DECL_VALUE_EXPR (new_vard
, x
);
9125 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9126 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9127 gimple_seq_add_seq (&before
, tseq
);
9129 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9135 if (octx
->scan_exclusive
)
9137 tree v4
= unshare_expr (var4
);
9138 tree v2
= unshare_expr (var2
);
9139 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
9140 gimplify_and_add (x
, &before
);
9142 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9143 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9144 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9146 if (x
&& new_vard
!= new_var
)
9147 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
9149 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
9150 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9151 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9152 lower_omp (&tseq
, octx
);
9153 gimple_seq_add_seq (&before
, tseq
);
9154 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9156 SET_DECL_VALUE_EXPR (new_vard
, x
);
9157 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9158 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9159 if (octx
->scan_inclusive
)
9161 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9163 gimplify_and_add (x
, &before
);
9165 else if (lane0
== NULL_TREE
)
9167 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9169 gimplify_and_add (x
, &before
);
9177 /* input phase. Set val to initializer before
9179 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
9180 gimplify_assign (val
, x
, &before
);
9185 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
9186 if (code
== MINUS_EXPR
)
9189 tree x
= build2 (code
, TREE_TYPE (var2
),
9190 unshare_expr (var2
), unshare_expr (val
));
9191 if (octx
->scan_inclusive
)
9193 gimplify_assign (unshare_expr (var2
), x
, &before
);
9194 gimplify_assign (val
, var2
, &before
);
9198 gimplify_assign (unshare_expr (var4
),
9199 unshare_expr (var2
), &before
);
9200 gimplify_assign (var2
, x
, &before
);
9201 if (lane0
== NULL_TREE
)
9202 gimplify_assign (val
, var4
, &before
);
9206 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
9208 tree vexpr
= unshare_expr (var4
);
9209 TREE_OPERAND (vexpr
, 1) = lane0
;
9210 if (new_vard
!= new_var
)
9211 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
9212 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
9216 if (is_simd
&& !is_for_simd
)
9218 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
9219 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
9220 gsi_replace (gsi_p
, gimple_build_nop (), true);
9223 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
9226 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (stmt
));
9227 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
9232 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9233 substitution of a couple of function calls. But in the NAMED case,
9234 requires that languages coordinate a symbol name. It is therefore
9235 best put here in common code. */
9237 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
9240 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9243 tree name
, lock
, unlock
;
9244 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
9246 location_t loc
= gimple_location (stmt
);
9249 name
= gimple_omp_critical_name (stmt
);
9254 if (!critical_name_mutexes
)
9255 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
9257 tree
*n
= critical_name_mutexes
->get (name
);
9262 decl
= create_tmp_var_raw (ptr_type_node
);
9264 new_str
= ACONCAT ((".gomp_critical_user_",
9265 IDENTIFIER_POINTER (name
), NULL
));
9266 DECL_NAME (decl
) = get_identifier (new_str
);
9267 TREE_PUBLIC (decl
) = 1;
9268 TREE_STATIC (decl
) = 1;
9269 DECL_COMMON (decl
) = 1;
9270 DECL_ARTIFICIAL (decl
) = 1;
9271 DECL_IGNORED_P (decl
) = 1;
9273 varpool_node::finalize_decl (decl
);
9275 critical_name_mutexes
->put (name
, decl
);
9280 /* If '#pragma omp critical' is inside offloaded region or
9281 inside function marked as offloadable, the symbol must be
9282 marked as offloadable too. */
9284 if (cgraph_node::get (current_function_decl
)->offloadable
)
9285 varpool_node::get_create (decl
)->offloadable
= 1;
9287 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
9288 if (is_gimple_omp_offloaded (octx
->stmt
))
9290 varpool_node::get_create (decl
)->offloadable
= 1;
9294 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
9295 lock
= build_call_expr_loc (loc
, lock
, 1,
9296 build_fold_addr_expr_loc (loc
, decl
));
9298 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
9299 unlock
= build_call_expr_loc (loc
, unlock
, 1,
9300 build_fold_addr_expr_loc (loc
, decl
));
9304 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
9305 lock
= build_call_expr_loc (loc
, lock
, 0);
9307 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
9308 unlock
= build_call_expr_loc (loc
, unlock
, 0);
9311 push_gimplify_context ();
9313 block
= make_node (BLOCK
);
9314 bind
= gimple_build_bind (NULL
, NULL
, block
);
9315 gsi_replace (gsi_p
, bind
, true);
9316 gimple_bind_add_stmt (bind
, stmt
);
9318 tbody
= gimple_bind_body (bind
);
9319 gimplify_and_add (lock
, &tbody
);
9320 gimple_bind_set_body (bind
, tbody
);
9322 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9323 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9324 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9325 gimple_omp_set_body (stmt
, NULL
);
9327 tbody
= gimple_bind_body (bind
);
9328 gimplify_and_add (unlock
, &tbody
);
9329 gimple_bind_set_body (bind
, tbody
);
9331 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9333 pop_gimplify_context (bind
);
9334 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9335 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
9338 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9339 for a lastprivate clause. Given a loop control predicate of (V
9340 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9341 is appended to *DLIST, iterator initialization is appended to
9342 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9343 to be emitted in a critical section. */
9346 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
9347 gimple_seq
*dlist
, gimple_seq
*clist
,
9348 struct omp_context
*ctx
)
9350 tree clauses
, cond
, vinit
;
9351 enum tree_code cond_code
;
9354 cond_code
= fd
->loop
.cond_code
;
9355 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
9357 /* When possible, use a strict equality expression. This can let VRP
9358 type optimizations deduce the value and remove a copy. */
9359 if (tree_fits_shwi_p (fd
->loop
.step
))
9361 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
9362 if (step
== 1 || step
== -1)
9363 cond_code
= EQ_EXPR
;
9366 if (gimple_omp_for_kind (fd
->for_stmt
) == GF_OMP_FOR_KIND_GRID_LOOP
9367 || gimple_omp_for_grid_phony (fd
->for_stmt
))
9368 cond
= omp_grid_lastprivate_predicate (fd
);
9371 tree n2
= fd
->loop
.n2
;
9372 if (fd
->collapse
> 1
9373 && TREE_CODE (n2
) != INTEGER_CST
9374 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
9376 struct omp_context
*taskreg_ctx
= NULL
;
9377 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
9379 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
9380 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
9381 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
9383 if (gimple_omp_for_combined_into_p (gfor
))
9385 gcc_assert (ctx
->outer
->outer
9386 && is_parallel_ctx (ctx
->outer
->outer
));
9387 taskreg_ctx
= ctx
->outer
->outer
;
9391 struct omp_for_data outer_fd
;
9392 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
9393 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
9396 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
9397 taskreg_ctx
= ctx
->outer
->outer
;
9399 else if (is_taskreg_ctx (ctx
->outer
))
9400 taskreg_ctx
= ctx
->outer
;
9404 tree taskreg_clauses
9405 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
9406 tree innerc
= omp_find_clause (taskreg_clauses
,
9407 OMP_CLAUSE__LOOPTEMP_
);
9408 gcc_assert (innerc
);
9409 for (i
= 0; i
< fd
->collapse
; i
++)
9411 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
9412 OMP_CLAUSE__LOOPTEMP_
);
9413 gcc_assert (innerc
);
9415 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
9416 OMP_CLAUSE__LOOPTEMP_
);
9418 n2
= fold_convert (TREE_TYPE (n2
),
9419 lookup_decl (OMP_CLAUSE_DECL (innerc
),
9423 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
9426 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
9428 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
9429 if (!gimple_seq_empty_p (stmts
))
9431 gimple_seq_add_seq (&stmts
, *dlist
);
9434 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9435 vinit
= fd
->loop
.n1
;
9436 if (cond_code
== EQ_EXPR
9437 && tree_fits_shwi_p (fd
->loop
.n2
)
9438 && ! integer_zerop (fd
->loop
.n2
))
9439 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
9441 vinit
= unshare_expr (vinit
);
9443 /* Initialize the iterator variable, so that threads that don't execute
9444 any iterations don't execute the lastprivate clauses by accident. */
9445 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
9449 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9452 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
9453 struct walk_stmt_info
*wi
)
9455 gimple
*stmt
= gsi_stmt (*gsi_p
);
9457 *handled_ops_p
= true;
9458 switch (gimple_code (stmt
))
9462 case GIMPLE_OMP_FOR
:
9463 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
9464 && gimple_omp_for_combined_into_p (stmt
))
9465 *handled_ops_p
= false;
9468 case GIMPLE_OMP_SCAN
:
9469 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
9470 return integer_zero_node
;
9477 /* Helper function for lower_omp_for, add transformations for a worksharing
9478 loop with scan directives inside of it.
9479 For worksharing loop not combined with simd, transform:
9480 #pragma omp for reduction(inscan,+:r) private(i)
9481 for (i = 0; i < n; i = i + 1)
9486 #pragma omp scan inclusive(r)
9492 into two worksharing loops + code to merge results:
9494 num_threads = omp_get_num_threads ();
9495 thread_num = omp_get_thread_num ();
9496 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9501 // For UDRs this is UDR init, or if ctors are needed, copy from
9502 // var3 that has been constructed to contain the neutral element.
9506 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9507 // a shared array with num_threads elements and rprivb to a local array
9508 // number of elements equal to the number of (contiguous) iterations the
9509 // current thread will perform. controlb and controlp variables are
9510 // temporaries to handle deallocation of rprivb at the end of second
9512 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9513 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9514 for (i = 0; i < n; i = i + 1)
9517 // For UDRs this is UDR init or copy from var3.
9519 // This is the input phase from user code.
9523 // For UDRs this is UDR merge.
9525 // Rather than handing it over to the user, save to local thread's
9527 rprivb[ivar] = var2;
9528 // For exclusive scan, the above two statements are swapped.
9532 // And remember the final value from this thread's into the shared
9534 rpriva[(sizetype) thread_num] = var2;
9535 // If more than one thread, compute using Work-Efficient prefix sum
9536 // the inclusive parallel scan of the rpriva array.
9537 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9542 num_threadsu = (unsigned int) num_threads;
9543 thread_numup1 = (unsigned int) thread_num + 1;
9546 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9550 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9555 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9556 mul = REALPART_EXPR <cplx>;
9557 ovf = IMAGPART_EXPR <cplx>;
9558 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9561 andvm1 = andv + 4294967295;
9563 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9565 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9566 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9567 rpriva[l] = rpriva[l - k] + rpriva[l];
9569 if (down == 0) goto <D.2121>; else goto <D.2122>;
9577 if (k != 0) goto <D.2108>; else goto <D.2103>;
9579 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9581 // For UDRs this is UDR init or copy from var3.
9585 var2 = rpriva[thread_num - 1];
9588 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9589 reduction(inscan,+:r) private(i)
9590 for (i = 0; i < n; i = i + 1)
9593 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9594 r = var2 + rprivb[ivar];
9597 // This is the scan phase from user code.
9599 // Plus a bump of the iterator.
9605 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
9606 struct omp_for_data
*fd
, omp_context
*ctx
)
9608 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
9609 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
9611 gimple_seq body
= gimple_omp_body (stmt
);
9612 gimple_stmt_iterator input1_gsi
= gsi_none ();
9613 struct walk_stmt_info wi
;
9614 memset (&wi
, 0, sizeof (wi
));
9616 wi
.info
= (void *) &input1_gsi
;
9617 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
9618 gcc_assert (!gsi_end_p (input1_gsi
));
9620 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
9621 gimple_stmt_iterator gsi
= input1_gsi
;
9623 gimple_stmt_iterator scan1_gsi
= gsi
;
9624 gimple
*scan_stmt1
= gsi_stmt (gsi
);
9625 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
9627 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
9628 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
9629 gimple_omp_set_body (input_stmt1
, NULL
);
9630 gimple_omp_set_body (scan_stmt1
, NULL
);
9631 gimple_omp_set_body (stmt
, NULL
);
9633 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
9634 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
9635 gimple_omp_set_body (stmt
, body
);
9636 gimple_omp_set_body (input_stmt1
, input_body
);
9638 gimple_stmt_iterator input2_gsi
= gsi_none ();
9639 memset (&wi
, 0, sizeof (wi
));
9641 wi
.info
= (void *) &input2_gsi
;
9642 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
9643 gcc_assert (!gsi_end_p (input2_gsi
));
9645 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
9648 gimple_stmt_iterator scan2_gsi
= gsi
;
9649 gimple
*scan_stmt2
= gsi_stmt (gsi
);
9650 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
9651 gimple_omp_set_body (scan_stmt2
, scan_body
);
9653 gimple_stmt_iterator input3_gsi
= gsi_none ();
9654 gimple_stmt_iterator scan3_gsi
= gsi_none ();
9655 gimple_stmt_iterator input4_gsi
= gsi_none ();
9656 gimple_stmt_iterator scan4_gsi
= gsi_none ();
9657 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
9658 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
9659 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
9662 memset (&wi
, 0, sizeof (wi
));
9664 wi
.info
= (void *) &input3_gsi
;
9665 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
9666 gcc_assert (!gsi_end_p (input3_gsi
));
9668 input_stmt3
= gsi_stmt (input3_gsi
);
9672 scan_stmt3
= gsi_stmt (gsi
);
9673 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
9675 memset (&wi
, 0, sizeof (wi
));
9677 wi
.info
= (void *) &input4_gsi
;
9678 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
9679 gcc_assert (!gsi_end_p (input4_gsi
));
9681 input_stmt4
= gsi_stmt (input4_gsi
);
9685 scan_stmt4
= gsi_stmt (gsi
);
9686 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
9688 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
9689 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
9692 tree num_threads
= create_tmp_var (integer_type_node
);
9693 tree thread_num
= create_tmp_var (integer_type_node
);
9694 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
9695 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9696 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
9697 gimple_call_set_lhs (g
, num_threads
);
9698 gimple_seq_add_stmt (body_p
, g
);
9699 g
= gimple_build_call (threadnum_decl
, 0);
9700 gimple_call_set_lhs (g
, thread_num
);
9701 gimple_seq_add_stmt (body_p
, g
);
9703 tree ivar
= create_tmp_var (sizetype
);
9704 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
9705 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
9706 tree k
= create_tmp_var (unsigned_type_node
);
9707 tree l
= create_tmp_var (unsigned_type_node
);
9709 gimple_seq clist
= NULL
, mdlist
= NULL
;
9710 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
9711 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
9712 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
9713 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
9714 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
9715 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9716 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
9718 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9719 tree var
= OMP_CLAUSE_DECL (c
);
9720 tree new_var
= lookup_decl (var
, ctx
);
9721 tree var3
= NULL_TREE
;
9722 tree new_vard
= new_var
;
9723 if (omp_is_reference (var
))
9724 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
9725 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9727 var3
= maybe_lookup_decl (new_vard
, ctx
);
9728 if (var3
== new_vard
)
9732 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
9733 tree rpriva
= create_tmp_var (ptype
);
9734 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
9735 OMP_CLAUSE_DECL (nc
) = rpriva
;
9737 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9739 tree rprivb
= create_tmp_var (ptype
);
9740 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
9741 OMP_CLAUSE_DECL (nc
) = rprivb
;
9742 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
9744 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9746 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
9747 if (new_vard
!= new_var
)
9748 TREE_ADDRESSABLE (var2
) = 1;
9749 gimple_add_tmp_var (var2
);
9751 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
9752 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9753 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9754 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9755 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9757 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
9758 thread_num
, integer_minus_one_node
);
9759 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
9760 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9761 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9762 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9763 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9765 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
9766 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9767 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9768 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9769 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9771 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
9772 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
9773 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9774 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9775 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9776 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9778 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
9779 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9780 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
9781 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9783 tree var4
= is_for_simd
? new_var
: var2
;
9784 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
9787 var5
= lookup_decl (var
, input_simd_ctx
);
9788 var6
= lookup_decl (var
, scan_simd_ctx
);
9789 if (new_vard
!= new_var
)
9791 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
9792 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
9795 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9797 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9800 x
= lang_hooks
.decls
.omp_clause_default_ctor
9801 (c
, var2
, build_outer_var_ref (var
, ctx
));
9803 gimplify_and_add (x
, &clist
);
9805 x
= build_outer_var_ref (var
, ctx
);
9806 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
9808 gimplify_and_add (x
, &thr01_list
);
9810 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9811 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9814 x
= unshare_expr (var4
);
9815 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
9816 gimplify_and_add (x
, &thrn1_list
);
9817 x
= unshare_expr (var4
);
9818 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
9819 gimplify_and_add (x
, &thr02_list
);
9821 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
9823 /* Otherwise, assign to it the identity element. */
9824 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9825 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9828 if (new_vard
!= new_var
)
9829 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9830 SET_DECL_VALUE_EXPR (new_vard
, val
);
9831 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
9833 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
9834 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9835 lower_omp (&tseq
, ctx
);
9836 gimple_seq_add_seq (&thrn1_list
, tseq
);
9837 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9838 lower_omp (&tseq
, ctx
);
9839 gimple_seq_add_seq (&thr02_list
, tseq
);
9840 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9841 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9842 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9844 SET_DECL_VALUE_EXPR (new_vard
, y
);
9847 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
9848 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
9852 x
= unshare_expr (var4
);
9853 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
9854 gimplify_and_add (x
, &thrn2_list
);
9858 x
= unshare_expr (rprivb_ref
);
9859 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
9860 gimplify_and_add (x
, &scan1_list
);
9864 if (ctx
->scan_exclusive
)
9866 x
= unshare_expr (rprivb_ref
);
9867 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
9868 gimplify_and_add (x
, &scan1_list
);
9871 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9872 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9873 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9874 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9875 lower_omp (&tseq
, ctx
);
9876 gimple_seq_add_seq (&scan1_list
, tseq
);
9878 if (ctx
->scan_inclusive
)
9880 x
= unshare_expr (rprivb_ref
);
9881 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
9882 gimplify_and_add (x
, &scan1_list
);
9886 x
= unshare_expr (rpriva_ref
);
9887 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
9888 unshare_expr (var4
));
9889 gimplify_and_add (x
, &mdlist
);
9891 x
= unshare_expr (is_for_simd
? var6
: new_var
);
9892 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
9893 gimplify_and_add (x
, &input2_list
);
9896 if (new_vard
!= new_var
)
9897 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9899 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9900 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9901 SET_DECL_VALUE_EXPR (new_vard
, val
);
9902 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
9905 SET_DECL_VALUE_EXPR (placeholder
, var6
);
9906 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9909 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9910 lower_omp (&tseq
, ctx
);
9912 SET_DECL_VALUE_EXPR (new_vard
, y
);
9915 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
9916 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
9920 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
9921 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9922 lower_omp (&tseq
, ctx
);
9924 gimple_seq_add_seq (&input2_list
, tseq
);
9926 x
= build_outer_var_ref (var
, ctx
);
9927 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
9928 gimplify_and_add (x
, &last_list
);
9930 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
9931 gimplify_and_add (x
, &reduc_list
);
9932 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9933 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9935 if (new_vard
!= new_var
)
9936 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9937 SET_DECL_VALUE_EXPR (new_vard
, val
);
9938 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
9939 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9940 lower_omp (&tseq
, ctx
);
9941 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9942 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9943 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9945 SET_DECL_VALUE_EXPR (new_vard
, y
);
9948 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
9949 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
9951 gimple_seq_add_seq (&reduc_list
, tseq
);
9952 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
9953 gimplify_and_add (x
, &reduc_list
);
9955 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
9957 gimplify_and_add (x
, dlist
);
9961 x
= build_outer_var_ref (var
, ctx
);
9962 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
9964 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
9965 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
9967 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
9969 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
9971 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
9972 if (code
== MINUS_EXPR
)
9976 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
9979 if (ctx
->scan_exclusive
)
9980 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
9982 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
9983 gimplify_assign (var2
, x
, &scan1_list
);
9984 if (ctx
->scan_inclusive
)
9985 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
9989 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
9992 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
9993 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
9995 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
9998 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
9999 unshare_expr (rprival_ref
));
10000 gimplify_assign (rprival_ref
, x
, &reduc_list
);
10004 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10005 gimple_seq_add_stmt (&scan1_list
, g
);
10006 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10007 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10008 ? scan_stmt4
: scan_stmt2
), g
);
10010 tree controlb
= create_tmp_var (boolean_type_node
);
10011 tree controlp
= create_tmp_var (ptr_type_node
);
10012 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10013 OMP_CLAUSE_DECL (nc
) = controlb
;
10014 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10016 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10017 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10018 OMP_CLAUSE_DECL (nc
) = controlp
;
10019 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10021 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10022 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10023 OMP_CLAUSE_DECL (nc
) = controlb
;
10024 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10026 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10027 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10028 OMP_CLAUSE_DECL (nc
) = controlp
;
10029 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10031 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10033 *cp1
= gimple_omp_for_clauses (stmt
);
10034 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
10035 *cp2
= gimple_omp_for_clauses (new_stmt
);
10036 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
10040 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
10041 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
10043 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
10045 gsi_remove (&input3_gsi
, true);
10046 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
10048 gsi_remove (&scan3_gsi
, true);
10049 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
10051 gsi_remove (&input4_gsi
, true);
10052 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
10054 gsi_remove (&scan4_gsi
, true);
10058 gimple_omp_set_body (scan_stmt1
, scan1_list
);
10059 gimple_omp_set_body (input_stmt2
, input2_list
);
10062 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
10064 gsi_remove (&input1_gsi
, true);
10065 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
10067 gsi_remove (&scan1_gsi
, true);
10068 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
10070 gsi_remove (&input2_gsi
, true);
10071 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
10073 gsi_remove (&scan2_gsi
, true);
10075 gimple_seq_add_seq (body_p
, clist
);
10077 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10078 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10079 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10080 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
10081 gimple_seq_add_stmt (body_p
, g
);
10082 g
= gimple_build_label (lab1
);
10083 gimple_seq_add_stmt (body_p
, g
);
10084 gimple_seq_add_seq (body_p
, thr01_list
);
10085 g
= gimple_build_goto (lab3
);
10086 gimple_seq_add_stmt (body_p
, g
);
10087 g
= gimple_build_label (lab2
);
10088 gimple_seq_add_stmt (body_p
, g
);
10089 gimple_seq_add_seq (body_p
, thrn1_list
);
10090 g
= gimple_build_label (lab3
);
10091 gimple_seq_add_stmt (body_p
, g
);
10093 g
= gimple_build_assign (ivar
, size_zero_node
);
10094 gimple_seq_add_stmt (body_p
, g
);
10096 gimple_seq_add_stmt (body_p
, stmt
);
10097 gimple_seq_add_seq (body_p
, body
);
10098 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
10101 g
= gimple_build_omp_return (true);
10102 gimple_seq_add_stmt (body_p
, g
);
10103 gimple_seq_add_seq (body_p
, mdlist
);
10105 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10106 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10107 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
10108 gimple_seq_add_stmt (body_p
, g
);
10109 g
= gimple_build_label (lab1
);
10110 gimple_seq_add_stmt (body_p
, g
);
10112 g
= omp_build_barrier (NULL
);
10113 gimple_seq_add_stmt (body_p
, g
);
10115 tree down
= create_tmp_var (unsigned_type_node
);
10116 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
10117 gimple_seq_add_stmt (body_p
, g
);
10119 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
10120 gimple_seq_add_stmt (body_p
, g
);
10122 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
10123 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
10124 gimple_seq_add_stmt (body_p
, g
);
10126 tree thread_numu
= create_tmp_var (unsigned_type_node
);
10127 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
10128 gimple_seq_add_stmt (body_p
, g
);
10130 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
10131 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
10132 build_int_cst (unsigned_type_node
, 1));
10133 gimple_seq_add_stmt (body_p
, g
);
10135 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10136 g
= gimple_build_label (lab3
);
10137 gimple_seq_add_stmt (body_p
, g
);
10139 tree twok
= create_tmp_var (unsigned_type_node
);
10140 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
10141 gimple_seq_add_stmt (body_p
, g
);
10143 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
10144 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
10145 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
10146 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
10147 gimple_seq_add_stmt (body_p
, g
);
10148 g
= gimple_build_label (lab4
);
10149 gimple_seq_add_stmt (body_p
, g
);
10150 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
10151 gimple_seq_add_stmt (body_p
, g
);
10152 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10153 gimple_seq_add_stmt (body_p
, g
);
10155 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
10156 gimple_seq_add_stmt (body_p
, g
);
10157 g
= gimple_build_label (lab6
);
10158 gimple_seq_add_stmt (body_p
, g
);
10160 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10161 gimple_seq_add_stmt (body_p
, g
);
10163 g
= gimple_build_label (lab5
);
10164 gimple_seq_add_stmt (body_p
, g
);
10166 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
10167 gimple_seq_add_stmt (body_p
, g
);
10169 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
10170 DECL_GIMPLE_REG_P (cplx
) = 1;
10171 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
10172 gimple_call_set_lhs (g
, cplx
);
10173 gimple_seq_add_stmt (body_p
, g
);
10174 tree mul
= create_tmp_var (unsigned_type_node
);
10175 g
= gimple_build_assign (mul
, REALPART_EXPR
,
10176 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
10177 gimple_seq_add_stmt (body_p
, g
);
10178 tree ovf
= create_tmp_var (unsigned_type_node
);
10179 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
10180 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
10181 gimple_seq_add_stmt (body_p
, g
);
10183 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
10184 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
10185 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
10187 gimple_seq_add_stmt (body_p
, g
);
10188 g
= gimple_build_label (lab7
);
10189 gimple_seq_add_stmt (body_p
, g
);
10191 tree andv
= create_tmp_var (unsigned_type_node
);
10192 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
10193 gimple_seq_add_stmt (body_p
, g
);
10194 tree andvm1
= create_tmp_var (unsigned_type_node
);
10195 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
10196 build_minus_one_cst (unsigned_type_node
));
10197 gimple_seq_add_stmt (body_p
, g
);
10199 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
10200 gimple_seq_add_stmt (body_p
, g
);
10202 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
10203 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
10204 gimple_seq_add_stmt (body_p
, g
);
10205 g
= gimple_build_label (lab9
);
10206 gimple_seq_add_stmt (body_p
, g
);
10207 gimple_seq_add_seq (body_p
, reduc_list
);
10208 g
= gimple_build_label (lab8
);
10209 gimple_seq_add_stmt (body_p
, g
);
10211 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
10212 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
10213 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
10214 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
10216 gimple_seq_add_stmt (body_p
, g
);
10217 g
= gimple_build_label (lab10
);
10218 gimple_seq_add_stmt (body_p
, g
);
10219 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
10220 gimple_seq_add_stmt (body_p
, g
);
10221 g
= gimple_build_goto (lab12
);
10222 gimple_seq_add_stmt (body_p
, g
);
10223 g
= gimple_build_label (lab11
);
10224 gimple_seq_add_stmt (body_p
, g
);
10225 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10226 gimple_seq_add_stmt (body_p
, g
);
10227 g
= gimple_build_label (lab12
);
10228 gimple_seq_add_stmt (body_p
, g
);
10230 g
= omp_build_barrier (NULL
);
10231 gimple_seq_add_stmt (body_p
, g
);
10233 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
10235 gimple_seq_add_stmt (body_p
, g
);
10237 g
= gimple_build_label (lab2
);
10238 gimple_seq_add_stmt (body_p
, g
);
10240 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10241 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10242 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10243 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
10244 gimple_seq_add_stmt (body_p
, g
);
10245 g
= gimple_build_label (lab1
);
10246 gimple_seq_add_stmt (body_p
, g
);
10247 gimple_seq_add_seq (body_p
, thr02_list
);
10248 g
= gimple_build_goto (lab3
);
10249 gimple_seq_add_stmt (body_p
, g
);
10250 g
= gimple_build_label (lab2
);
10251 gimple_seq_add_stmt (body_p
, g
);
10252 gimple_seq_add_seq (body_p
, thrn2_list
);
10253 g
= gimple_build_label (lab3
);
10254 gimple_seq_add_stmt (body_p
, g
);
10256 g
= gimple_build_assign (ivar
, size_zero_node
);
10257 gimple_seq_add_stmt (body_p
, g
);
10258 gimple_seq_add_stmt (body_p
, new_stmt
);
10259 gimple_seq_add_seq (body_p
, new_body
);
10261 gimple_seq new_dlist
= NULL
;
10262 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10263 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10264 tree num_threadsm1
= create_tmp_var (integer_type_node
);
10265 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
10266 integer_minus_one_node
);
10267 gimple_seq_add_stmt (&new_dlist
, g
);
10268 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
10269 gimple_seq_add_stmt (&new_dlist
, g
);
10270 g
= gimple_build_label (lab1
);
10271 gimple_seq_add_stmt (&new_dlist
, g
);
10272 gimple_seq_add_seq (&new_dlist
, last_list
);
10273 g
= gimple_build_label (lab2
);
10274 gimple_seq_add_stmt (&new_dlist
, g
);
10275 gimple_seq_add_seq (&new_dlist
, *dlist
);
10276 *dlist
= new_dlist
;
10279 /* Lower code for an OMP loop directive. */
10282 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10284 tree
*rhs_p
, block
;
10285 struct omp_for_data fd
, *fdp
= NULL
;
10286 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
10288 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
10289 gimple_seq cnt_list
= NULL
, clist
= NULL
;
10290 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
10293 push_gimplify_context ();
10295 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
10297 block
= make_node (BLOCK
);
10298 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
10299 /* Replace at gsi right away, so that 'stmt' is no member
10300 of a sequence anymore as we're going to add to a different
10302 gsi_replace (gsi_p
, new_stmt
, true);
10304 /* Move declaration of temporaries in the loop body before we make
10306 omp_for_body
= gimple_omp_body (stmt
);
10307 if (!gimple_seq_empty_p (omp_for_body
)
10308 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
10311 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
10312 tree vars
= gimple_bind_vars (inner_bind
);
10313 gimple_bind_append_vars (new_stmt
, vars
);
10314 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10315 keep them on the inner_bind and it's block. */
10316 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
10317 if (gimple_bind_block (inner_bind
))
10318 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
10321 if (gimple_omp_for_combined_into_p (stmt
))
10323 omp_extract_for_data (stmt
, &fd
, NULL
);
10326 /* We need two temporaries with fd.loop.v type (istart/iend)
10327 and then (fd.collapse - 1) temporaries with the same
10328 type for count2 ... countN-1 vars if not constant. */
10330 tree type
= fd
.iter_type
;
10331 if (fd
.collapse
> 1
10332 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
10333 count
+= fd
.collapse
- 1;
10335 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
10336 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
10337 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
10339 tree clauses
= *pc
;
10342 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
10343 OMP_CLAUSE__LOOPTEMP_
);
10344 if (ctx
->simt_stmt
)
10345 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
10346 OMP_CLAUSE__LOOPTEMP_
);
10347 for (i
= 0; i
< count
; i
++)
10352 gcc_assert (outerc
);
10353 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
10354 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
10355 OMP_CLAUSE__LOOPTEMP_
);
10359 /* If there are 2 adjacent SIMD stmts, one with _simt_
10360 clause, another without, make sure they have the same
10361 decls in _looptemp_ clauses, because the outer stmt
10362 they are combined into will look up just one inner_stmt. */
10363 if (ctx
->simt_stmt
)
10364 temp
= OMP_CLAUSE_DECL (simtc
);
10366 temp
= create_tmp_var (type
);
10367 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
10369 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
10370 OMP_CLAUSE_DECL (*pc
) = temp
;
10371 pc
= &OMP_CLAUSE_CHAIN (*pc
);
10372 if (ctx
->simt_stmt
)
10373 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
10374 OMP_CLAUSE__LOOPTEMP_
);
10379 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10383 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
10384 OMP_CLAUSE_REDUCTION
);
10385 tree rtmp
= NULL_TREE
;
10388 tree type
= build_pointer_type (pointer_sized_int_node
);
10389 tree temp
= create_tmp_var (type
);
10390 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
10391 OMP_CLAUSE_DECL (c
) = temp
;
10392 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
10393 gimple_omp_for_set_clauses (stmt
, c
);
10394 lower_omp_task_reductions (ctx
, OMP_FOR
,
10395 gimple_omp_for_clauses (stmt
),
10396 &tred_ilist
, &tred_dlist
);
10398 rtmp
= make_ssa_name (type
);
10399 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
10402 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
10405 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
10407 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
10408 gimple_omp_for_pre_body (stmt
));
10410 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10412 /* Lower the header expressions. At this point, we can assume that
10413 the header is of the form:
10415 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10417 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10418 using the .omp_data_s mapping, if needed. */
10419 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
10421 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
10422 if (!is_gimple_min_invariant (*rhs_p
))
10423 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10424 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10425 recompute_tree_invariant_for_addr_expr (*rhs_p
);
10427 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
10428 if (!is_gimple_min_invariant (*rhs_p
))
10429 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10430 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10431 recompute_tree_invariant_for_addr_expr (*rhs_p
);
10433 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
10434 if (!is_gimple_min_invariant (*rhs_p
))
10435 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10438 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
10440 gimple_seq_add_seq (&body
, cnt_list
);
10442 /* Once lowered, extract the bounds and clauses. */
10443 omp_extract_for_data (stmt
, &fd
, NULL
);
10445 if (is_gimple_omp_oacc (ctx
->stmt
)
10446 && !ctx_in_oacc_kernels_region (ctx
))
10447 lower_oacc_head_tail (gimple_location (stmt
),
10448 gimple_omp_for_clauses (stmt
),
10449 &oacc_head
, &oacc_tail
, ctx
);
10451 /* Add OpenACC partitioning and reduction markers just before the loop. */
10453 gimple_seq_add_seq (&body
, oacc_head
);
10455 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
10457 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10458 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10459 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10460 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10462 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
10463 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
10464 OMP_CLAUSE_LINEAR_STEP (c
)
10465 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
10469 bool phony_loop
= (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
10470 && gimple_omp_for_grid_phony (stmt
));
10471 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
10472 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10474 gcc_assert (!phony_loop
);
10475 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
10480 gimple_seq_add_stmt (&body
, stmt
);
10481 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
10485 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
10488 /* After the loop, add exit clauses. */
10489 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
10493 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
10494 gcall
*g
= gimple_build_call (fndecl
, 0);
10495 gimple_seq_add_stmt (&body
, g
);
10496 gimple_seq_add_seq (&body
, clist
);
10497 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
10498 g
= gimple_build_call (fndecl
, 0);
10499 gimple_seq_add_stmt (&body
, g
);
10502 if (ctx
->cancellable
)
10503 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
10505 gimple_seq_add_seq (&body
, dlist
);
10509 gimple_seq_add_seq (&tred_ilist
, body
);
10513 body
= maybe_catch_exception (body
);
10517 /* Region exit marker goes at the end of the loop body. */
10518 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
10519 gimple_seq_add_stmt (&body
, g
);
10521 gimple_seq_add_seq (&body
, tred_dlist
);
10523 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
10526 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
10529 /* Add OpenACC joining and reduction markers just after the loop. */
10531 gimple_seq_add_seq (&body
, oacc_tail
);
10533 pop_gimplify_context (new_stmt
);
10535 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
10536 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
10537 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
10538 if (BLOCK_VARS (block
))
10539 TREE_USED (block
) = 1;
10541 gimple_bind_set_body (new_stmt
, body
);
10542 gimple_omp_set_body (stmt
, NULL
);
10543 gimple_omp_for_set_pre_body (stmt
, NULL
);
10546 /* Callback for walk_stmts. Check if the current statement only contains
10547 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10550 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
10551 bool *handled_ops_p
,
10552 struct walk_stmt_info
*wi
)
10554 int *info
= (int *) wi
->info
;
10555 gimple
*stmt
= gsi_stmt (*gsi_p
);
10557 *handled_ops_p
= true;
10558 switch (gimple_code (stmt
))
10564 case GIMPLE_OMP_FOR
:
10565 case GIMPLE_OMP_SECTIONS
:
10566 *info
= *info
== 0 ? 1 : -1;
10575 struct omp_taskcopy_context
10577 /* This field must be at the beginning, as we do "inheritance": Some
10578 callback functions for tree-inline.c (e.g., omp_copy_decl)
10579 receive a copy_body_data pointer that is up-casted to an
10580 omp_context pointer. */
10586 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
10588 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
10590 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
10591 return create_tmp_var (TREE_TYPE (var
));
10597 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
10599 tree name
, new_fields
= NULL
, type
, f
;
10601 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
10602 name
= DECL_NAME (TYPE_NAME (orig_type
));
10603 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
10604 TYPE_DECL
, name
, type
);
10605 TYPE_NAME (type
) = name
;
10607 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
10609 tree new_f
= copy_node (f
);
10610 DECL_CONTEXT (new_f
) = type
;
10611 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
10612 TREE_CHAIN (new_f
) = new_fields
;
10613 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10614 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10615 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
10617 new_fields
= new_f
;
10618 tcctx
->cb
.decl_map
->put (f
, new_f
);
10620 TYPE_FIELDS (type
) = nreverse (new_fields
);
10621 layout_type (type
);
10625 /* Create task copyfn. */
10628 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
10630 struct function
*child_cfun
;
10631 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
10632 tree record_type
, srecord_type
, bind
, list
;
10633 bool record_needs_remap
= false, srecord_needs_remap
= false;
10635 struct omp_taskcopy_context tcctx
;
10636 location_t loc
= gimple_location (task_stmt
);
10637 size_t looptempno
= 0;
10639 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
10640 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
10641 gcc_assert (child_cfun
->cfg
== NULL
);
10642 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
10644 /* Reset DECL_CONTEXT on function arguments. */
10645 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
10646 DECL_CONTEXT (t
) = child_fn
;
10648 /* Populate the function. */
10649 push_gimplify_context ();
10650 push_cfun (child_cfun
);
10652 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
10653 TREE_SIDE_EFFECTS (bind
) = 1;
10655 DECL_SAVED_TREE (child_fn
) = bind
;
10656 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
10658 /* Remap src and dst argument types if needed. */
10659 record_type
= ctx
->record_type
;
10660 srecord_type
= ctx
->srecord_type
;
10661 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
10662 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10664 record_needs_remap
= true;
10667 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
10668 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10670 srecord_needs_remap
= true;
10674 if (record_needs_remap
|| srecord_needs_remap
)
10676 memset (&tcctx
, '\0', sizeof (tcctx
));
10677 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
10678 tcctx
.cb
.dst_fn
= child_fn
;
10679 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
10680 gcc_checking_assert (tcctx
.cb
.src_node
);
10681 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
10682 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
10683 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
10684 tcctx
.cb
.eh_lp_nr
= 0;
10685 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
10686 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
10689 if (record_needs_remap
)
10690 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
10691 if (srecord_needs_remap
)
10692 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
10695 tcctx
.cb
.decl_map
= NULL
;
10697 arg
= DECL_ARGUMENTS (child_fn
);
10698 TREE_TYPE (arg
) = build_pointer_type (record_type
);
10699 sarg
= DECL_CHAIN (arg
);
10700 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
10702 /* First pass: initialize temporaries used in record_type and srecord_type
10703 sizes and field offsets. */
10704 if (tcctx
.cb
.decl_map
)
10705 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10706 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10710 decl
= OMP_CLAUSE_DECL (c
);
10711 p
= tcctx
.cb
.decl_map
->get (decl
);
10714 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10715 sf
= (tree
) n
->value
;
10716 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10717 src
= build_simple_mem_ref_loc (loc
, sarg
);
10718 src
= omp_build_component_ref (src
, sf
);
10719 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
10720 append_to_statement_list (t
, &list
);
10723 /* Second pass: copy shared var pointers and copy construct non-VLA
10724 firstprivate vars. */
10725 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10726 switch (OMP_CLAUSE_CODE (c
))
10728 splay_tree_key key
;
10729 case OMP_CLAUSE_SHARED
:
10730 decl
= OMP_CLAUSE_DECL (c
);
10731 key
= (splay_tree_key
) decl
;
10732 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
10733 key
= (splay_tree_key
) &DECL_UID (decl
);
10734 n
= splay_tree_lookup (ctx
->field_map
, key
);
10737 f
= (tree
) n
->value
;
10738 if (tcctx
.cb
.decl_map
)
10739 f
= *tcctx
.cb
.decl_map
->get (f
);
10740 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
10741 sf
= (tree
) n
->value
;
10742 if (tcctx
.cb
.decl_map
)
10743 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10744 src
= build_simple_mem_ref_loc (loc
, sarg
);
10745 src
= omp_build_component_ref (src
, sf
);
10746 dst
= build_simple_mem_ref_loc (loc
, arg
);
10747 dst
= omp_build_component_ref (dst
, f
);
10748 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10749 append_to_statement_list (t
, &list
);
10751 case OMP_CLAUSE_REDUCTION
:
10752 case OMP_CLAUSE_IN_REDUCTION
:
10753 decl
= OMP_CLAUSE_DECL (c
);
10754 if (TREE_CODE (decl
) == MEM_REF
)
10756 decl
= TREE_OPERAND (decl
, 0);
10757 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
10758 decl
= TREE_OPERAND (decl
, 0);
10759 if (TREE_CODE (decl
) == INDIRECT_REF
10760 || TREE_CODE (decl
) == ADDR_EXPR
)
10761 decl
= TREE_OPERAND (decl
, 0);
10763 key
= (splay_tree_key
) decl
;
10764 n
= splay_tree_lookup (ctx
->field_map
, key
);
10767 f
= (tree
) n
->value
;
10768 if (tcctx
.cb
.decl_map
)
10769 f
= *tcctx
.cb
.decl_map
->get (f
);
10770 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
10771 sf
= (tree
) n
->value
;
10772 if (tcctx
.cb
.decl_map
)
10773 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10774 src
= build_simple_mem_ref_loc (loc
, sarg
);
10775 src
= omp_build_component_ref (src
, sf
);
10776 if (decl
!= OMP_CLAUSE_DECL (c
)
10777 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
10778 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
10779 src
= build_simple_mem_ref_loc (loc
, src
);
10780 dst
= build_simple_mem_ref_loc (loc
, arg
);
10781 dst
= omp_build_component_ref (dst
, f
);
10782 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10783 append_to_statement_list (t
, &list
);
10785 case OMP_CLAUSE__LOOPTEMP_
:
10786 /* Fields for first two _looptemp_ clauses are initialized by
10787 GOMP_taskloop*, the rest are handled like firstprivate. */
10788 if (looptempno
< 2)
10794 case OMP_CLAUSE__REDUCTEMP_
:
10795 case OMP_CLAUSE_FIRSTPRIVATE
:
10796 decl
= OMP_CLAUSE_DECL (c
);
10797 if (is_variable_sized (decl
))
10799 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10802 f
= (tree
) n
->value
;
10803 if (tcctx
.cb
.decl_map
)
10804 f
= *tcctx
.cb
.decl_map
->get (f
);
10805 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10808 sf
= (tree
) n
->value
;
10809 if (tcctx
.cb
.decl_map
)
10810 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10811 src
= build_simple_mem_ref_loc (loc
, sarg
);
10812 src
= omp_build_component_ref (src
, sf
);
10813 if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
10814 src
= build_simple_mem_ref_loc (loc
, src
);
10818 dst
= build_simple_mem_ref_loc (loc
, arg
);
10819 dst
= omp_build_component_ref (dst
, f
);
10820 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
10821 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10823 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
10824 append_to_statement_list (t
, &list
);
10826 case OMP_CLAUSE_PRIVATE
:
10827 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
10829 decl
= OMP_CLAUSE_DECL (c
);
10830 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10831 f
= (tree
) n
->value
;
10832 if (tcctx
.cb
.decl_map
)
10833 f
= *tcctx
.cb
.decl_map
->get (f
);
10834 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10837 sf
= (tree
) n
->value
;
10838 if (tcctx
.cb
.decl_map
)
10839 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10840 src
= build_simple_mem_ref_loc (loc
, sarg
);
10841 src
= omp_build_component_ref (src
, sf
);
10842 if (use_pointer_for_field (decl
, NULL
))
10843 src
= build_simple_mem_ref_loc (loc
, src
);
10847 dst
= build_simple_mem_ref_loc (loc
, arg
);
10848 dst
= omp_build_component_ref (dst
, f
);
10849 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10850 append_to_statement_list (t
, &list
);
10856 /* Last pass: handle VLA firstprivates. */
10857 if (tcctx
.cb
.decl_map
)
10858 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10859 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10863 decl
= OMP_CLAUSE_DECL (c
);
10864 if (!is_variable_sized (decl
))
10866 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10869 f
= (tree
) n
->value
;
10870 f
= *tcctx
.cb
.decl_map
->get (f
);
10871 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
10872 ind
= DECL_VALUE_EXPR (decl
);
10873 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
10874 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
10875 n
= splay_tree_lookup (ctx
->sfield_map
,
10876 (splay_tree_key
) TREE_OPERAND (ind
, 0));
10877 sf
= (tree
) n
->value
;
10878 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10879 src
= build_simple_mem_ref_loc (loc
, sarg
);
10880 src
= omp_build_component_ref (src
, sf
);
10881 src
= build_simple_mem_ref_loc (loc
, src
);
10882 dst
= build_simple_mem_ref_loc (loc
, arg
);
10883 dst
= omp_build_component_ref (dst
, f
);
10884 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
10885 append_to_statement_list (t
, &list
);
10886 n
= splay_tree_lookup (ctx
->field_map
,
10887 (splay_tree_key
) TREE_OPERAND (ind
, 0));
10888 df
= (tree
) n
->value
;
10889 df
= *tcctx
.cb
.decl_map
->get (df
);
10890 ptr
= build_simple_mem_ref_loc (loc
, arg
);
10891 ptr
= omp_build_component_ref (ptr
, df
);
10892 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
10893 build_fold_addr_expr_loc (loc
, dst
));
10894 append_to_statement_list (t
, &list
);
10897 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
10898 append_to_statement_list (t
, &list
);
10900 if (tcctx
.cb
.decl_map
)
10901 delete tcctx
.cb
.decl_map
;
10902 pop_gimplify_context (NULL
);
10903 BIND_EXPR_BODY (bind
) = list
;
10908 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
10912 size_t cnt
[4] = { 0, 0, 0, 0 }, idx
= 2, i
;
10914 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
10915 gcc_assert (clauses
);
10916 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10917 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
10918 switch (OMP_CLAUSE_DEPEND_KIND (c
))
10920 case OMP_CLAUSE_DEPEND_LAST
:
10921 /* Lowering already done at gimplification. */
10923 case OMP_CLAUSE_DEPEND_IN
:
10926 case OMP_CLAUSE_DEPEND_OUT
:
10927 case OMP_CLAUSE_DEPEND_INOUT
:
10930 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
10933 case OMP_CLAUSE_DEPEND_DEPOBJ
:
10936 case OMP_CLAUSE_DEPEND_SOURCE
:
10937 case OMP_CLAUSE_DEPEND_SINK
:
10940 gcc_unreachable ();
10942 if (cnt
[1] || cnt
[3])
10944 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3];
10945 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
);
10946 tree array
= create_tmp_var (type
);
10947 TREE_ADDRESSABLE (array
) = 1;
10948 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
10952 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
10953 gimple_seq_add_stmt (iseq
, g
);
10954 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
10957 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
10958 gimple_seq_add_stmt (iseq
, g
);
10959 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
10961 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
10962 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
10963 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
10964 gimple_seq_add_stmt (iseq
, g
);
10966 for (i
= 0; i
< 4; i
++)
10970 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10971 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
10975 switch (OMP_CLAUSE_DEPEND_KIND (c
))
10977 case OMP_CLAUSE_DEPEND_IN
:
10981 case OMP_CLAUSE_DEPEND_OUT
:
10982 case OMP_CLAUSE_DEPEND_INOUT
:
10986 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
10990 case OMP_CLAUSE_DEPEND_DEPOBJ
:
10995 gcc_unreachable ();
10997 tree t
= OMP_CLAUSE_DECL (c
);
10998 t
= fold_convert (ptr_type_node
, t
);
10999 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
11000 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
11001 NULL_TREE
, NULL_TREE
);
11002 g
= gimple_build_assign (r
, t
);
11003 gimple_seq_add_stmt (iseq
, g
);
11006 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
11007 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
11008 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
11009 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
11011 tree clobber
= build_constructor (type
, NULL
);
11012 TREE_THIS_VOLATILE (clobber
) = 1;
11013 g
= gimple_build_assign (array
, clobber
);
11014 gimple_seq_add_stmt (oseq
, g
);
11017 /* Lower the OpenMP parallel or task directive in the current statement
11018 in GSI_P. CTX holds context information for the directive. */
11021 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11025 gimple
*stmt
= gsi_stmt (*gsi_p
);
11026 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
11027 gimple_seq par_body
;
11028 location_t loc
= gimple_location (stmt
);
11030 clauses
= gimple_omp_taskreg_clauses (stmt
);
11031 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11032 && gimple_omp_task_taskwait_p (stmt
))
11040 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
11041 par_body
= gimple_bind_body (par_bind
);
11043 child_fn
= ctx
->cb
.dst_fn
;
11044 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
11045 && !gimple_omp_parallel_combined_p (stmt
))
11047 struct walk_stmt_info wi
;
11050 memset (&wi
, 0, sizeof (wi
));
11052 wi
.val_only
= true;
11053 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
11055 gimple_omp_parallel_set_combined_p (stmt
, true);
11057 gimple_seq dep_ilist
= NULL
;
11058 gimple_seq dep_olist
= NULL
;
11059 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11060 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11062 push_gimplify_context ();
11063 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11064 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
11065 &dep_ilist
, &dep_olist
);
11068 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11069 && gimple_omp_task_taskwait_p (stmt
))
11073 gsi_replace (gsi_p
, dep_bind
, true);
11074 gimple_bind_add_seq (dep_bind
, dep_ilist
);
11075 gimple_bind_add_stmt (dep_bind
, stmt
);
11076 gimple_bind_add_seq (dep_bind
, dep_olist
);
11077 pop_gimplify_context (dep_bind
);
11082 if (ctx
->srecord_type
)
11083 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
11085 gimple_seq tskred_ilist
= NULL
;
11086 gimple_seq tskred_olist
= NULL
;
11087 if ((is_task_ctx (ctx
)
11088 && gimple_omp_task_taskloop_p (ctx
->stmt
)
11089 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
11090 OMP_CLAUSE_REDUCTION
))
11091 || (is_parallel_ctx (ctx
)
11092 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
11093 OMP_CLAUSE__REDUCTEMP_
)))
11095 if (dep_bind
== NULL
)
11097 push_gimplify_context ();
11098 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11100 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
11102 gimple_omp_taskreg_clauses (ctx
->stmt
),
11103 &tskred_ilist
, &tskred_olist
);
11106 push_gimplify_context ();
11108 gimple_seq par_olist
= NULL
;
11109 gimple_seq par_ilist
= NULL
;
11110 gimple_seq par_rlist
= NULL
;
11111 bool phony_construct
= gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
11112 && gimple_omp_parallel_grid_phony (as_a
<gomp_parallel
*> (stmt
));
11113 if (phony_construct
&& ctx
->record_type
)
11115 gcc_checking_assert (!ctx
->receiver_decl
);
11116 ctx
->receiver_decl
= create_tmp_var
11117 (build_reference_type (ctx
->record_type
), ".omp_rec");
11119 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
11120 lower_omp (&par_body
, ctx
);
11121 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
)
11122 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
11124 /* Declare all the variables created by mapping and the variables
11125 declared in the scope of the parallel body. */
11126 record_vars_into (ctx
->block_vars
, child_fn
);
11127 maybe_remove_omp_member_access_dummy_vars (par_bind
);
11128 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
11130 if (ctx
->record_type
)
11133 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
11134 : ctx
->record_type
, ".omp_data_o");
11135 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11136 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11137 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
11140 gimple_seq olist
= NULL
;
11141 gimple_seq ilist
= NULL
;
11142 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
11143 lower_send_shared_vars (&ilist
, &olist
, ctx
);
11145 if (ctx
->record_type
)
11147 tree clobber
= build_constructor (TREE_TYPE (ctx
->sender_decl
), NULL
);
11148 TREE_THIS_VOLATILE (clobber
) = 1;
11149 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
11153 /* Once all the expansions are done, sequence all the different
11154 fragments inside gimple_omp_body. */
11156 gimple_seq new_body
= NULL
;
11158 if (ctx
->record_type
)
11160 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
11161 /* fixup_child_record_type might have changed receiver_decl's type. */
11162 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
11163 gimple_seq_add_stmt (&new_body
,
11164 gimple_build_assign (ctx
->receiver_decl
, t
));
11167 gimple_seq_add_seq (&new_body
, par_ilist
);
11168 gimple_seq_add_seq (&new_body
, par_body
);
11169 gimple_seq_add_seq (&new_body
, par_rlist
);
11170 if (ctx
->cancellable
)
11171 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
11172 gimple_seq_add_seq (&new_body
, par_olist
);
11173 new_body
= maybe_catch_exception (new_body
);
11174 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
11175 gimple_seq_add_stmt (&new_body
,
11176 gimple_build_omp_continue (integer_zero_node
,
11177 integer_zero_node
));
11178 if (!phony_construct
)
11180 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
11181 gimple_omp_set_body (stmt
, new_body
);
11184 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
11185 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11187 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
11188 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
11189 gimple_bind_add_seq (bind
, ilist
);
11190 if (!phony_construct
)
11191 gimple_bind_add_stmt (bind
, stmt
);
11193 gimple_bind_add_seq (bind
, new_body
);
11194 gimple_bind_add_seq (bind
, olist
);
11196 pop_gimplify_context (NULL
);
11200 gimple_bind_add_seq (dep_bind
, dep_ilist
);
11201 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
11202 gimple_bind_add_stmt (dep_bind
, bind
);
11203 gimple_bind_add_seq (dep_bind
, tskred_olist
);
11204 gimple_bind_add_seq (dep_bind
, dep_olist
);
11205 pop_gimplify_context (dep_bind
);
11209 /* Lower the GIMPLE_OMP_TARGET in the current statement
11210 in GSI_P. CTX holds context information for the directive. */
11213 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11216 tree child_fn
, t
, c
;
11217 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
11218 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
11219 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
11220 location_t loc
= gimple_location (stmt
);
11221 bool offloaded
, data_region
;
11222 unsigned int map_cnt
= 0;
11224 offloaded
= is_gimple_omp_offloaded (stmt
);
11225 switch (gimple_omp_target_kind (stmt
))
11227 case GF_OMP_TARGET_KIND_REGION
:
11228 case GF_OMP_TARGET_KIND_UPDATE
:
11229 case GF_OMP_TARGET_KIND_ENTER_DATA
:
11230 case GF_OMP_TARGET_KIND_EXIT_DATA
:
11231 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
11232 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
11233 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
11234 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
11235 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
11236 data_region
= false;
11238 case GF_OMP_TARGET_KIND_DATA
:
11239 case GF_OMP_TARGET_KIND_OACC_DATA
:
11240 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
11241 data_region
= true;
11244 gcc_unreachable ();
11247 clauses
= gimple_omp_target_clauses (stmt
);
11249 gimple_seq dep_ilist
= NULL
;
11250 gimple_seq dep_olist
= NULL
;
11251 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11253 push_gimplify_context ();
11254 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11255 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
11256 &dep_ilist
, &dep_olist
);
11263 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
11264 tgt_body
= gimple_bind_body (tgt_bind
);
11266 else if (data_region
)
11267 tgt_body
= gimple_omp_body (stmt
);
11268 child_fn
= ctx
->cb
.dst_fn
;
11270 push_gimplify_context ();
11273 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11274 switch (OMP_CLAUSE_CODE (c
))
11280 case OMP_CLAUSE_MAP
:
11282 /* First check what we're prepared to handle in the following. */
11283 switch (OMP_CLAUSE_MAP_KIND (c
))
11285 case GOMP_MAP_ALLOC
:
11287 case GOMP_MAP_FROM
:
11288 case GOMP_MAP_TOFROM
:
11289 case GOMP_MAP_POINTER
:
11290 case GOMP_MAP_TO_PSET
:
11291 case GOMP_MAP_DELETE
:
11292 case GOMP_MAP_RELEASE
:
11293 case GOMP_MAP_ALWAYS_TO
:
11294 case GOMP_MAP_ALWAYS_FROM
:
11295 case GOMP_MAP_ALWAYS_TOFROM
:
11296 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
11297 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
11298 case GOMP_MAP_STRUCT
:
11299 case GOMP_MAP_ALWAYS_POINTER
:
11301 case GOMP_MAP_FORCE_ALLOC
:
11302 case GOMP_MAP_FORCE_TO
:
11303 case GOMP_MAP_FORCE_FROM
:
11304 case GOMP_MAP_FORCE_TOFROM
:
11305 case GOMP_MAP_FORCE_PRESENT
:
11306 case GOMP_MAP_FORCE_DEVICEPTR
:
11307 case GOMP_MAP_DEVICE_RESIDENT
:
11308 case GOMP_MAP_LINK
:
11309 gcc_assert (is_gimple_omp_oacc (stmt
));
11312 gcc_unreachable ();
11316 case OMP_CLAUSE_TO
:
11317 case OMP_CLAUSE_FROM
:
11319 var
= OMP_CLAUSE_DECL (c
);
11322 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
11323 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11324 && (OMP_CLAUSE_MAP_KIND (c
)
11325 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
11330 if (DECL_SIZE (var
)
11331 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
11333 tree var2
= DECL_VALUE_EXPR (var
);
11334 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
11335 var2
= TREE_OPERAND (var2
, 0);
11336 gcc_assert (DECL_P (var2
));
11341 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11342 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11343 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
11345 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11347 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
11348 && varpool_node::get_create (var
)->offloadable
)
11351 tree type
= build_pointer_type (TREE_TYPE (var
));
11352 tree new_var
= lookup_decl (var
, ctx
);
11353 x
= create_tmp_var_raw (type
, get_name (new_var
));
11354 gimple_add_tmp_var (x
);
11355 x
= build_simple_mem_ref (x
);
11356 SET_DECL_VALUE_EXPR (new_var
, x
);
11357 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11362 if (!maybe_lookup_field (var
, ctx
))
11365 /* Don't remap oacc parallel reduction variables, because the
11366 intermediate result must be local to each gang. */
11367 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11368 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
11370 x
= build_receiver_ref (var
, true, ctx
);
11371 tree new_var
= lookup_decl (var
, ctx
);
11373 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11374 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11375 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11376 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11377 x
= build_simple_mem_ref (x
);
11378 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11380 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11381 if (omp_is_reference (new_var
)
11382 && TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
)
11384 /* Create a local object to hold the instance
11386 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
11387 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
11388 tree inst
= create_tmp_var (type
, id
);
11389 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
11390 x
= build_fold_addr_expr (inst
);
11392 gimplify_assign (new_var
, x
, &fplist
);
11394 else if (DECL_P (new_var
))
11396 SET_DECL_VALUE_EXPR (new_var
, x
);
11397 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11400 gcc_unreachable ();
11405 case OMP_CLAUSE_FIRSTPRIVATE
:
11406 if (is_oacc_parallel (ctx
))
11407 goto oacc_firstprivate
;
11409 var
= OMP_CLAUSE_DECL (c
);
11410 if (!omp_is_reference (var
)
11411 && !is_gimple_reg_type (TREE_TYPE (var
)))
11413 tree new_var
= lookup_decl (var
, ctx
);
11414 if (is_variable_sized (var
))
11416 tree pvar
= DECL_VALUE_EXPR (var
);
11417 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11418 pvar
= TREE_OPERAND (pvar
, 0);
11419 gcc_assert (DECL_P (pvar
));
11420 tree new_pvar
= lookup_decl (pvar
, ctx
);
11421 x
= build_fold_indirect_ref (new_pvar
);
11422 TREE_THIS_NOTRAP (x
) = 1;
11425 x
= build_receiver_ref (var
, true, ctx
);
11426 SET_DECL_VALUE_EXPR (new_var
, x
);
11427 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11431 case OMP_CLAUSE_PRIVATE
:
11432 if (is_gimple_omp_oacc (ctx
->stmt
))
11434 var
= OMP_CLAUSE_DECL (c
);
11435 if (is_variable_sized (var
))
11437 tree new_var
= lookup_decl (var
, ctx
);
11438 tree pvar
= DECL_VALUE_EXPR (var
);
11439 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11440 pvar
= TREE_OPERAND (pvar
, 0);
11441 gcc_assert (DECL_P (pvar
));
11442 tree new_pvar
= lookup_decl (pvar
, ctx
);
11443 x
= build_fold_indirect_ref (new_pvar
);
11444 TREE_THIS_NOTRAP (x
) = 1;
11445 SET_DECL_VALUE_EXPR (new_var
, x
);
11446 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11450 case OMP_CLAUSE_USE_DEVICE_PTR
:
11451 case OMP_CLAUSE_IS_DEVICE_PTR
:
11452 var
= OMP_CLAUSE_DECL (c
);
11454 if (is_variable_sized (var
))
11456 tree new_var
= lookup_decl (var
, ctx
);
11457 tree pvar
= DECL_VALUE_EXPR (var
);
11458 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11459 pvar
= TREE_OPERAND (pvar
, 0);
11460 gcc_assert (DECL_P (pvar
));
11461 tree new_pvar
= lookup_decl (pvar
, ctx
);
11462 x
= build_fold_indirect_ref (new_pvar
);
11463 TREE_THIS_NOTRAP (x
) = 1;
11464 SET_DECL_VALUE_EXPR (new_var
, x
);
11465 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11467 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11469 tree new_var
= lookup_decl (var
, ctx
);
11470 tree type
= build_pointer_type (TREE_TYPE (var
));
11471 x
= create_tmp_var_raw (type
, get_name (new_var
));
11472 gimple_add_tmp_var (x
);
11473 x
= build_simple_mem_ref (x
);
11474 SET_DECL_VALUE_EXPR (new_var
, x
);
11475 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11479 tree new_var
= lookup_decl (var
, ctx
);
11480 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
11481 gimple_add_tmp_var (x
);
11482 SET_DECL_VALUE_EXPR (new_var
, x
);
11483 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11490 target_nesting_level
++;
11491 lower_omp (&tgt_body
, ctx
);
11492 target_nesting_level
--;
11494 else if (data_region
)
11495 lower_omp (&tgt_body
, ctx
);
11499 /* Declare all the variables created by mapping and the variables
11500 declared in the scope of the target body. */
11501 record_vars_into (ctx
->block_vars
, child_fn
);
11502 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
11503 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
11508 if (ctx
->record_type
)
11511 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
11512 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11513 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11514 t
= make_tree_vec (3);
11515 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
11516 TREE_VEC_ELT (t
, 1)
11517 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
11518 ".omp_data_sizes");
11519 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
11520 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
11521 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
11522 tree tkind_type
= short_unsigned_type_node
;
11523 int talign_shift
= 8;
11524 TREE_VEC_ELT (t
, 2)
11525 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
11526 ".omp_data_kinds");
11527 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
11528 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
11529 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
11530 gimple_omp_target_set_data_arg (stmt
, t
);
11532 vec
<constructor_elt
, va_gc
> *vsize
;
11533 vec
<constructor_elt
, va_gc
> *vkind
;
11534 vec_alloc (vsize
, map_cnt
);
11535 vec_alloc (vkind
, map_cnt
);
11536 unsigned int map_idx
= 0;
11538 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11539 switch (OMP_CLAUSE_CODE (c
))
11541 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
11542 unsigned int talign
;
11547 case OMP_CLAUSE_MAP
:
11548 case OMP_CLAUSE_TO
:
11549 case OMP_CLAUSE_FROM
:
11550 oacc_firstprivate_map
:
11552 ovar
= OMP_CLAUSE_DECL (c
);
11553 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11554 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11555 || (OMP_CLAUSE_MAP_KIND (c
)
11556 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
11558 if (!DECL_P (ovar
))
11560 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11561 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
11563 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
11564 == get_base_address (ovar
));
11565 nc
= OMP_CLAUSE_CHAIN (c
);
11566 ovar
= OMP_CLAUSE_DECL (nc
);
11570 tree x
= build_sender_ref (ovar
, ctx
);
11572 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
11573 gimplify_assign (x
, v
, &ilist
);
11579 if (DECL_SIZE (ovar
)
11580 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
11582 tree ovar2
= DECL_VALUE_EXPR (ovar
);
11583 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
11584 ovar2
= TREE_OPERAND (ovar2
, 0);
11585 gcc_assert (DECL_P (ovar2
));
11588 if (!maybe_lookup_field (ovar
, ctx
))
11592 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
11593 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
11594 talign
= DECL_ALIGN_UNIT (ovar
);
11597 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11598 x
= build_sender_ref (ovar
, ctx
);
11600 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11601 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11602 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11603 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
11605 gcc_assert (offloaded
);
11607 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
11608 mark_addressable (avar
);
11609 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
11610 talign
= DECL_ALIGN_UNIT (avar
);
11611 avar
= build_fold_addr_expr (avar
);
11612 gimplify_assign (x
, avar
, &ilist
);
11614 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11616 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11617 if (!omp_is_reference (var
))
11619 if (is_gimple_reg (var
)
11620 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11621 TREE_NO_WARNING (var
) = 1;
11622 var
= build_fold_addr_expr (var
);
11625 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11626 gimplify_assign (x
, var
, &ilist
);
11628 else if (is_gimple_reg (var
))
11630 gcc_assert (offloaded
);
11631 tree avar
= create_tmp_var (TREE_TYPE (var
));
11632 mark_addressable (avar
);
11633 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
11634 if (GOMP_MAP_COPY_TO_P (map_kind
)
11635 || map_kind
== GOMP_MAP_POINTER
11636 || map_kind
== GOMP_MAP_TO_PSET
11637 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11639 /* If we need to initialize a temporary
11640 with VAR because it is not addressable, and
11641 the variable hasn't been initialized yet, then
11642 we'll get a warning for the store to avar.
11643 Don't warn in that case, the mapping might
11645 TREE_NO_WARNING (var
) = 1;
11646 gimplify_assign (avar
, var
, &ilist
);
11648 avar
= build_fold_addr_expr (avar
);
11649 gimplify_assign (x
, avar
, &ilist
);
11650 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
11651 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11652 && !TYPE_READONLY (TREE_TYPE (var
)))
11654 x
= unshare_expr (x
);
11655 x
= build_simple_mem_ref (x
);
11656 gimplify_assign (var
, x
, &olist
);
11661 var
= build_fold_addr_expr (var
);
11662 gimplify_assign (x
, var
, &ilist
);
11666 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11668 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11669 s
= TREE_TYPE (ovar
);
11670 if (TREE_CODE (s
) == REFERENCE_TYPE
)
11672 s
= TYPE_SIZE_UNIT (s
);
11675 s
= OMP_CLAUSE_SIZE (c
);
11676 if (s
== NULL_TREE
)
11677 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
11678 s
= fold_convert (size_type_node
, s
);
11679 purpose
= size_int (map_idx
++);
11680 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11681 if (TREE_CODE (s
) != INTEGER_CST
)
11682 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
11684 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
11685 switch (OMP_CLAUSE_CODE (c
))
11687 case OMP_CLAUSE_MAP
:
11688 tkind
= OMP_CLAUSE_MAP_KIND (c
);
11689 tkind_zero
= tkind
;
11690 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
11693 case GOMP_MAP_ALLOC
:
11695 case GOMP_MAP_FROM
:
11696 case GOMP_MAP_TOFROM
:
11697 case GOMP_MAP_ALWAYS_TO
:
11698 case GOMP_MAP_ALWAYS_FROM
:
11699 case GOMP_MAP_ALWAYS_TOFROM
:
11700 case GOMP_MAP_RELEASE
:
11701 case GOMP_MAP_FORCE_TO
:
11702 case GOMP_MAP_FORCE_FROM
:
11703 case GOMP_MAP_FORCE_TOFROM
:
11704 case GOMP_MAP_FORCE_PRESENT
:
11705 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
11707 case GOMP_MAP_DELETE
:
11708 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
11712 if (tkind_zero
!= tkind
)
11714 if (integer_zerop (s
))
11715 tkind
= tkind_zero
;
11716 else if (integer_nonzerop (s
))
11717 tkind_zero
= tkind
;
11720 case OMP_CLAUSE_FIRSTPRIVATE
:
11721 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11722 tkind
= GOMP_MAP_TO
;
11723 tkind_zero
= tkind
;
11725 case OMP_CLAUSE_TO
:
11726 tkind
= GOMP_MAP_TO
;
11727 tkind_zero
= tkind
;
11729 case OMP_CLAUSE_FROM
:
11730 tkind
= GOMP_MAP_FROM
;
11731 tkind_zero
= tkind
;
11734 gcc_unreachable ();
11736 gcc_checking_assert (tkind
11737 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11738 gcc_checking_assert (tkind_zero
11739 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11740 talign
= ceil_log2 (talign
);
11741 tkind
|= talign
<< talign_shift
;
11742 tkind_zero
|= talign
<< talign_shift
;
11743 gcc_checking_assert (tkind
11744 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11745 gcc_checking_assert (tkind_zero
11746 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11747 if (tkind
== tkind_zero
)
11748 x
= build_int_cstu (tkind_type
, tkind
);
11751 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
11752 x
= build3 (COND_EXPR
, tkind_type
,
11753 fold_build2 (EQ_EXPR
, boolean_type_node
,
11754 unshare_expr (s
), size_zero_node
),
11755 build_int_cstu (tkind_type
, tkind_zero
),
11756 build_int_cstu (tkind_type
, tkind
));
11758 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
11763 case OMP_CLAUSE_FIRSTPRIVATE
:
11764 if (is_oacc_parallel (ctx
))
11765 goto oacc_firstprivate_map
;
11766 ovar
= OMP_CLAUSE_DECL (c
);
11767 if (omp_is_reference (ovar
))
11768 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11770 talign
= DECL_ALIGN_UNIT (ovar
);
11771 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11772 x
= build_sender_ref (ovar
, ctx
);
11773 tkind
= GOMP_MAP_FIRSTPRIVATE
;
11774 type
= TREE_TYPE (ovar
);
11775 if (omp_is_reference (ovar
))
11776 type
= TREE_TYPE (type
);
11777 if ((INTEGRAL_TYPE_P (type
)
11778 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
11779 || TREE_CODE (type
) == POINTER_TYPE
)
11781 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
11783 if (omp_is_reference (var
))
11784 t
= build_simple_mem_ref (var
);
11785 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11786 TREE_NO_WARNING (var
) = 1;
11787 if (TREE_CODE (type
) != POINTER_TYPE
)
11788 t
= fold_convert (pointer_sized_int_node
, t
);
11789 t
= fold_convert (TREE_TYPE (x
), t
);
11790 gimplify_assign (x
, t
, &ilist
);
11792 else if (omp_is_reference (var
))
11793 gimplify_assign (x
, var
, &ilist
);
11794 else if (is_gimple_reg (var
))
11796 tree avar
= create_tmp_var (TREE_TYPE (var
));
11797 mark_addressable (avar
);
11798 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11799 TREE_NO_WARNING (var
) = 1;
11800 gimplify_assign (avar
, var
, &ilist
);
11801 avar
= build_fold_addr_expr (avar
);
11802 gimplify_assign (x
, avar
, &ilist
);
11806 var
= build_fold_addr_expr (var
);
11807 gimplify_assign (x
, var
, &ilist
);
11809 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
11811 else if (omp_is_reference (ovar
))
11812 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11814 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
11815 s
= fold_convert (size_type_node
, s
);
11816 purpose
= size_int (map_idx
++);
11817 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11818 if (TREE_CODE (s
) != INTEGER_CST
)
11819 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
11821 gcc_checking_assert (tkind
11822 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11823 talign
= ceil_log2 (talign
);
11824 tkind
|= talign
<< talign_shift
;
11825 gcc_checking_assert (tkind
11826 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11827 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
11828 build_int_cstu (tkind_type
, tkind
));
11831 case OMP_CLAUSE_USE_DEVICE_PTR
:
11832 case OMP_CLAUSE_IS_DEVICE_PTR
:
11833 ovar
= OMP_CLAUSE_DECL (c
);
11834 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11835 x
= build_sender_ref (ovar
, ctx
);
11836 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
11837 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
11839 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
11840 type
= TREE_TYPE (ovar
);
11841 if (TREE_CODE (type
) == ARRAY_TYPE
)
11842 var
= build_fold_addr_expr (var
);
11845 if (omp_is_reference (ovar
))
11847 type
= TREE_TYPE (type
);
11848 if (TREE_CODE (type
) != ARRAY_TYPE
)
11849 var
= build_simple_mem_ref (var
);
11850 var
= fold_convert (TREE_TYPE (x
), var
);
11853 gimplify_assign (x
, var
, &ilist
);
11855 purpose
= size_int (map_idx
++);
11856 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11857 gcc_checking_assert (tkind
11858 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11859 gcc_checking_assert (tkind
11860 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11861 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
11862 build_int_cstu (tkind_type
, tkind
));
11866 gcc_assert (map_idx
== map_cnt
);
11868 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
11869 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
11870 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
11871 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
11872 for (int i
= 1; i
<= 2; i
++)
11873 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
11875 gimple_seq initlist
= NULL
;
11876 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
11877 TREE_VEC_ELT (t
, i
)),
11878 &initlist
, true, NULL_TREE
);
11879 gimple_seq_add_seq (&ilist
, initlist
);
11881 tree clobber
= build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, i
)),
11883 TREE_THIS_VOLATILE (clobber
) = 1;
11884 gimple_seq_add_stmt (&olist
,
11885 gimple_build_assign (TREE_VEC_ELT (t
, i
),
11889 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
11890 TREE_THIS_VOLATILE (clobber
) = 1;
11891 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
11895 /* Once all the expansions are done, sequence all the different
11896 fragments inside gimple_omp_body. */
11901 && ctx
->record_type
)
11903 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
11904 /* fixup_child_record_type might have changed receiver_decl's type. */
11905 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
11906 gimple_seq_add_stmt (&new_body
,
11907 gimple_build_assign (ctx
->receiver_decl
, t
));
11909 gimple_seq_add_seq (&new_body
, fplist
);
11911 if (offloaded
|| data_region
)
11913 tree prev
= NULL_TREE
;
11914 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11915 switch (OMP_CLAUSE_CODE (c
))
11920 case OMP_CLAUSE_FIRSTPRIVATE
:
11921 if (is_gimple_omp_oacc (ctx
->stmt
))
11923 var
= OMP_CLAUSE_DECL (c
);
11924 if (omp_is_reference (var
)
11925 || is_gimple_reg_type (TREE_TYPE (var
)))
11927 tree new_var
= lookup_decl (var
, ctx
);
11929 type
= TREE_TYPE (var
);
11930 if (omp_is_reference (var
))
11931 type
= TREE_TYPE (type
);
11932 if ((INTEGRAL_TYPE_P (type
)
11933 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
11934 || TREE_CODE (type
) == POINTER_TYPE
)
11936 x
= build_receiver_ref (var
, false, ctx
);
11937 if (TREE_CODE (type
) != POINTER_TYPE
)
11938 x
= fold_convert (pointer_sized_int_node
, x
);
11939 x
= fold_convert (type
, x
);
11940 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
11942 if (omp_is_reference (var
))
11944 tree v
= create_tmp_var_raw (type
, get_name (var
));
11945 gimple_add_tmp_var (v
);
11946 TREE_ADDRESSABLE (v
) = 1;
11947 gimple_seq_add_stmt (&new_body
,
11948 gimple_build_assign (v
, x
));
11949 x
= build_fold_addr_expr (v
);
11951 gimple_seq_add_stmt (&new_body
,
11952 gimple_build_assign (new_var
, x
));
11956 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
11957 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
11959 gimple_seq_add_stmt (&new_body
,
11960 gimple_build_assign (new_var
, x
));
11963 else if (is_variable_sized (var
))
11965 tree pvar
= DECL_VALUE_EXPR (var
);
11966 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11967 pvar
= TREE_OPERAND (pvar
, 0);
11968 gcc_assert (DECL_P (pvar
));
11969 tree new_var
= lookup_decl (pvar
, ctx
);
11970 x
= build_receiver_ref (var
, false, ctx
);
11971 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11972 gimple_seq_add_stmt (&new_body
,
11973 gimple_build_assign (new_var
, x
));
11976 case OMP_CLAUSE_PRIVATE
:
11977 if (is_gimple_omp_oacc (ctx
->stmt
))
11979 var
= OMP_CLAUSE_DECL (c
);
11980 if (omp_is_reference (var
))
11982 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
11983 tree new_var
= lookup_decl (var
, ctx
);
11984 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
11985 if (TREE_CONSTANT (x
))
11987 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
11989 gimple_add_tmp_var (x
);
11990 TREE_ADDRESSABLE (x
) = 1;
11991 x
= build_fold_addr_expr_loc (clause_loc
, x
);
11996 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
11997 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11998 gimple_seq_add_stmt (&new_body
,
11999 gimple_build_assign (new_var
, x
));
12002 case OMP_CLAUSE_USE_DEVICE_PTR
:
12003 case OMP_CLAUSE_IS_DEVICE_PTR
:
12004 var
= OMP_CLAUSE_DECL (c
);
12005 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
12006 x
= build_sender_ref (var
, ctx
);
12008 x
= build_receiver_ref (var
, false, ctx
);
12009 if (is_variable_sized (var
))
12011 tree pvar
= DECL_VALUE_EXPR (var
);
12012 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12013 pvar
= TREE_OPERAND (pvar
, 0);
12014 gcc_assert (DECL_P (pvar
));
12015 tree new_var
= lookup_decl (pvar
, ctx
);
12016 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12017 gimple_seq_add_stmt (&new_body
,
12018 gimple_build_assign (new_var
, x
));
12020 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12022 tree new_var
= lookup_decl (var
, ctx
);
12023 new_var
= DECL_VALUE_EXPR (new_var
);
12024 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
12025 new_var
= TREE_OPERAND (new_var
, 0);
12026 gcc_assert (DECL_P (new_var
));
12027 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12028 gimple_seq_add_stmt (&new_body
,
12029 gimple_build_assign (new_var
, x
));
12033 tree type
= TREE_TYPE (var
);
12034 tree new_var
= lookup_decl (var
, ctx
);
12035 if (omp_is_reference (var
))
12037 type
= TREE_TYPE (type
);
12038 if (TREE_CODE (type
) != ARRAY_TYPE
)
12040 tree v
= create_tmp_var_raw (type
, get_name (var
));
12041 gimple_add_tmp_var (v
);
12042 TREE_ADDRESSABLE (v
) = 1;
12043 x
= fold_convert (type
, x
);
12044 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12046 gimple_seq_add_stmt (&new_body
,
12047 gimple_build_assign (v
, x
));
12048 x
= build_fold_addr_expr (v
);
12051 new_var
= DECL_VALUE_EXPR (new_var
);
12052 x
= fold_convert (TREE_TYPE (new_var
), x
);
12053 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12054 gimple_seq_add_stmt (&new_body
,
12055 gimple_build_assign (new_var
, x
));
12059 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12060 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12061 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12062 or references to VLAs. */
12063 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12064 switch (OMP_CLAUSE_CODE (c
))
12069 case OMP_CLAUSE_MAP
:
12070 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12071 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
12073 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12074 poly_int64 offset
= 0;
12076 var
= OMP_CLAUSE_DECL (c
);
12078 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
12079 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
12081 && varpool_node::get_create (var
)->offloadable
)
12083 if (TREE_CODE (var
) == INDIRECT_REF
12084 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
12085 var
= TREE_OPERAND (var
, 0);
12086 if (TREE_CODE (var
) == COMPONENT_REF
)
12088 var
= get_addr_base_and_unit_offset (var
, &offset
);
12089 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
12091 else if (DECL_SIZE (var
)
12092 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12094 tree var2
= DECL_VALUE_EXPR (var
);
12095 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12096 var2
= TREE_OPERAND (var2
, 0);
12097 gcc_assert (DECL_P (var2
));
12100 tree new_var
= lookup_decl (var
, ctx
), x
;
12101 tree type
= TREE_TYPE (new_var
);
12103 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
12104 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
12107 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
12109 new_var
= build2 (MEM_REF
, type
,
12110 build_fold_addr_expr (new_var
),
12111 build_int_cst (build_pointer_type (type
),
12114 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
12116 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
12117 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
12118 new_var
= build2 (MEM_REF
, type
,
12119 build_fold_addr_expr (new_var
),
12120 build_int_cst (build_pointer_type (type
),
12124 is_ref
= omp_is_reference (var
);
12125 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
12127 bool ref_to_array
= false;
12130 type
= TREE_TYPE (type
);
12131 if (TREE_CODE (type
) == ARRAY_TYPE
)
12133 type
= build_pointer_type (type
);
12134 ref_to_array
= true;
12137 else if (TREE_CODE (type
) == ARRAY_TYPE
)
12139 tree decl2
= DECL_VALUE_EXPR (new_var
);
12140 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
12141 decl2
= TREE_OPERAND (decl2
, 0);
12142 gcc_assert (DECL_P (decl2
));
12144 type
= TREE_TYPE (new_var
);
12146 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
12147 x
= fold_convert_loc (clause_loc
, type
, x
);
12148 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
12150 tree bias
= OMP_CLAUSE_SIZE (c
);
12152 bias
= lookup_decl (bias
, ctx
);
12153 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
12154 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
12156 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
12157 TREE_TYPE (x
), x
, bias
);
12160 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12161 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12162 if (is_ref
&& !ref_to_array
)
12164 tree t
= create_tmp_var_raw (type
, get_name (var
));
12165 gimple_add_tmp_var (t
);
12166 TREE_ADDRESSABLE (t
) = 1;
12167 gimple_seq_add_stmt (&new_body
,
12168 gimple_build_assign (t
, x
));
12169 x
= build_fold_addr_expr_loc (clause_loc
, t
);
12171 gimple_seq_add_stmt (&new_body
,
12172 gimple_build_assign (new_var
, x
));
12175 else if (OMP_CLAUSE_CHAIN (c
)
12176 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
12178 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
12179 == GOMP_MAP_FIRSTPRIVATE_POINTER
12180 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
12181 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
12184 case OMP_CLAUSE_PRIVATE
:
12185 var
= OMP_CLAUSE_DECL (c
);
12186 if (is_variable_sized (var
))
12188 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12189 tree new_var
= lookup_decl (var
, ctx
);
12190 tree pvar
= DECL_VALUE_EXPR (var
);
12191 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12192 pvar
= TREE_OPERAND (pvar
, 0);
12193 gcc_assert (DECL_P (pvar
));
12194 tree new_pvar
= lookup_decl (pvar
, ctx
);
12195 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
12196 tree al
= size_int (DECL_ALIGN (var
));
12197 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
12198 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
12199 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
12200 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12201 gimple_seq_add_stmt (&new_body
,
12202 gimple_build_assign (new_pvar
, x
));
12204 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
12206 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12207 tree new_var
= lookup_decl (var
, ctx
);
12208 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
12209 if (TREE_CONSTANT (x
))
12214 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
12215 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
12216 tree al
= size_int (TYPE_ALIGN (rtype
));
12217 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
12220 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12221 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12222 gimple_seq_add_stmt (&new_body
,
12223 gimple_build_assign (new_var
, x
));
12228 gimple_seq fork_seq
= NULL
;
12229 gimple_seq join_seq
= NULL
;
12231 if (is_oacc_parallel (ctx
))
12233 /* If there are reductions on the offloaded region itself, treat
12234 them as a dummy GANG loop. */
12235 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
12237 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
12238 false, NULL
, NULL
, &fork_seq
, &join_seq
, ctx
);
12241 gimple_seq_add_seq (&new_body
, fork_seq
);
12242 gimple_seq_add_seq (&new_body
, tgt_body
);
12243 gimple_seq_add_seq (&new_body
, join_seq
);
12246 new_body
= maybe_catch_exception (new_body
);
12248 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12249 gimple_omp_set_body (stmt
, new_body
);
12252 bind
= gimple_build_bind (NULL
, NULL
,
12253 tgt_bind
? gimple_bind_block (tgt_bind
)
12255 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12256 gimple_bind_add_seq (bind
, ilist
);
12257 gimple_bind_add_stmt (bind
, stmt
);
12258 gimple_bind_add_seq (bind
, olist
);
12260 pop_gimplify_context (NULL
);
12264 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12265 gimple_bind_add_stmt (dep_bind
, bind
);
12266 gimple_bind_add_seq (dep_bind
, dep_olist
);
12267 pop_gimplify_context (dep_bind
);
12271 /* Expand code for an OpenMP teams directive. */
12274 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12276 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
12277 push_gimplify_context ();
12279 tree block
= make_node (BLOCK
);
12280 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
12281 gsi_replace (gsi_p
, bind
, true);
12282 gimple_seq bind_body
= NULL
;
12283 gimple_seq dlist
= NULL
;
12284 gimple_seq olist
= NULL
;
12286 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
12287 OMP_CLAUSE_NUM_TEAMS
);
12288 if (num_teams
== NULL_TREE
)
12289 num_teams
= build_int_cst (unsigned_type_node
, 0);
12292 num_teams
= OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams
);
12293 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
12294 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
12296 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
12297 OMP_CLAUSE_THREAD_LIMIT
);
12298 if (thread_limit
== NULL_TREE
)
12299 thread_limit
= build_int_cst (unsigned_type_node
, 0);
12302 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
12303 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
12304 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
12308 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
12309 &bind_body
, &dlist
, ctx
, NULL
);
12310 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
12311 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
,
12313 if (!gimple_omp_teams_grid_phony (teams_stmt
))
12315 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
12316 location_t loc
= gimple_location (teams_stmt
);
12317 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS
);
12318 gimple
*call
= gimple_build_call (decl
, 2, num_teams
, thread_limit
);
12319 gimple_set_location (call
, loc
);
12320 gimple_seq_add_stmt (&bind_body
, call
);
12323 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
12324 gimple_omp_set_body (teams_stmt
, NULL
);
12325 gimple_seq_add_seq (&bind_body
, olist
);
12326 gimple_seq_add_seq (&bind_body
, dlist
);
12327 if (!gimple_omp_teams_grid_phony (teams_stmt
))
12328 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
12329 gimple_bind_set_body (bind
, bind_body
);
12331 pop_gimplify_context (bind
);
12333 gimple_bind_append_vars (bind
, ctx
->block_vars
);
12334 BLOCK_VARS (block
) = ctx
->block_vars
;
12335 if (BLOCK_VARS (block
))
12336 TREE_USED (block
) = 1;
12339 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
12342 lower_omp_grid_body (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12344 gimple
*stmt
= gsi_stmt (*gsi_p
);
12345 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
12346 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt
),
12347 gimple_build_omp_return (false));
12351 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12352 regimplified. If DATA is non-NULL, lower_omp_1 is outside
12353 of OMP context, but with task_shared_vars set. */
12356 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
12361 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12362 if (VAR_P (t
) && data
== NULL
&& DECL_HAS_VALUE_EXPR_P (t
))
12365 if (task_shared_vars
12367 && bitmap_bit_p (task_shared_vars
, DECL_UID (t
)))
12370 /* If a global variable has been privatized, TREE_CONSTANT on
12371 ADDR_EXPR might be wrong. */
12372 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
12373 recompute_tree_invariant_for_addr_expr (t
);
12375 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
12379 /* Data to be communicated between lower_omp_regimplify_operands and
12380 lower_omp_regimplify_operands_p. */
12382 struct lower_omp_regimplify_operands_data
12388 /* Helper function for lower_omp_regimplify_operands. Find
12389 omp_member_access_dummy_var vars and adjust temporarily their
12390 DECL_VALUE_EXPRs if needed. */
12393 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
12396 tree t
= omp_member_access_dummy_var (*tp
);
12399 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
12400 lower_omp_regimplify_operands_data
*ldata
12401 = (lower_omp_regimplify_operands_data
*) wi
->info
;
12402 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
12405 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
12406 ldata
->decls
->safe_push (*tp
);
12407 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
12408 SET_DECL_VALUE_EXPR (*tp
, v
);
12411 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
12415 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12416 of omp_member_access_dummy_var vars during regimplification. */
12419 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
12420 gimple_stmt_iterator
*gsi_p
)
12422 auto_vec
<tree
, 10> decls
;
12425 struct walk_stmt_info wi
;
12426 memset (&wi
, '\0', sizeof (wi
));
12427 struct lower_omp_regimplify_operands_data data
;
12429 data
.decls
= &decls
;
12431 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
12433 gimple_regimplify_operands (stmt
, gsi_p
);
12434 while (!decls
.is_empty ())
12436 tree t
= decls
.pop ();
12437 tree v
= decls
.pop ();
12438 SET_DECL_VALUE_EXPR (t
, v
);
12443 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12445 gimple
*stmt
= gsi_stmt (*gsi_p
);
12446 struct walk_stmt_info wi
;
12449 if (gimple_has_location (stmt
))
12450 input_location
= gimple_location (stmt
);
12452 if (task_shared_vars
)
12453 memset (&wi
, '\0', sizeof (wi
));
12455 /* If we have issued syntax errors, avoid doing any heavy lifting.
12456 Just replace the OMP directives with a NOP to avoid
12457 confusing RTL expansion. */
12458 if (seen_error () && is_gimple_omp (stmt
))
12460 gsi_replace (gsi_p
, gimple_build_nop (), true);
12464 switch (gimple_code (stmt
))
12468 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
12469 if ((ctx
|| task_shared_vars
)
12470 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
12471 lower_omp_regimplify_p
,
12472 ctx
? NULL
: &wi
, NULL
)
12473 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
12474 lower_omp_regimplify_p
,
12475 ctx
? NULL
: &wi
, NULL
)))
12476 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
12480 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
12482 case GIMPLE_EH_FILTER
:
12483 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
12486 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
12487 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
12489 case GIMPLE_TRANSACTION
:
12490 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
12494 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
12495 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
12497 case GIMPLE_OMP_PARALLEL
:
12498 case GIMPLE_OMP_TASK
:
12499 ctx
= maybe_lookup_ctx (stmt
);
12501 if (ctx
->cancellable
)
12502 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12503 lower_omp_taskreg (gsi_p
, ctx
);
12505 case GIMPLE_OMP_FOR
:
12506 ctx
= maybe_lookup_ctx (stmt
);
12508 if (ctx
->cancellable
)
12509 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12510 lower_omp_for (gsi_p
, ctx
);
12512 case GIMPLE_OMP_SECTIONS
:
12513 ctx
= maybe_lookup_ctx (stmt
);
12515 if (ctx
->cancellable
)
12516 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12517 lower_omp_sections (gsi_p
, ctx
);
12519 case GIMPLE_OMP_SINGLE
:
12520 ctx
= maybe_lookup_ctx (stmt
);
12522 lower_omp_single (gsi_p
, ctx
);
12524 case GIMPLE_OMP_MASTER
:
12525 ctx
= maybe_lookup_ctx (stmt
);
12527 lower_omp_master (gsi_p
, ctx
);
12529 case GIMPLE_OMP_TASKGROUP
:
12530 ctx
= maybe_lookup_ctx (stmt
);
12532 lower_omp_taskgroup (gsi_p
, ctx
);
12534 case GIMPLE_OMP_ORDERED
:
12535 ctx
= maybe_lookup_ctx (stmt
);
12537 lower_omp_ordered (gsi_p
, ctx
);
12539 case GIMPLE_OMP_SCAN
:
12540 ctx
= maybe_lookup_ctx (stmt
);
12542 lower_omp_scan (gsi_p
, ctx
);
12544 case GIMPLE_OMP_CRITICAL
:
12545 ctx
= maybe_lookup_ctx (stmt
);
12547 lower_omp_critical (gsi_p
, ctx
);
12549 case GIMPLE_OMP_ATOMIC_LOAD
:
12550 if ((ctx
|| task_shared_vars
)
12551 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12552 as_a
<gomp_atomic_load
*> (stmt
)),
12553 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
12554 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
12556 case GIMPLE_OMP_TARGET
:
12557 ctx
= maybe_lookup_ctx (stmt
);
12559 lower_omp_target (gsi_p
, ctx
);
12561 case GIMPLE_OMP_TEAMS
:
12562 ctx
= maybe_lookup_ctx (stmt
);
12564 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
12565 lower_omp_taskreg (gsi_p
, ctx
);
12567 lower_omp_teams (gsi_p
, ctx
);
12569 case GIMPLE_OMP_GRID_BODY
:
12570 ctx
= maybe_lookup_ctx (stmt
);
12572 lower_omp_grid_body (gsi_p
, ctx
);
12576 call_stmt
= as_a
<gcall
*> (stmt
);
12577 fndecl
= gimple_call_fndecl (call_stmt
);
12579 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
12580 switch (DECL_FUNCTION_CODE (fndecl
))
12582 case BUILT_IN_GOMP_BARRIER
:
12586 case BUILT_IN_GOMP_CANCEL
:
12587 case BUILT_IN_GOMP_CANCELLATION_POINT
:
12590 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
12591 cctx
= cctx
->outer
;
12592 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
12593 if (!cctx
->cancellable
)
12595 if (DECL_FUNCTION_CODE (fndecl
)
12596 == BUILT_IN_GOMP_CANCELLATION_POINT
)
12598 stmt
= gimple_build_nop ();
12599 gsi_replace (gsi_p
, stmt
, false);
12603 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
12605 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
12606 gimple_call_set_fndecl (call_stmt
, fndecl
);
12607 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
12610 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
12611 gimple_call_set_lhs (call_stmt
, lhs
);
12612 tree fallthru_label
;
12613 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
12615 g
= gimple_build_label (fallthru_label
);
12616 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
12617 g
= gimple_build_cond (NE_EXPR
, lhs
,
12618 fold_convert (TREE_TYPE (lhs
),
12619 boolean_false_node
),
12620 cctx
->cancel_label
, fallthru_label
);
12621 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
12628 case GIMPLE_ASSIGN
:
12629 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
12631 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
12632 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
12633 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
12634 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
12635 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
12636 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
12637 && (gimple_omp_target_kind (up
->stmt
)
12638 == GF_OMP_TARGET_KIND_DATA
)))
12640 else if (!up
->lastprivate_conditional_map
)
12642 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
12643 if (TREE_CODE (lhs
) == MEM_REF
12644 && DECL_P (TREE_OPERAND (lhs
, 0))
12645 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
12646 0))) == REFERENCE_TYPE
)
12647 lhs
= TREE_OPERAND (lhs
, 0);
12649 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
12652 if (up
->combined_into_simd_safelen1
)
12655 if (gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
)
12658 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
12659 clauses
= gimple_omp_for_clauses (up
->stmt
);
12661 clauses
= gimple_omp_sections_clauses (up
->stmt
);
12662 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
);
12663 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
12664 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
12665 OMP_CLAUSE__CONDTEMP_
);
12666 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
12667 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
12668 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
12675 if ((ctx
|| task_shared_vars
)
12676 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
12679 /* Just remove clobbers, this should happen only if we have
12680 "privatized" local addressable variables in SIMD regions,
12681 the clobber isn't needed in that case and gimplifying address
12682 of the ARRAY_REF into a pointer and creating MEM_REF based
12683 clobber would create worse code than we get with the clobber
12685 if (gimple_clobber_p (stmt
))
12687 gsi_replace (gsi_p
, gimple_build_nop (), true);
12690 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
12697 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
12699 location_t saved_location
= input_location
;
12700 gimple_stmt_iterator gsi
;
12701 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
12702 lower_omp_1 (&gsi
, ctx
);
12703 /* During gimplification, we haven't folded statments inside offloading
12704 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12705 if (target_nesting_level
|| taskreg_nesting_level
)
12706 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
12708 input_location
= saved_location
;
12711 /* Main entry point. */
12713 static unsigned int
12714 execute_lower_omp (void)
12720 /* This pass always runs, to provide PROP_gimple_lomp.
12721 But often, there is nothing to do. */
12722 if (flag_openacc
== 0 && flag_openmp
== 0
12723 && flag_openmp_simd
== 0)
12726 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
12727 delete_omp_context
);
12729 body
= gimple_body (current_function_decl
);
12731 if (hsa_gen_requested_p ())
12732 omp_grid_gridify_all_targets (&body
);
12734 scan_omp (&body
, NULL
);
12735 gcc_assert (taskreg_nesting_level
== 0);
12736 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
12737 finish_taskreg_scan (ctx
);
12738 taskreg_contexts
.release ();
12740 if (all_contexts
->root
)
12742 if (task_shared_vars
)
12743 push_gimplify_context ();
12744 lower_omp (&body
, NULL
);
12745 if (task_shared_vars
)
12746 pop_gimplify_context (NULL
);
12751 splay_tree_delete (all_contexts
);
12752 all_contexts
= NULL
;
12754 BITMAP_FREE (task_shared_vars
);
12755 BITMAP_FREE (global_nonaddressable_vars
);
12757 /* If current function is a method, remove artificial dummy VAR_DECL created
12758 for non-static data member privatization, they aren't needed for
12759 debuginfo nor anything else, have been already replaced everywhere in the
12760 IL and cause problems with LTO. */
12761 if (DECL_ARGUMENTS (current_function_decl
)
12762 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
12763 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
12765 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
12771 const pass_data pass_data_lower_omp
=
12773 GIMPLE_PASS
, /* type */
12774 "omplower", /* name */
12775 OPTGROUP_OMP
, /* optinfo_flags */
12776 TV_NONE
, /* tv_id */
12777 PROP_gimple_any
, /* properties_required */
12778 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
12779 0, /* properties_destroyed */
12780 0, /* todo_flags_start */
12781 0, /* todo_flags_finish */
12784 class pass_lower_omp
: public gimple_opt_pass
12787 pass_lower_omp (gcc::context
*ctxt
)
12788 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
12791 /* opt_pass methods: */
12792 virtual unsigned int execute (function
*) { return execute_lower_omp (); }
12794 }; // class pass_lower_omp
12796 } // anon namespace
12799 make_pass_lower_omp (gcc::context
*ctxt
)
12801 return new pass_lower_omp (ctxt
);
12804 /* The following is a utility to diagnose structured block violations.
12805 It is not part of the "omplower" pass, as that's invoked too late. It
12806 should be invoked by the respective front ends after gimplification. */
12808 static splay_tree all_labels
;
12810 /* Check for mismatched contexts and generate an error if needed. Return
12811 true if an error is detected. */
12814 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
12815 gimple
*branch_ctx
, gimple
*label_ctx
)
12817 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
12818 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
12820 if (label_ctx
== branch_ctx
)
12823 const char* kind
= NULL
;
12827 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
12828 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
12830 gcc_checking_assert (kind
== NULL
);
12836 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
12840 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
12841 so we could traverse it and issue a correct "exit" or "enter" error
12842 message upon a structured block violation.
12844 We built the context by building a list with tree_cons'ing, but there is
12845 no easy counterpart in gimple tuples. It seems like far too much work
12846 for issuing exit/enter error messages. If someone really misses the
12847 distinct error message... patches welcome. */
12850 /* Try to avoid confusing the user by producing and error message
12851 with correct "exit" or "enter" verbiage. We prefer "exit"
12852 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
12853 if (branch_ctx
== NULL
)
12859 if (TREE_VALUE (label_ctx
) == branch_ctx
)
12864 label_ctx
= TREE_CHAIN (label_ctx
);
12869 error ("invalid exit from %s structured block", kind
);
12871 error ("invalid entry to %s structured block", kind
);
12874 /* If it's obvious we have an invalid entry, be specific about the error. */
12875 if (branch_ctx
== NULL
)
12876 error ("invalid entry to %s structured block", kind
);
12879 /* Otherwise, be vague and lazy, but efficient. */
12880 error ("invalid branch to/from %s structured block", kind
);
12883 gsi_replace (gsi_p
, gimple_build_nop (), false);
12887 /* Pass 1: Create a minimal tree of structured blocks, and record
12888 where each label is found. */
12891 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
12892 struct walk_stmt_info
*wi
)
12894 gimple
*context
= (gimple
*) wi
->info
;
12895 gimple
*inner_context
;
12896 gimple
*stmt
= gsi_stmt (*gsi_p
);
12898 *handled_ops_p
= true;
12900 switch (gimple_code (stmt
))
12904 case GIMPLE_OMP_PARALLEL
:
12905 case GIMPLE_OMP_TASK
:
12906 case GIMPLE_OMP_SECTIONS
:
12907 case GIMPLE_OMP_SINGLE
:
12908 case GIMPLE_OMP_SECTION
:
12909 case GIMPLE_OMP_MASTER
:
12910 case GIMPLE_OMP_ORDERED
:
12911 case GIMPLE_OMP_SCAN
:
12912 case GIMPLE_OMP_CRITICAL
:
12913 case GIMPLE_OMP_TARGET
:
12914 case GIMPLE_OMP_TEAMS
:
12915 case GIMPLE_OMP_TASKGROUP
:
12916 /* The minimal context here is just the current OMP construct. */
12917 inner_context
= stmt
;
12918 wi
->info
= inner_context
;
12919 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
12920 wi
->info
= context
;
12923 case GIMPLE_OMP_FOR
:
12924 inner_context
= stmt
;
12925 wi
->info
= inner_context
;
12926 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
12928 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
12929 diagnose_sb_1
, NULL
, wi
);
12930 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
12931 wi
->info
= context
;
12935 splay_tree_insert (all_labels
,
12936 (splay_tree_key
) gimple_label_label (
12937 as_a
<glabel
*> (stmt
)),
12938 (splay_tree_value
) context
);
12948 /* Pass 2: Check each branch and see if its context differs from that of
12949 the destination label's context. */
12952 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
12953 struct walk_stmt_info
*wi
)
12955 gimple
*context
= (gimple
*) wi
->info
;
12957 gimple
*stmt
= gsi_stmt (*gsi_p
);
12959 *handled_ops_p
= true;
12961 switch (gimple_code (stmt
))
12965 case GIMPLE_OMP_PARALLEL
:
12966 case GIMPLE_OMP_TASK
:
12967 case GIMPLE_OMP_SECTIONS
:
12968 case GIMPLE_OMP_SINGLE
:
12969 case GIMPLE_OMP_SECTION
:
12970 case GIMPLE_OMP_MASTER
:
12971 case GIMPLE_OMP_ORDERED
:
12972 case GIMPLE_OMP_SCAN
:
12973 case GIMPLE_OMP_CRITICAL
:
12974 case GIMPLE_OMP_TARGET
:
12975 case GIMPLE_OMP_TEAMS
:
12976 case GIMPLE_OMP_TASKGROUP
:
12978 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
12979 wi
->info
= context
;
12982 case GIMPLE_OMP_FOR
:
12984 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
12986 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
12987 diagnose_sb_2
, NULL
, wi
);
12988 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
12989 wi
->info
= context
;
12994 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
12995 tree lab
= gimple_cond_true_label (cond_stmt
);
12998 n
= splay_tree_lookup (all_labels
,
12999 (splay_tree_key
) lab
);
13000 diagnose_sb_0 (gsi_p
, context
,
13001 n
? (gimple
*) n
->value
: NULL
);
13003 lab
= gimple_cond_false_label (cond_stmt
);
13006 n
= splay_tree_lookup (all_labels
,
13007 (splay_tree_key
) lab
);
13008 diagnose_sb_0 (gsi_p
, context
,
13009 n
? (gimple
*) n
->value
: NULL
);
13016 tree lab
= gimple_goto_dest (stmt
);
13017 if (TREE_CODE (lab
) != LABEL_DECL
)
13020 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
13021 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
13025 case GIMPLE_SWITCH
:
13027 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
13029 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
13031 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
13032 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
13033 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
13039 case GIMPLE_RETURN
:
13040 diagnose_sb_0 (gsi_p
, context
, NULL
);
13050 static unsigned int
13051 diagnose_omp_structured_block_errors (void)
13053 struct walk_stmt_info wi
;
13054 gimple_seq body
= gimple_body (current_function_decl
);
13056 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
13058 memset (&wi
, 0, sizeof (wi
));
13059 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
13061 memset (&wi
, 0, sizeof (wi
));
13062 wi
.want_locations
= true;
13063 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
13065 gimple_set_body (current_function_decl
, body
);
13067 splay_tree_delete (all_labels
);
13075 const pass_data pass_data_diagnose_omp_blocks
=
13077 GIMPLE_PASS
, /* type */
13078 "*diagnose_omp_blocks", /* name */
13079 OPTGROUP_OMP
, /* optinfo_flags */
13080 TV_NONE
, /* tv_id */
13081 PROP_gimple_any
, /* properties_required */
13082 0, /* properties_provided */
13083 0, /* properties_destroyed */
13084 0, /* todo_flags_start */
13085 0, /* todo_flags_finish */
13088 class pass_diagnose_omp_blocks
: public gimple_opt_pass
13091 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
13092 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
13095 /* opt_pass methods: */
13096 virtual bool gate (function
*)
13098 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
13100 virtual unsigned int execute (function
*)
13102 return diagnose_omp_structured_block_errors ();
13105 }; // class pass_diagnose_omp_blocks
13107 } // anon namespace
13110 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
13112 return new pass_diagnose_omp_blocks (ctxt
);
13116 #include "gt-omp-low.h"