1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2020 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
63 /* Lowering of OMP parallel and workshare constructs proceeds in two
64 phases. The first phase scans the function looking for OMP statements
65 and then for variables that must be replaced to satisfy data sharing
66 clauses. The second phase expands code for the constructs, as well as
67 re-gimplifying things when variables have been replaced with complex
70 Final code generation is done by pass_expand_omp. The flowgraph is
71 scanned for regions which are then moved to a new
72 function, to be invoked by the thread library, or offloaded. */
74 /* Context structure. Used to store information about each parallel
75 directive in the code. */
79 /* This field must be at the beginning, as we do "inheritance": Some
80 callback functions for tree-inline.c (e.g., omp_copy_decl)
81 receive a copy_body_data pointer that is up-casted to an
82 omp_context pointer. */
85 /* The tree of contexts corresponding to the encountered constructs. */
86 struct omp_context
*outer
;
89 /* Map variables to fields in a structure that allows communication
90 between sending and receiving threads. */
96 /* These are used just by task contexts, if task firstprivate fn is
97 needed. srecord_type is used to communicate from the thread
98 that encountered the task construct to task firstprivate fn,
99 record_type is allocated by GOMP_task, initialized by task firstprivate
100 fn and passed to the task body fn. */
101 splay_tree sfield_map
;
104 /* A chain of variables to add to the top-level block surrounding the
105 construct. In the case of a parallel, this is in the child function. */
108 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
109 barriers should jump to during omplower pass. */
112 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
116 /* For task reductions registered in this context, a vector containing
117 the length of the private copies block (if constant, otherwise NULL)
118 and then offsets (if constant, otherwise NULL) for each entry. */
119 vec
<tree
> task_reductions
;
121 /* A hash map from the reduction clauses to the registered array
123 hash_map
<tree
, unsigned> *task_reduction_map
;
125 /* And a hash map from the lastprivate(conditional:) variables to their
126 corresponding tracking loop iteration variables. */
127 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
129 /* A tree_list of the reduction clauses in this context. This is
130 only used for checking the consistency of OpenACC reduction
131 clauses in scan_omp_for and is not guaranteed to contain a valid
132 value outside of this function. */
133 tree local_reduction_clauses
;
135 /* A tree_list of the reduction clauses in outer contexts. This is
136 only used for checking the consistency of OpenACC reduction
137 clauses in scan_omp_for and is not guaranteed to contain a valid
138 value outside of this function. */
139 tree outer_reduction_clauses
;
141 /* Nesting depth of this context. Used to beautify error messages re
142 invalid gotos. The outermost ctx is depth 1, with depth 0 being
143 reserved for the main body of the function. */
146 /* True if this parallel directive is nested within another. */
149 /* True if this construct can be cancelled. */
152 /* True if lower_omp_1 should look up lastprivate conditional in parent
154 bool combined_into_simd_safelen1
;
156 /* True if there is nested scan context with inclusive clause. */
159 /* True if there is nested scan context with exclusive clause. */
162 /* True in the second simd loop of for simd with inscan reductions. */
163 bool for_simd_scan_phase
;
165 /* True if there is order(concurrent) clause on the construct. */
166 bool order_concurrent
;
168 /* True if there is bind clause on the construct (i.e. a loop construct). */
172 static splay_tree all_contexts
;
173 static int taskreg_nesting_level
;
174 static int target_nesting_level
;
175 static bitmap task_shared_vars
;
176 static bitmap global_nonaddressable_vars
;
177 static vec
<omp_context
*> taskreg_contexts
;
179 static void scan_omp (gimple_seq
*, omp_context
*);
180 static tree
scan_omp_1_op (tree
*, int *, void *);
/* Convenience macro for walk_gimple_seq callbacks: these container
   statements have no interesting operands of their own, but their
   sub-statements must be walked.  */
#define WALK_SUBSTMTS \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
192 /* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
196 is_oacc_parallel_or_serial (omp_context
*ctx
)
198 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
199 return ((outer_type
== GIMPLE_OMP_TARGET
)
200 && ((gimple_omp_target_kind (ctx
->stmt
)
201 == GF_OMP_TARGET_KIND_OACC_PARALLEL
)
202 || (gimple_omp_target_kind (ctx
->stmt
)
203 == GF_OMP_TARGET_KIND_OACC_SERIAL
)));
206 /* Return true if CTX corresponds to an oacc kernels region. */
209 is_oacc_kernels (omp_context
*ctx
)
211 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
212 return ((outer_type
== GIMPLE_OMP_TARGET
)
213 && (gimple_omp_target_kind (ctx
->stmt
)
214 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
217 /* If DECL is the artificial dummy VAR_DECL created for non-static
218 data member privatization, return the underlying "this" parameter,
219 otherwise return NULL. */
222 omp_member_access_dummy_var (tree decl
)
225 || !DECL_ARTIFICIAL (decl
)
226 || !DECL_IGNORED_P (decl
)
227 || !DECL_HAS_VALUE_EXPR_P (decl
)
228 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
231 tree v
= DECL_VALUE_EXPR (decl
);
232 if (TREE_CODE (v
) != COMPONENT_REF
)
236 switch (TREE_CODE (v
))
242 case POINTER_PLUS_EXPR
:
243 v
= TREE_OPERAND (v
, 0);
246 if (DECL_CONTEXT (v
) == current_function_decl
247 && DECL_ARTIFICIAL (v
)
248 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
256 /* Helper for unshare_and_remap, called through walk_tree. */
259 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
261 tree
*pair
= (tree
*) data
;
264 *tp
= unshare_expr (pair
[1]);
267 else if (IS_TYPE_OR_DECL_P (*tp
))
272 /* Return unshare_expr (X) with all occurrences of FROM
276 unshare_and_remap (tree x
, tree from
, tree to
)
278 tree pair
[2] = { from
, to
};
279 x
= unshare_expr (x
);
280 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
284 /* Convenience function for calling scan_omp_1_op on tree operands. */
287 scan_omp_op (tree
*tp
, omp_context
*ctx
)
289 struct walk_stmt_info wi
;
291 memset (&wi
, 0, sizeof (wi
));
293 wi
.want_locations
= true;
295 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
298 static void lower_omp (gimple_seq
*, omp_context
*);
299 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
300 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
302 /* Return true if CTX is for an omp parallel. */
305 is_parallel_ctx (omp_context
*ctx
)
307 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
311 /* Return true if CTX is for an omp task. */
314 is_task_ctx (omp_context
*ctx
)
316 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
320 /* Return true if CTX is for an omp taskloop. */
323 is_taskloop_ctx (omp_context
*ctx
)
325 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
326 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
330 /* Return true if CTX is for a host omp teams. */
333 is_host_teams_ctx (omp_context
*ctx
)
335 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
336 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
339 /* Return true if CTX is for an omp parallel or omp task or host omp teams
340 (the last one is strictly not a task region in OpenMP speak, but we
341 need to treat it similarly). */
344 is_taskreg_ctx (omp_context
*ctx
)
346 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
349 /* Return true if EXPR is variable sized. */
352 is_variable_sized (const_tree expr
)
354 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
357 /* Lookup variables. The "maybe" form
358 allows for the variable form to not have been entered, otherwise we
359 assert that the variable must have been entered. */
362 lookup_decl (tree var
, omp_context
*ctx
)
364 tree
*n
= ctx
->cb
.decl_map
->get (var
);
369 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
371 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
372 return n
? *n
: NULL_TREE
;
376 lookup_field (tree var
, omp_context
*ctx
)
379 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
380 return (tree
) n
->value
;
384 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
387 n
= splay_tree_lookup (ctx
->sfield_map
388 ? ctx
->sfield_map
: ctx
->field_map
, key
);
389 return (tree
) n
->value
;
393 lookup_sfield (tree var
, omp_context
*ctx
)
395 return lookup_sfield ((splay_tree_key
) var
, ctx
);
399 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
402 n
= splay_tree_lookup (ctx
->field_map
, key
);
403 return n
? (tree
) n
->value
: NULL_TREE
;
407 maybe_lookup_field (tree var
, omp_context
*ctx
)
409 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
412 /* Return true if DECL should be copied by pointer. SHARED_CTX is
413 the parallel context if DECL is to be shared. */
416 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
418 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
419 || TYPE_ATOMIC (TREE_TYPE (decl
)))
422 /* We can only use copy-in/copy-out semantics for shared variables
423 when we know the value is not accessible from an outer scope. */
426 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
428 /* ??? Trivially accessible from anywhere. But why would we even
429 be passing an address in this case? Should we simply assert
430 this to be false, or should we have a cleanup pass that removes
431 these from the list of mappings? */
432 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
435 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
436 without analyzing the expression whether or not its location
437 is accessible to anyone else. In the case of nested parallel
438 regions it certainly may be. */
439 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
442 /* Do not use copy-in/copy-out for variables that have their
444 if (is_global_var (decl
))
446 /* For file scope vars, track whether we've seen them as
447 non-addressable initially and in that case, keep the same
448 answer for the duration of the pass, even when they are made
449 addressable later on e.g. through reduction expansion. Global
450 variables which weren't addressable before the pass will not
451 have their privatized copies address taken. See PR91216. */
452 if (!TREE_ADDRESSABLE (decl
))
454 if (!global_nonaddressable_vars
)
455 global_nonaddressable_vars
= BITMAP_ALLOC (NULL
);
456 bitmap_set_bit (global_nonaddressable_vars
, DECL_UID (decl
));
458 else if (!global_nonaddressable_vars
459 || !bitmap_bit_p (global_nonaddressable_vars
,
463 else if (TREE_ADDRESSABLE (decl
))
466 /* lower_send_shared_vars only uses copy-in, but not copy-out
468 if (TREE_READONLY (decl
)
469 || ((TREE_CODE (decl
) == RESULT_DECL
470 || TREE_CODE (decl
) == PARM_DECL
)
471 && DECL_BY_REFERENCE (decl
)))
474 /* Disallow copy-in/out in nested parallel if
475 decl is shared in outer parallel, otherwise
476 each thread could store the shared variable
477 in its own copy-in location, making the
478 variable no longer really shared. */
479 if (shared_ctx
->is_nested
)
483 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
484 if ((is_taskreg_ctx (up
)
485 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
486 && is_gimple_omp_offloaded (up
->stmt
)))
487 && maybe_lookup_decl (decl
, up
))
494 if (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
)
496 for (c
= gimple_omp_target_clauses (up
->stmt
);
497 c
; c
= OMP_CLAUSE_CHAIN (c
))
498 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
499 && OMP_CLAUSE_DECL (c
) == decl
)
503 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
504 c
; c
= OMP_CLAUSE_CHAIN (c
))
505 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
506 && OMP_CLAUSE_DECL (c
) == decl
)
510 goto maybe_mark_addressable_and_ret
;
514 /* For tasks avoid using copy-in/out. As tasks can be
515 deferred or executed in different thread, when GOMP_task
516 returns, the task hasn't necessarily terminated. */
517 if (is_task_ctx (shared_ctx
))
520 maybe_mark_addressable_and_ret
:
521 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
522 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
524 /* Taking address of OUTER in lower_send_shared_vars
525 might need regimplification of everything that uses the
527 if (!task_shared_vars
)
528 task_shared_vars
= BITMAP_ALLOC (NULL
);
529 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
530 TREE_ADDRESSABLE (outer
) = 1;
539 /* Construct a new automatic decl similar to VAR. */
542 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
544 tree copy
= copy_var_decl (var
, name
, type
);
546 DECL_CONTEXT (copy
) = current_function_decl
;
547 DECL_CHAIN (copy
) = ctx
->block_vars
;
548 /* If VAR is listed in task_shared_vars, it means it wasn't
549 originally addressable and is just because task needs to take
550 it's address. But we don't need to take address of privatizations
552 if (TREE_ADDRESSABLE (var
)
553 && ((task_shared_vars
554 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
555 || (global_nonaddressable_vars
556 && bitmap_bit_p (global_nonaddressable_vars
, DECL_UID (var
)))))
557 TREE_ADDRESSABLE (copy
) = 0;
558 ctx
->block_vars
= copy
;
564 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
566 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
569 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
572 omp_build_component_ref (tree obj
, tree field
)
574 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
575 if (TREE_THIS_VOLATILE (field
))
576 TREE_THIS_VOLATILE (ret
) |= 1;
577 if (TREE_READONLY (field
))
578 TREE_READONLY (ret
) |= 1;
582 /* Build tree nodes to access the field for VAR on the receiver side. */
585 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
587 tree x
, field
= lookup_field (var
, ctx
);
589 /* If the receiver record type was remapped in the child function,
590 remap the field into the new record type. */
591 x
= maybe_lookup_field (field
, ctx
);
595 x
= build_simple_mem_ref (ctx
->receiver_decl
);
596 TREE_THIS_NOTRAP (x
) = 1;
597 x
= omp_build_component_ref (x
, field
);
600 x
= build_simple_mem_ref (x
);
601 TREE_THIS_NOTRAP (x
) = 1;
607 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
608 of a parallel, this is a component reference; for workshare constructs
609 this is some variable. */
612 build_outer_var_ref (tree var
, omp_context
*ctx
,
613 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
616 omp_context
*outer
= ctx
->outer
;
617 while (outer
&& gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
618 outer
= outer
->outer
;
620 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
622 else if (is_variable_sized (var
))
624 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
625 x
= build_outer_var_ref (x
, ctx
, code
);
626 x
= build_simple_mem_ref (x
);
628 else if (is_taskreg_ctx (ctx
))
630 bool by_ref
= use_pointer_for_field (var
, NULL
);
631 x
= build_receiver_ref (var
, by_ref
, ctx
);
633 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
634 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
636 || (code
== OMP_CLAUSE_PRIVATE
637 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
638 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
639 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
641 /* #pragma omp simd isn't a worksharing construct, and can reference
642 even private vars in its linear etc. clauses.
643 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
644 to private vars in all worksharing constructs. */
646 if (outer
&& is_taskreg_ctx (outer
))
647 x
= lookup_decl (var
, outer
);
649 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
653 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
657 = splay_tree_lookup (outer
->field_map
,
658 (splay_tree_key
) &DECL_UID (var
));
661 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
664 x
= lookup_decl (var
, outer
);
668 tree field
= (tree
) n
->value
;
669 /* If the receiver record type was remapped in the child function,
670 remap the field into the new record type. */
671 x
= maybe_lookup_field (field
, outer
);
675 x
= build_simple_mem_ref (outer
->receiver_decl
);
676 x
= omp_build_component_ref (x
, field
);
677 if (use_pointer_for_field (var
, outer
))
678 x
= build_simple_mem_ref (x
);
682 x
= lookup_decl (var
, outer
);
683 else if (omp_is_reference (var
))
684 /* This can happen with orphaned constructs. If var is reference, it is
685 possible it is shared and as such valid. */
687 else if (omp_member_access_dummy_var (var
))
694 tree t
= omp_member_access_dummy_var (var
);
697 x
= DECL_VALUE_EXPR (var
);
698 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
700 x
= unshare_and_remap (x
, t
, o
);
702 x
= unshare_expr (x
);
706 if (omp_is_reference (var
))
707 x
= build_simple_mem_ref (x
);
712 /* Build tree nodes to access the field for VAR on the sender side. */
715 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
717 tree field
= lookup_sfield (key
, ctx
);
718 return omp_build_component_ref (ctx
->sender_decl
, field
);
722 build_sender_ref (tree var
, omp_context
*ctx
)
724 return build_sender_ref ((splay_tree_key
) var
, ctx
);
727 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
728 BASE_POINTERS_RESTRICT, declare the field with restrict. */
731 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
733 tree field
, type
, sfield
= NULL_TREE
;
734 splay_tree_key key
= (splay_tree_key
) var
;
736 if ((mask
& 16) != 0)
738 key
= (splay_tree_key
) &DECL_NAME (var
);
739 gcc_checking_assert (key
!= (splay_tree_key
) var
);
743 key
= (splay_tree_key
) &DECL_UID (var
);
744 gcc_checking_assert (key
!= (splay_tree_key
) var
);
746 gcc_assert ((mask
& 1) == 0
747 || !splay_tree_lookup (ctx
->field_map
, key
));
748 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
749 || !splay_tree_lookup (ctx
->sfield_map
, key
));
750 gcc_assert ((mask
& 3) == 3
751 || !is_gimple_omp_oacc (ctx
->stmt
));
753 type
= TREE_TYPE (var
);
754 if ((mask
& 16) != 0)
755 type
= lang_hooks
.decls
.omp_array_data (var
, true);
757 /* Prevent redeclaring the var in the split-off function with a restrict
758 pointer type. Note that we only clear type itself, restrict qualifiers in
759 the pointed-to type will be ignored by points-to analysis. */
760 if (POINTER_TYPE_P (type
)
761 && TYPE_RESTRICT (type
))
762 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
766 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
767 type
= build_pointer_type (build_pointer_type (type
));
770 type
= build_pointer_type (type
);
771 else if ((mask
& 3) == 1 && omp_is_reference (var
))
772 type
= TREE_TYPE (type
);
774 field
= build_decl (DECL_SOURCE_LOCATION (var
),
775 FIELD_DECL
, DECL_NAME (var
), type
);
777 /* Remember what variable this field was created for. This does have a
778 side effect of making dwarf2out ignore this member, so for helpful
779 debugging we clear it later in delete_omp_context. */
780 DECL_ABSTRACT_ORIGIN (field
) = var
;
781 if ((mask
& 16) == 0 && type
== TREE_TYPE (var
))
783 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
784 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
785 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
788 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
792 insert_field_into_struct (ctx
->record_type
, field
);
793 if (ctx
->srecord_type
)
795 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
796 FIELD_DECL
, DECL_NAME (var
), type
);
797 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
798 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
799 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
800 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
801 insert_field_into_struct (ctx
->srecord_type
, sfield
);
806 if (ctx
->srecord_type
== NULL_TREE
)
810 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
811 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
812 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
814 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
815 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
816 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
817 insert_field_into_struct (ctx
->srecord_type
, sfield
);
818 splay_tree_insert (ctx
->sfield_map
,
819 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
820 (splay_tree_value
) sfield
);
824 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
825 : ctx
->srecord_type
, field
);
829 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
830 if ((mask
& 2) && ctx
->sfield_map
)
831 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
835 install_var_local (tree var
, omp_context
*ctx
)
837 tree new_var
= omp_copy_decl_1 (var
, ctx
);
838 insert_decl_map (&ctx
->cb
, var
, new_var
);
842 /* Adjust the replacement for DECL in CTX for the new context. This means
843 copying the DECL_VALUE_EXPR, and fixing up the type. */
846 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
850 new_decl
= lookup_decl (decl
, ctx
);
852 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
854 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
855 && DECL_HAS_VALUE_EXPR_P (decl
))
857 tree ve
= DECL_VALUE_EXPR (decl
);
858 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
859 SET_DECL_VALUE_EXPR (new_decl
, ve
);
860 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
863 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
865 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
866 if (size
== error_mark_node
)
867 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
868 DECL_SIZE (new_decl
) = size
;
870 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
871 if (size
== error_mark_node
)
872 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
873 DECL_SIZE_UNIT (new_decl
) = size
;
877 /* The callback for remap_decl. Search all containing contexts for a
878 mapping of the variable; this avoids having to duplicate the splay
879 tree ahead of time. We know a mapping doesn't already exist in the
880 given context. Create new mappings to implement default semantics. */
883 omp_copy_decl (tree var
, copy_body_data
*cb
)
885 omp_context
*ctx
= (omp_context
*) cb
;
888 if (TREE_CODE (var
) == LABEL_DECL
)
890 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
892 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
893 DECL_CONTEXT (new_var
) = current_function_decl
;
894 insert_decl_map (&ctx
->cb
, var
, new_var
);
898 while (!is_taskreg_ctx (ctx
))
903 new_var
= maybe_lookup_decl (var
, ctx
);
908 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
911 return error_mark_node
;
914 /* Create a new context, with OUTER_CTX being the surrounding context. */
917 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
919 omp_context
*ctx
= XCNEW (omp_context
);
921 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
922 (splay_tree_value
) ctx
);
927 ctx
->outer
= outer_ctx
;
928 ctx
->cb
= outer_ctx
->cb
;
929 ctx
->cb
.block
= NULL
;
930 ctx
->depth
= outer_ctx
->depth
+ 1;
934 ctx
->cb
.src_fn
= current_function_decl
;
935 ctx
->cb
.dst_fn
= current_function_decl
;
936 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
937 gcc_checking_assert (ctx
->cb
.src_node
);
938 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
939 ctx
->cb
.src_cfun
= cfun
;
940 ctx
->cb
.copy_decl
= omp_copy_decl
;
941 ctx
->cb
.eh_lp_nr
= 0;
942 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
943 ctx
->cb
.adjust_array_error_bounds
= true;
944 ctx
->cb
.dont_remap_vla_if_no_change
= true;
948 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
953 static gimple_seq
maybe_catch_exception (gimple_seq
);
955 /* Finalize task copyfn. */
958 finalize_task_copyfn (gomp_task
*task_stmt
)
960 struct function
*child_cfun
;
962 gimple_seq seq
= NULL
, new_seq
;
965 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
966 if (child_fn
== NULL_TREE
)
969 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
970 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
972 push_cfun (child_cfun
);
973 bind
= gimplify_body (child_fn
, false);
974 gimple_seq_add_stmt (&seq
, bind
);
975 new_seq
= maybe_catch_exception (seq
);
978 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
980 gimple_seq_add_stmt (&seq
, bind
);
982 gimple_set_body (child_fn
, seq
);
985 /* Inform the callgraph about the new function. */
986 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
987 node
->parallelized_function
= 1;
988 cgraph_node::add_new_function (child_fn
, false);
991 /* Destroy a omp_context data structures. Called through the splay tree
992 value delete callback. */
995 delete_omp_context (splay_tree_value value
)
997 omp_context
*ctx
= (omp_context
*) value
;
999 delete ctx
->cb
.decl_map
;
1002 splay_tree_delete (ctx
->field_map
);
1003 if (ctx
->sfield_map
)
1004 splay_tree_delete (ctx
->sfield_map
);
1006 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1007 it produces corrupt debug information. */
1008 if (ctx
->record_type
)
1011 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
1012 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1014 if (ctx
->srecord_type
)
1017 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
1018 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1021 if (is_task_ctx (ctx
))
1022 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
1024 if (ctx
->task_reduction_map
)
1026 ctx
->task_reductions
.release ();
1027 delete ctx
->task_reduction_map
;
1030 delete ctx
->lastprivate_conditional_map
;
1035 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1039 fixup_child_record_type (omp_context
*ctx
)
1041 tree f
, type
= ctx
->record_type
;
1043 if (!ctx
->receiver_decl
)
1045 /* ??? It isn't sufficient to just call remap_type here, because
1046 variably_modified_type_p doesn't work the way we expect for
1047 record types. Testing each field for whether it needs remapping
1048 and creating a new record by hand works, however. */
1049 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
1050 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
1054 tree name
, new_fields
= NULL
;
1056 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1057 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1058 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1059 TYPE_DECL
, name
, type
);
1060 TYPE_NAME (type
) = name
;
1062 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1064 tree new_f
= copy_node (f
);
1065 DECL_CONTEXT (new_f
) = type
;
1066 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1067 DECL_CHAIN (new_f
) = new_fields
;
1068 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1069 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1071 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1075 /* Arrange to be able to look up the receiver field
1076 given the sender field. */
1077 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1078 (splay_tree_value
) new_f
);
1080 TYPE_FIELDS (type
) = nreverse (new_fields
);
1084 /* In a target region we never modify any of the pointers in *.omp_data_i,
1085 so attempt to help the optimizers. */
1086 if (is_gimple_omp_offloaded (ctx
->stmt
))
1087 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1089 TREE_TYPE (ctx
->receiver_decl
)
1090 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1093 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1094 specified by CLAUSES. */
1097 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1100 bool scan_array_reductions
= false;
1102 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1106 switch (OMP_CLAUSE_CODE (c
))
1108 case OMP_CLAUSE_PRIVATE
:
1109 decl
= OMP_CLAUSE_DECL (c
);
1110 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1112 else if (!is_variable_sized (decl
))
1113 install_var_local (decl
, ctx
);
1116 case OMP_CLAUSE_SHARED
:
1117 decl
= OMP_CLAUSE_DECL (c
);
1118 /* Ignore shared directives in teams construct inside of
1119 target construct. */
1120 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1121 && !is_host_teams_ctx (ctx
))
1123 /* Global variables don't need to be copied,
1124 the receiver side will use them directly. */
1125 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1126 if (is_global_var (odecl
))
1128 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1131 gcc_assert (is_taskreg_ctx (ctx
));
1132 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1133 || !is_variable_sized (decl
));
1134 /* Global variables don't need to be copied,
1135 the receiver side will use them directly. */
1136 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1138 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1140 use_pointer_for_field (decl
, ctx
);
1143 by_ref
= use_pointer_for_field (decl
, NULL
);
1144 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1145 || TREE_ADDRESSABLE (decl
)
1147 || omp_is_reference (decl
))
1149 by_ref
= use_pointer_for_field (decl
, ctx
);
1150 install_var_field (decl
, by_ref
, 3, ctx
);
1151 install_var_local (decl
, ctx
);
1154 /* We don't need to copy const scalar vars back. */
1155 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1158 case OMP_CLAUSE_REDUCTION
:
1159 if (is_oacc_parallel_or_serial (ctx
) || is_oacc_kernels (ctx
))
1160 ctx
->local_reduction_clauses
1161 = tree_cons (NULL
, c
, ctx
->local_reduction_clauses
);
1164 case OMP_CLAUSE_IN_REDUCTION
:
1165 decl
= OMP_CLAUSE_DECL (c
);
1166 if (TREE_CODE (decl
) == MEM_REF
)
1168 tree t
= TREE_OPERAND (decl
, 0);
1169 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1170 t
= TREE_OPERAND (t
, 0);
1171 if (TREE_CODE (t
) == INDIRECT_REF
1172 || TREE_CODE (t
) == ADDR_EXPR
)
1173 t
= TREE_OPERAND (t
, 0);
1174 install_var_local (t
, ctx
);
1175 if (is_taskreg_ctx (ctx
)
1176 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1177 || (is_task_ctx (ctx
)
1178 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1179 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1180 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1181 == POINTER_TYPE
)))))
1182 && !is_variable_sized (t
)
1183 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1184 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1185 && !is_task_ctx (ctx
))))
1187 by_ref
= use_pointer_for_field (t
, NULL
);
1188 if (is_task_ctx (ctx
)
1189 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1190 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1192 install_var_field (t
, false, 1, ctx
);
1193 install_var_field (t
, by_ref
, 2, ctx
);
1196 install_var_field (t
, by_ref
, 3, ctx
);
1200 if (is_task_ctx (ctx
)
1201 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1202 && OMP_CLAUSE_REDUCTION_TASK (c
)
1203 && is_parallel_ctx (ctx
)))
1205 /* Global variables don't need to be copied,
1206 the receiver side will use them directly. */
1207 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1209 by_ref
= use_pointer_for_field (decl
, ctx
);
1210 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1211 install_var_field (decl
, by_ref
, 3, ctx
);
1213 install_var_local (decl
, ctx
);
1216 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1217 && OMP_CLAUSE_REDUCTION_TASK (c
))
1219 install_var_local (decl
, ctx
);
1224 case OMP_CLAUSE_LASTPRIVATE
:
1225 /* Let the corresponding firstprivate clause create
1227 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1231 case OMP_CLAUSE_FIRSTPRIVATE
:
1232 case OMP_CLAUSE_LINEAR
:
1233 decl
= OMP_CLAUSE_DECL (c
);
1235 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1236 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1237 && is_gimple_omp_offloaded (ctx
->stmt
))
1239 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1240 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1241 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1242 install_var_field (decl
, true, 3, ctx
);
1244 install_var_field (decl
, false, 3, ctx
);
1246 if (is_variable_sized (decl
))
1248 if (is_task_ctx (ctx
))
1249 install_var_field (decl
, false, 1, ctx
);
1252 else if (is_taskreg_ctx (ctx
))
1255 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1256 by_ref
= use_pointer_for_field (decl
, NULL
);
1258 if (is_task_ctx (ctx
)
1259 && (global
|| by_ref
|| omp_is_reference (decl
)))
1261 install_var_field (decl
, false, 1, ctx
);
1263 install_var_field (decl
, by_ref
, 2, ctx
);
1266 install_var_field (decl
, by_ref
, 3, ctx
);
1268 install_var_local (decl
, ctx
);
1271 case OMP_CLAUSE_USE_DEVICE_PTR
:
1272 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1273 decl
= OMP_CLAUSE_DECL (c
);
1275 /* Fortran array descriptors. */
1276 if (lang_hooks
.decls
.omp_array_data (decl
, true))
1277 install_var_field (decl
, false, 19, ctx
);
1278 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
1279 && !omp_is_reference (decl
)
1280 && !omp_is_allocatable_or_ptr (decl
))
1281 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1282 install_var_field (decl
, true, 11, ctx
);
1284 install_var_field (decl
, false, 11, ctx
);
1285 if (DECL_SIZE (decl
)
1286 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1288 tree decl2
= DECL_VALUE_EXPR (decl
);
1289 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1290 decl2
= TREE_OPERAND (decl2
, 0);
1291 gcc_assert (DECL_P (decl2
));
1292 install_var_local (decl2
, ctx
);
1294 install_var_local (decl
, ctx
);
1297 case OMP_CLAUSE_IS_DEVICE_PTR
:
1298 decl
= OMP_CLAUSE_DECL (c
);
1301 case OMP_CLAUSE__LOOPTEMP_
:
1302 case OMP_CLAUSE__REDUCTEMP_
:
1303 gcc_assert (is_taskreg_ctx (ctx
));
1304 decl
= OMP_CLAUSE_DECL (c
);
1305 install_var_field (decl
, false, 3, ctx
);
1306 install_var_local (decl
, ctx
);
1309 case OMP_CLAUSE_COPYPRIVATE
:
1310 case OMP_CLAUSE_COPYIN
:
1311 decl
= OMP_CLAUSE_DECL (c
);
1312 by_ref
= use_pointer_for_field (decl
, NULL
);
1313 install_var_field (decl
, by_ref
, 3, ctx
);
1316 case OMP_CLAUSE_FINAL
:
1318 case OMP_CLAUSE_NUM_THREADS
:
1319 case OMP_CLAUSE_NUM_TEAMS
:
1320 case OMP_CLAUSE_THREAD_LIMIT
:
1321 case OMP_CLAUSE_DEVICE
:
1322 case OMP_CLAUSE_SCHEDULE
:
1323 case OMP_CLAUSE_DIST_SCHEDULE
:
1324 case OMP_CLAUSE_DEPEND
:
1325 case OMP_CLAUSE_PRIORITY
:
1326 case OMP_CLAUSE_GRAINSIZE
:
1327 case OMP_CLAUSE_NUM_TASKS
:
1328 case OMP_CLAUSE_NUM_GANGS
:
1329 case OMP_CLAUSE_NUM_WORKERS
:
1330 case OMP_CLAUSE_VECTOR_LENGTH
:
1332 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1336 case OMP_CLAUSE_FROM
:
1337 case OMP_CLAUSE_MAP
:
1339 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1340 decl
= OMP_CLAUSE_DECL (c
);
1341 /* Global variables with "omp declare target" attribute
1342 don't need to be copied, the receiver side will use them
1343 directly. However, global variables with "omp declare target link"
1344 attribute need to be copied. Or when ALWAYS modifier is used. */
1345 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1347 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1348 && (OMP_CLAUSE_MAP_KIND (c
)
1349 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1350 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1351 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1352 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1353 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1354 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
1355 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1356 && varpool_node::get_create (decl
)->offloadable
1357 && !lookup_attribute ("omp declare target link",
1358 DECL_ATTRIBUTES (decl
)))
1360 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1361 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1363 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1364 not offloaded; there is nothing to map for those. */
1365 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1366 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1367 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1370 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1371 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1372 || (OMP_CLAUSE_MAP_KIND (c
)
1373 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1375 if (TREE_CODE (decl
) == COMPONENT_REF
1376 || (TREE_CODE (decl
) == INDIRECT_REF
1377 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1378 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1379 == REFERENCE_TYPE
)))
1381 if (DECL_SIZE (decl
)
1382 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1384 tree decl2
= DECL_VALUE_EXPR (decl
);
1385 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1386 decl2
= TREE_OPERAND (decl2
, 0);
1387 gcc_assert (DECL_P (decl2
));
1388 install_var_local (decl2
, ctx
);
1390 install_var_local (decl
, ctx
);
1395 if (DECL_SIZE (decl
)
1396 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1398 tree decl2
= DECL_VALUE_EXPR (decl
);
1399 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1400 decl2
= TREE_OPERAND (decl2
, 0);
1401 gcc_assert (DECL_P (decl2
));
1402 install_var_field (decl2
, true, 3, ctx
);
1403 install_var_local (decl2
, ctx
);
1404 install_var_local (decl
, ctx
);
1408 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1409 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1410 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1411 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1412 install_var_field (decl
, true, 7, ctx
);
1414 install_var_field (decl
, true, 3, ctx
);
1415 if (is_gimple_omp_offloaded (ctx
->stmt
)
1416 && !OMP_CLAUSE_MAP_IN_REDUCTION (c
))
1417 install_var_local (decl
, ctx
);
1422 tree base
= get_base_address (decl
);
1423 tree nc
= OMP_CLAUSE_CHAIN (c
);
1426 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1427 && OMP_CLAUSE_DECL (nc
) == base
1428 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1429 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1431 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1432 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1438 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1439 decl
= OMP_CLAUSE_DECL (c
);
1441 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1442 (splay_tree_key
) decl
));
1444 = build_decl (OMP_CLAUSE_LOCATION (c
),
1445 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1446 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1447 insert_field_into_struct (ctx
->record_type
, field
);
1448 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1449 (splay_tree_value
) field
);
1454 case OMP_CLAUSE_ORDER
:
1455 ctx
->order_concurrent
= true;
1458 case OMP_CLAUSE_BIND
:
1462 case OMP_CLAUSE_NOWAIT
:
1463 case OMP_CLAUSE_ORDERED
:
1464 case OMP_CLAUSE_COLLAPSE
:
1465 case OMP_CLAUSE_UNTIED
:
1466 case OMP_CLAUSE_MERGEABLE
:
1467 case OMP_CLAUSE_PROC_BIND
:
1468 case OMP_CLAUSE_SAFELEN
:
1469 case OMP_CLAUSE_SIMDLEN
:
1470 case OMP_CLAUSE_THREADS
:
1471 case OMP_CLAUSE_SIMD
:
1472 case OMP_CLAUSE_NOGROUP
:
1473 case OMP_CLAUSE_DEFAULTMAP
:
1474 case OMP_CLAUSE_ASYNC
:
1475 case OMP_CLAUSE_WAIT
:
1476 case OMP_CLAUSE_GANG
:
1477 case OMP_CLAUSE_WORKER
:
1478 case OMP_CLAUSE_VECTOR
:
1479 case OMP_CLAUSE_INDEPENDENT
:
1480 case OMP_CLAUSE_AUTO
:
1481 case OMP_CLAUSE_SEQ
:
1482 case OMP_CLAUSE_TILE
:
1483 case OMP_CLAUSE__SIMT_
:
1484 case OMP_CLAUSE_DEFAULT
:
1485 case OMP_CLAUSE_NONTEMPORAL
:
1486 case OMP_CLAUSE_IF_PRESENT
:
1487 case OMP_CLAUSE_FINALIZE
:
1488 case OMP_CLAUSE_TASK_REDUCTION
:
1491 case OMP_CLAUSE_ALIGNED
:
1492 decl
= OMP_CLAUSE_DECL (c
);
1493 if (is_global_var (decl
)
1494 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1495 install_var_local (decl
, ctx
);
1498 case OMP_CLAUSE__CONDTEMP_
:
1499 decl
= OMP_CLAUSE_DECL (c
);
1500 if (is_parallel_ctx (ctx
))
1502 install_var_field (decl
, false, 3, ctx
);
1503 install_var_local (decl
, ctx
);
1505 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1506 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1507 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1508 install_var_local (decl
, ctx
);
1511 case OMP_CLAUSE__CACHE_
:
1517 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1519 switch (OMP_CLAUSE_CODE (c
))
1521 case OMP_CLAUSE_LASTPRIVATE
:
1522 /* Let the corresponding firstprivate clause create
1524 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1525 scan_array_reductions
= true;
1526 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1530 case OMP_CLAUSE_FIRSTPRIVATE
:
1531 case OMP_CLAUSE_PRIVATE
:
1532 case OMP_CLAUSE_LINEAR
:
1533 case OMP_CLAUSE_IS_DEVICE_PTR
:
1534 decl
= OMP_CLAUSE_DECL (c
);
1535 if (is_variable_sized (decl
))
1537 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1538 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1539 && is_gimple_omp_offloaded (ctx
->stmt
))
1541 tree decl2
= DECL_VALUE_EXPR (decl
);
1542 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1543 decl2
= TREE_OPERAND (decl2
, 0);
1544 gcc_assert (DECL_P (decl2
));
1545 install_var_local (decl2
, ctx
);
1546 fixup_remapped_decl (decl2
, ctx
, false);
1548 install_var_local (decl
, ctx
);
1550 fixup_remapped_decl (decl
, ctx
,
1551 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1552 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1553 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1554 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1555 scan_array_reductions
= true;
1558 case OMP_CLAUSE_REDUCTION
:
1559 case OMP_CLAUSE_IN_REDUCTION
:
1560 decl
= OMP_CLAUSE_DECL (c
);
1561 if (TREE_CODE (decl
) != MEM_REF
)
1563 if (is_variable_sized (decl
))
1564 install_var_local (decl
, ctx
);
1565 fixup_remapped_decl (decl
, ctx
, false);
1567 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1568 scan_array_reductions
= true;
1571 case OMP_CLAUSE_TASK_REDUCTION
:
1572 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1573 scan_array_reductions
= true;
1576 case OMP_CLAUSE_SHARED
:
1577 /* Ignore shared directives in teams construct inside of
1578 target construct. */
1579 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1580 && !is_host_teams_ctx (ctx
))
1582 decl
= OMP_CLAUSE_DECL (c
);
1583 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1585 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1587 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1590 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1591 install_var_field (decl
, by_ref
, 11, ctx
);
1594 fixup_remapped_decl (decl
, ctx
, false);
1597 case OMP_CLAUSE_MAP
:
1598 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1600 decl
= OMP_CLAUSE_DECL (c
);
1602 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1603 && (OMP_CLAUSE_MAP_KIND (c
)
1604 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1605 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1606 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1607 && varpool_node::get_create (decl
)->offloadable
)
1611 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1612 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1613 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1614 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1616 tree new_decl
= lookup_decl (decl
, ctx
);
1617 TREE_TYPE (new_decl
)
1618 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1620 else if (DECL_SIZE (decl
)
1621 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1623 tree decl2
= DECL_VALUE_EXPR (decl
);
1624 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1625 decl2
= TREE_OPERAND (decl2
, 0);
1626 gcc_assert (DECL_P (decl2
));
1627 fixup_remapped_decl (decl2
, ctx
, false);
1628 fixup_remapped_decl (decl
, ctx
, true);
1631 fixup_remapped_decl (decl
, ctx
, false);
1635 case OMP_CLAUSE_COPYPRIVATE
:
1636 case OMP_CLAUSE_COPYIN
:
1637 case OMP_CLAUSE_DEFAULT
:
1639 case OMP_CLAUSE_NUM_THREADS
:
1640 case OMP_CLAUSE_NUM_TEAMS
:
1641 case OMP_CLAUSE_THREAD_LIMIT
:
1642 case OMP_CLAUSE_DEVICE
:
1643 case OMP_CLAUSE_SCHEDULE
:
1644 case OMP_CLAUSE_DIST_SCHEDULE
:
1645 case OMP_CLAUSE_NOWAIT
:
1646 case OMP_CLAUSE_ORDERED
:
1647 case OMP_CLAUSE_COLLAPSE
:
1648 case OMP_CLAUSE_UNTIED
:
1649 case OMP_CLAUSE_FINAL
:
1650 case OMP_CLAUSE_MERGEABLE
:
1651 case OMP_CLAUSE_PROC_BIND
:
1652 case OMP_CLAUSE_SAFELEN
:
1653 case OMP_CLAUSE_SIMDLEN
:
1654 case OMP_CLAUSE_ALIGNED
:
1655 case OMP_CLAUSE_DEPEND
:
1656 case OMP_CLAUSE__LOOPTEMP_
:
1657 case OMP_CLAUSE__REDUCTEMP_
:
1659 case OMP_CLAUSE_FROM
:
1660 case OMP_CLAUSE_PRIORITY
:
1661 case OMP_CLAUSE_GRAINSIZE
:
1662 case OMP_CLAUSE_NUM_TASKS
:
1663 case OMP_CLAUSE_THREADS
:
1664 case OMP_CLAUSE_SIMD
:
1665 case OMP_CLAUSE_NOGROUP
:
1666 case OMP_CLAUSE_DEFAULTMAP
:
1667 case OMP_CLAUSE_ORDER
:
1668 case OMP_CLAUSE_BIND
:
1669 case OMP_CLAUSE_USE_DEVICE_PTR
:
1670 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1671 case OMP_CLAUSE_NONTEMPORAL
:
1672 case OMP_CLAUSE_ASYNC
:
1673 case OMP_CLAUSE_WAIT
:
1674 case OMP_CLAUSE_NUM_GANGS
:
1675 case OMP_CLAUSE_NUM_WORKERS
:
1676 case OMP_CLAUSE_VECTOR_LENGTH
:
1677 case OMP_CLAUSE_GANG
:
1678 case OMP_CLAUSE_WORKER
:
1679 case OMP_CLAUSE_VECTOR
:
1680 case OMP_CLAUSE_INDEPENDENT
:
1681 case OMP_CLAUSE_AUTO
:
1682 case OMP_CLAUSE_SEQ
:
1683 case OMP_CLAUSE_TILE
:
1684 case OMP_CLAUSE__SIMT_
:
1685 case OMP_CLAUSE_IF_PRESENT
:
1686 case OMP_CLAUSE_FINALIZE
:
1687 case OMP_CLAUSE__CONDTEMP_
:
1690 case OMP_CLAUSE__CACHE_
:
1696 gcc_checking_assert (!scan_array_reductions
1697 || !is_gimple_omp_oacc (ctx
->stmt
));
1698 if (scan_array_reductions
)
1700 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1701 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1702 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1703 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1704 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1706 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), ctx
);
1707 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
1709 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1710 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1711 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1712 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1713 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1714 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1718 /* Create a new name for omp child function. Returns an identifier. */
1721 create_omp_child_function_name (bool task_copy
)
1723 return clone_function_name_numbered (current_function_decl
,
1724 task_copy
? "_omp_cpyfn" : "_omp_fn");
1727 /* Return true if CTX may belong to offloaded code: either if current function
1728 is offloaded, or any enclosing context corresponds to a target region. */
1731 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1733 if (cgraph_node::get (current_function_decl
)->offloadable
)
1735 for (; ctx
; ctx
= ctx
->outer
)
1736 if (is_gimple_omp_offloaded (ctx
->stmt
))
1741 /* Build a decl for the omp child function. It'll not contain a body
1742 yet, just the bare decl. */
1745 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1747 tree decl
, type
, name
, t
;
1749 name
= create_omp_child_function_name (task_copy
);
1751 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1752 ptr_type_node
, NULL_TREE
);
1754 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1756 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1758 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1761 ctx
->cb
.dst_fn
= decl
;
1763 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1765 TREE_STATIC (decl
) = 1;
1766 TREE_USED (decl
) = 1;
1767 DECL_ARTIFICIAL (decl
) = 1;
1768 DECL_IGNORED_P (decl
) = 0;
1769 TREE_PUBLIC (decl
) = 0;
1770 DECL_UNINLINABLE (decl
) = 1;
1771 DECL_EXTERNAL (decl
) = 0;
1772 DECL_CONTEXT (decl
) = NULL_TREE
;
1773 DECL_INITIAL (decl
) = make_node (BLOCK
);
1774 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1775 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1776 /* Remove omp declare simd attribute from the new attributes. */
1777 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1779 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1782 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1783 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1784 *p
= TREE_CHAIN (*p
);
1787 tree chain
= TREE_CHAIN (*p
);
1788 *p
= copy_node (*p
);
1789 p
= &TREE_CHAIN (*p
);
1793 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1794 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1795 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
1796 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
1797 DECL_FUNCTION_VERSIONED (decl
)
1798 = DECL_FUNCTION_VERSIONED (current_function_decl
);
1800 if (omp_maybe_offloaded_ctx (ctx
))
1802 cgraph_node::get_create (decl
)->offloadable
= 1;
1803 if (ENABLE_OFFLOADING
)
1804 g
->have_offload
= true;
1807 if (cgraph_node::get_create (decl
)->offloadable
1808 && !lookup_attribute ("omp declare target",
1809 DECL_ATTRIBUTES (current_function_decl
)))
1811 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1812 ? "omp target entrypoint"
1813 : "omp declare target");
1814 DECL_ATTRIBUTES (decl
)
1815 = tree_cons (get_identifier (target_attr
),
1816 NULL_TREE
, DECL_ATTRIBUTES (decl
));
1819 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1820 RESULT_DECL
, NULL_TREE
, void_type_node
);
1821 DECL_ARTIFICIAL (t
) = 1;
1822 DECL_IGNORED_P (t
) = 1;
1823 DECL_CONTEXT (t
) = decl
;
1824 DECL_RESULT (decl
) = t
;
1826 tree data_name
= get_identifier (".omp_data_i");
1827 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
1829 DECL_ARTIFICIAL (t
) = 1;
1830 DECL_NAMELESS (t
) = 1;
1831 DECL_ARG_TYPE (t
) = ptr_type_node
;
1832 DECL_CONTEXT (t
) = current_function_decl
;
1834 TREE_READONLY (t
) = 1;
1835 DECL_ARGUMENTS (decl
) = t
;
1837 ctx
->receiver_decl
= t
;
1840 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1841 PARM_DECL
, get_identifier (".omp_data_o"),
1843 DECL_ARTIFICIAL (t
) = 1;
1844 DECL_NAMELESS (t
) = 1;
1845 DECL_ARG_TYPE (t
) = ptr_type_node
;
1846 DECL_CONTEXT (t
) = current_function_decl
;
1848 TREE_ADDRESSABLE (t
) = 1;
1849 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1850 DECL_ARGUMENTS (decl
) = t
;
1853 /* Allocate memory for the function structure. The call to
1854 allocate_struct_function clobbers CFUN, so we need to restore
1856 push_struct_function (decl
);
1857 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
1858 init_tree_ssa (cfun
);
1862 /* Callback for walk_gimple_seq. Check if combined parallel
1863 contains gimple_omp_for_combined_into_p OMP_FOR. */
1866 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
1867 bool *handled_ops_p
,
1868 struct walk_stmt_info
*wi
)
1870 gimple
*stmt
= gsi_stmt (*gsi_p
);
1872 *handled_ops_p
= true;
1873 switch (gimple_code (stmt
))
1877 case GIMPLE_OMP_FOR
:
1878 if (gimple_omp_for_combined_into_p (stmt
)
1879 && gimple_omp_for_kind (stmt
)
1880 == *(const enum gf_mask
*) (wi
->info
))
1883 return integer_zero_node
;
1892 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1895 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
1896 omp_context
*outer_ctx
)
1898 struct walk_stmt_info wi
;
1900 memset (&wi
, 0, sizeof (wi
));
1902 wi
.info
= (void *) &msk
;
1903 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
1904 if (wi
.info
!= (void *) &msk
)
1906 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
1907 struct omp_for_data fd
;
1908 omp_extract_for_data (for_stmt
, &fd
, NULL
);
1909 /* We need two temporaries with fd.loop.v type (istart/iend)
1910 and then (fd.collapse - 1) temporaries with the same
1911 type for count2 ... countN-1 vars if not constant. */
1912 size_t count
= 2, i
;
1913 tree type
= fd
.iter_type
;
1915 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
1917 count
+= fd
.collapse
- 1;
1918 /* If there are lastprivate clauses on the inner
1919 GIMPLE_OMP_FOR, add one more temporaries for the total number
1920 of iterations (product of count1 ... countN-1). */
1921 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
1922 OMP_CLAUSE_LASTPRIVATE
))
1924 else if (msk
== GF_OMP_FOR_KIND_FOR
1925 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1926 OMP_CLAUSE_LASTPRIVATE
))
1929 for (i
= 0; i
< count
; i
++)
1931 tree temp
= create_tmp_var (type
);
1932 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
1933 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1934 OMP_CLAUSE_DECL (c
) = temp
;
1935 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1936 gimple_omp_taskreg_set_clauses (stmt
, c
);
1939 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
1940 && omp_find_clause (gimple_omp_task_clauses (stmt
),
1941 OMP_CLAUSE_REDUCTION
))
1943 tree type
= build_pointer_type (pointer_sized_int_node
);
1944 tree temp
= create_tmp_var (type
);
1945 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1946 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1947 OMP_CLAUSE_DECL (c
) = temp
;
1948 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
1949 gimple_omp_task_set_clauses (stmt
, c
);
1953 /* Scan an OpenMP parallel directive. */
1956 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1960 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
1962 /* Ignore parallel directives with empty bodies, unless there
1963 are copyin clauses. */
1965 && empty_body_p (gimple_omp_body (stmt
))
1966 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1967 OMP_CLAUSE_COPYIN
) == NULL
)
1969 gsi_replace (gsi
, gimple_build_nop (), false);
1973 if (gimple_omp_parallel_combined_p (stmt
))
1974 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
1975 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1976 OMP_CLAUSE_REDUCTION
);
1977 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
1978 if (OMP_CLAUSE_REDUCTION_TASK (c
))
1980 tree type
= build_pointer_type (pointer_sized_int_node
);
1981 tree temp
= create_tmp_var (type
);
1982 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1984 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1985 OMP_CLAUSE_DECL (c
) = temp
;
1986 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
1987 gimple_omp_parallel_set_clauses (stmt
, c
);
1990 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
1993 ctx
= new_omp_context (stmt
, outer_ctx
);
1994 taskreg_contexts
.safe_push (ctx
);
1995 if (taskreg_nesting_level
> 1)
1996 ctx
->is_nested
= true;
1997 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1998 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1999 name
= create_tmp_var_name (".omp_data_s");
2000 name
= build_decl (gimple_location (stmt
),
2001 TYPE_DECL
, name
, ctx
->record_type
);
2002 DECL_ARTIFICIAL (name
) = 1;
2003 DECL_NAMELESS (name
) = 1;
2004 TYPE_NAME (ctx
->record_type
) = name
;
2005 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2006 create_omp_child_function (ctx
, false);
2007 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2009 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
2010 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2012 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2013 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2016 /* Scan an OpenMP task directive. */
2019 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2023 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
2025 /* Ignore task directives with empty bodies, unless they have depend
2028 && gimple_omp_body (stmt
)
2029 && empty_body_p (gimple_omp_body (stmt
))
2030 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
2032 gsi_replace (gsi
, gimple_build_nop (), false);
2036 if (gimple_omp_task_taskloop_p (stmt
))
2037 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
2039 ctx
= new_omp_context (stmt
, outer_ctx
);
2041 if (gimple_omp_task_taskwait_p (stmt
))
2043 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2047 taskreg_contexts
.safe_push (ctx
);
2048 if (taskreg_nesting_level
> 1)
2049 ctx
->is_nested
= true;
2050 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2051 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2052 name
= create_tmp_var_name (".omp_data_s");
2053 name
= build_decl (gimple_location (stmt
),
2054 TYPE_DECL
, name
, ctx
->record_type
);
2055 DECL_ARTIFICIAL (name
) = 1;
2056 DECL_NAMELESS (name
) = 1;
2057 TYPE_NAME (ctx
->record_type
) = name
;
2058 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2059 create_omp_child_function (ctx
, false);
2060 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2062 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2064 if (ctx
->srecord_type
)
2066 name
= create_tmp_var_name (".omp_data_a");
2067 name
= build_decl (gimple_location (stmt
),
2068 TYPE_DECL
, name
, ctx
->srecord_type
);
2069 DECL_ARTIFICIAL (name
) = 1;
2070 DECL_NAMELESS (name
) = 1;
2071 TYPE_NAME (ctx
->srecord_type
) = name
;
2072 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2073 create_omp_child_function (ctx
, true);
2076 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2078 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2080 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2081 t
= build_int_cst (long_integer_type_node
, 0);
2082 gimple_omp_task_set_arg_size (stmt
, t
);
2083 t
= build_int_cst (long_integer_type_node
, 1);
2084 gimple_omp_task_set_arg_align (stmt
, t
);
2088 /* Helper function for finish_taskreg_scan, called through walk_tree.
2089 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2090 tree, replace it in the expression. */
2093 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2097 omp_context
*ctx
= (omp_context
*) data
;
2098 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
2101 if (DECL_HAS_VALUE_EXPR_P (t
))
2102 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2107 else if (IS_TYPE_OR_DECL_P (*tp
))
2112 /* If any decls have been made addressable during scan_omp,
2113 adjust their fields if needed, and layout record types
2114 of parallel/task constructs. */
2117 finish_taskreg_scan (omp_context
*ctx
)
2119 if (ctx
->record_type
== NULL_TREE
)
2122 /* If any task_shared_vars were needed, verify all
2123 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2124 statements if use_pointer_for_field hasn't changed
2125 because of that. If it did, update field types now. */
2126 if (task_shared_vars
)
2130 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2131 c
; c
= OMP_CLAUSE_CHAIN (c
))
2132 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2133 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2135 tree decl
= OMP_CLAUSE_DECL (c
);
2137 /* Global variables don't need to be copied,
2138 the receiver side will use them directly. */
2139 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2141 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
2142 || !use_pointer_for_field (decl
, ctx
))
2144 tree field
= lookup_field (decl
, ctx
);
2145 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2146 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2148 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2149 TREE_THIS_VOLATILE (field
) = 0;
2150 DECL_USER_ALIGN (field
) = 0;
2151 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2152 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2153 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
2154 if (ctx
->srecord_type
)
2156 tree sfield
= lookup_sfield (decl
, ctx
);
2157 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2158 TREE_THIS_VOLATILE (sfield
) = 0;
2159 DECL_USER_ALIGN (sfield
) = 0;
2160 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2161 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2162 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
2167 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2169 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2170 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2173 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2174 expects to find it at the start of data. */
2175 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2176 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2180 *p
= DECL_CHAIN (*p
);
2184 p
= &DECL_CHAIN (*p
);
2185 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2186 TYPE_FIELDS (ctx
->record_type
) = f
;
2188 layout_type (ctx
->record_type
);
2189 fixup_child_record_type (ctx
);
2191 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2193 layout_type (ctx
->record_type
);
2194 fixup_child_record_type (ctx
);
2198 location_t loc
= gimple_location (ctx
->stmt
);
2199 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2200 /* Move VLA fields to the end. */
2201 p
= &TYPE_FIELDS (ctx
->record_type
);
2203 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2204 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2207 *p
= TREE_CHAIN (*p
);
2208 TREE_CHAIN (*q
) = NULL_TREE
;
2209 q
= &TREE_CHAIN (*q
);
2212 p
= &DECL_CHAIN (*p
);
2214 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2216 /* Move fields corresponding to first and second _looptemp_
2217 clause first. There are filled by GOMP_taskloop
2218 and thus need to be in specific positions. */
2219 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2220 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2221 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2222 OMP_CLAUSE__LOOPTEMP_
);
2223 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2224 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2225 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2226 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
2227 p
= &TYPE_FIELDS (ctx
->record_type
);
2229 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2230 *p
= DECL_CHAIN (*p
);
2232 p
= &DECL_CHAIN (*p
);
2233 DECL_CHAIN (f1
) = f2
;
2236 DECL_CHAIN (f2
) = f3
;
2237 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2240 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2241 TYPE_FIELDS (ctx
->record_type
) = f1
;
2242 if (ctx
->srecord_type
)
2244 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2245 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2247 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2248 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2250 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2251 *p
= DECL_CHAIN (*p
);
2253 p
= &DECL_CHAIN (*p
);
2254 DECL_CHAIN (f1
) = f2
;
2255 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2258 DECL_CHAIN (f2
) = f3
;
2259 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2262 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2263 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2266 layout_type (ctx
->record_type
);
2267 fixup_child_record_type (ctx
);
2268 if (ctx
->srecord_type
)
2269 layout_type (ctx
->srecord_type
);
2270 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2271 TYPE_SIZE_UNIT (ctx
->record_type
));
2272 if (TREE_CODE (t
) != INTEGER_CST
)
2274 t
= unshare_expr (t
);
2275 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2277 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2278 t
= build_int_cst (long_integer_type_node
,
2279 TYPE_ALIGN_UNIT (ctx
->record_type
));
2280 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2284 /* Find the enclosing offload context. */
2286 static omp_context
*
2287 enclosing_target_ctx (omp_context
*ctx
)
2289 for (; ctx
; ctx
= ctx
->outer
)
2290 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2296 /* Return true if ctx is part of an oacc kernels region. */
2299 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2301 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2303 gimple
*stmt
= ctx
->stmt
;
2304 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2305 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2312 /* Check the parallelism clauses inside a kernels regions.
2313 Until kernels handling moves to use the same loop indirection
2314 scheme as parallel, we need to do this checking early. */
2317 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2319 bool checking
= true;
2320 unsigned outer_mask
= 0;
2321 unsigned this_mask
= 0;
2322 bool has_seq
= false, has_auto
= false;
2325 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2329 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2331 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2334 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2336 switch (OMP_CLAUSE_CODE (c
))
2338 case OMP_CLAUSE_GANG
:
2339 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2341 case OMP_CLAUSE_WORKER
:
2342 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2344 case OMP_CLAUSE_VECTOR
:
2345 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2347 case OMP_CLAUSE_SEQ
:
2350 case OMP_CLAUSE_AUTO
:
2360 if (has_seq
&& (this_mask
|| has_auto
))
2361 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2362 " OpenACC loop specifiers");
2363 else if (has_auto
&& this_mask
)
2364 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2365 " OpenACC loop specifiers");
2367 if (this_mask
& outer_mask
)
2368 error_at (gimple_location (stmt
), "inner loop uses same"
2369 " OpenACC parallelism as containing loop");
2372 return outer_mask
| this_mask
;
2375 /* Scan a GIMPLE_OMP_FOR. */
2377 static omp_context
*
2378 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2382 tree clauses
= gimple_omp_for_clauses (stmt
);
2384 ctx
= new_omp_context (stmt
, outer_ctx
);
2386 if (is_gimple_omp_oacc (stmt
))
2388 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2390 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
2391 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2393 char const *check
= NULL
;
2395 switch (OMP_CLAUSE_CODE (c
))
2397 case OMP_CLAUSE_GANG
:
2401 case OMP_CLAUSE_WORKER
:
2405 case OMP_CLAUSE_VECTOR
:
2413 if (check
&& OMP_CLAUSE_OPERAND (c
, 0))
2414 error_at (gimple_location (stmt
),
2415 "argument not permitted on %qs clause in"
2416 " OpenACC %<parallel%> or %<serial%>", check
);
2419 if (tgt
&& is_oacc_kernels (tgt
))
2421 /* Strip out reductions, as they are not handled yet. */
2422 tree
*prev_ptr
= &clauses
;
2424 while (tree probe
= *prev_ptr
)
2426 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2428 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2429 *prev_ptr
= *next_ptr
;
2431 prev_ptr
= next_ptr
;
2434 gimple_omp_for_set_clauses (stmt
, clauses
);
2435 check_oacc_kernel_gwv (stmt
, ctx
);
2438 /* Collect all variables named in reductions on this loop. Ensure
2439 that, if this loop has a reduction on some variable v, and there is
2440 a reduction on v somewhere in an outer context, then there is a
2441 reduction on v on all intervening loops as well. */
2442 tree local_reduction_clauses
= NULL
;
2443 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2445 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
2446 local_reduction_clauses
2447 = tree_cons (NULL
, c
, local_reduction_clauses
);
2449 if (ctx
->outer_reduction_clauses
== NULL
&& ctx
->outer
!= NULL
)
2450 ctx
->outer_reduction_clauses
2451 = chainon (unshare_expr (ctx
->outer
->local_reduction_clauses
),
2452 ctx
->outer
->outer_reduction_clauses
);
2453 tree outer_reduction_clauses
= ctx
->outer_reduction_clauses
;
2454 tree local_iter
= local_reduction_clauses
;
2455 for (; local_iter
; local_iter
= TREE_CHAIN (local_iter
))
2457 tree local_clause
= TREE_VALUE (local_iter
);
2458 tree local_var
= OMP_CLAUSE_DECL (local_clause
);
2459 tree_code local_op
= OMP_CLAUSE_REDUCTION_CODE (local_clause
);
2460 bool have_outer_reduction
= false;
2461 tree ctx_iter
= outer_reduction_clauses
;
2462 for (; ctx_iter
; ctx_iter
= TREE_CHAIN (ctx_iter
))
2464 tree outer_clause
= TREE_VALUE (ctx_iter
);
2465 tree outer_var
= OMP_CLAUSE_DECL (outer_clause
);
2466 tree_code outer_op
= OMP_CLAUSE_REDUCTION_CODE (outer_clause
);
2467 if (outer_var
== local_var
&& outer_op
!= local_op
)
2469 warning_at (OMP_CLAUSE_LOCATION (local_clause
), 0,
2470 "conflicting reduction operations for %qE",
2472 inform (OMP_CLAUSE_LOCATION (outer_clause
),
2473 "location of the previous reduction for %qE",
2476 if (outer_var
== local_var
)
2478 have_outer_reduction
= true;
2482 if (have_outer_reduction
)
2484 /* There is a reduction on outer_var both on this loop and on
2485 some enclosing loop. Walk up the context tree until such a
2486 loop with a reduction on outer_var is found, and complain
2487 about all intervening loops that do not have such a
2489 struct omp_context
*curr_loop
= ctx
->outer
;
2491 while (curr_loop
!= NULL
)
2493 tree curr_iter
= curr_loop
->local_reduction_clauses
;
2494 for (; curr_iter
; curr_iter
= TREE_CHAIN (curr_iter
))
2496 tree curr_clause
= TREE_VALUE (curr_iter
);
2497 tree curr_var
= OMP_CLAUSE_DECL (curr_clause
);
2498 if (curr_var
== local_var
)
2505 warning_at (gimple_location (curr_loop
->stmt
), 0,
2506 "nested loop in reduction needs "
2507 "reduction clause for %qE",
2511 curr_loop
= curr_loop
->outer
;
2515 ctx
->local_reduction_clauses
= local_reduction_clauses
;
2516 ctx
->outer_reduction_clauses
2517 = chainon (unshare_expr (ctx
->local_reduction_clauses
),
2518 ctx
->outer_reduction_clauses
);
2521 scan_sharing_clauses (clauses
, ctx
);
2523 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2524 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2526 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2527 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2528 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2529 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2531 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2535 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2538 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2539 omp_context
*outer_ctx
)
2541 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2542 gsi_replace (gsi
, bind
, false);
2543 gimple_seq seq
= NULL
;
2544 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2545 tree cond
= create_tmp_var_raw (integer_type_node
);
2546 DECL_CONTEXT (cond
) = current_function_decl
;
2547 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2548 gimple_bind_set_vars (bind
, cond
);
2549 gimple_call_set_lhs (g
, cond
);
2550 gimple_seq_add_stmt (&seq
, g
);
2551 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2552 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2553 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2554 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2555 gimple_seq_add_stmt (&seq
, g
);
2556 g
= gimple_build_label (lab1
);
2557 gimple_seq_add_stmt (&seq
, g
);
2558 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2559 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2560 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2561 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2562 gimple_omp_for_set_clauses (new_stmt
, clause
);
2563 gimple_seq_add_stmt (&seq
, new_stmt
);
2564 g
= gimple_build_goto (lab3
);
2565 gimple_seq_add_stmt (&seq
, g
);
2566 g
= gimple_build_label (lab2
);
2567 gimple_seq_add_stmt (&seq
, g
);
2568 gimple_seq_add_stmt (&seq
, stmt
);
2569 g
= gimple_build_label (lab3
);
2570 gimple_seq_add_stmt (&seq
, g
);
2571 gimple_bind_set_body (bind
, seq
);
2573 scan_omp_for (new_stmt
, outer_ctx
);
2574 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2577 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2578 struct walk_stmt_info
*);
2579 static omp_context
*maybe_lookup_ctx (gimple
*);
2581 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2582 for scan phase loop. */
2585 scan_omp_simd_scan (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2586 omp_context
*outer_ctx
)
2588 /* The only change between inclusive and exclusive scan will be
2589 within the first simd loop, so just use inclusive in the
2590 worksharing loop. */
2591 outer_ctx
->scan_inclusive
= true;
2592 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_INCLUSIVE
);
2593 OMP_CLAUSE_DECL (c
) = integer_zero_node
;
2595 gomp_scan
*input_stmt
= gimple_build_omp_scan (NULL
, NULL_TREE
);
2596 gomp_scan
*scan_stmt
= gimple_build_omp_scan (NULL
, c
);
2597 gsi_replace (gsi
, input_stmt
, false);
2598 gimple_seq input_body
= NULL
;
2599 gimple_seq_add_stmt (&input_body
, stmt
);
2600 gsi_insert_after (gsi
, scan_stmt
, GSI_NEW_STMT
);
2602 gimple_stmt_iterator input1_gsi
= gsi_none ();
2603 struct walk_stmt_info wi
;
2604 memset (&wi
, 0, sizeof (wi
));
2606 wi
.info
= (void *) &input1_gsi
;
2607 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), omp_find_scan
, NULL
, &wi
);
2608 gcc_assert (!gsi_end_p (input1_gsi
));
2610 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
2611 gsi_next (&input1_gsi
);
2612 gimple
*scan_stmt1
= gsi_stmt (input1_gsi
);
2613 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
2614 c
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (scan_stmt1
));
2615 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2616 std::swap (input_stmt1
, scan_stmt1
);
2618 gimple_seq input_body1
= gimple_omp_body (input_stmt1
);
2619 gimple_omp_set_body (input_stmt1
, NULL
);
2621 gimple_seq scan_body
= copy_gimple_seq_and_replace_locals (stmt
);
2622 gomp_for
*new_stmt
= as_a
<gomp_for
*> (scan_body
);
2624 gimple_omp_set_body (input_stmt1
, input_body1
);
2625 gimple_omp_set_body (scan_stmt1
, NULL
);
2627 gimple_stmt_iterator input2_gsi
= gsi_none ();
2628 memset (&wi
, 0, sizeof (wi
));
2630 wi
.info
= (void *) &input2_gsi
;
2631 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt
), omp_find_scan
,
2633 gcc_assert (!gsi_end_p (input2_gsi
));
2635 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
2636 gsi_next (&input2_gsi
);
2637 gimple
*scan_stmt2
= gsi_stmt (input2_gsi
);
2638 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
2639 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2640 std::swap (input_stmt2
, scan_stmt2
);
2642 gimple_omp_set_body (input_stmt2
, NULL
);
2644 gimple_omp_set_body (input_stmt
, input_body
);
2645 gimple_omp_set_body (scan_stmt
, scan_body
);
2647 omp_context
*ctx
= new_omp_context (input_stmt
, outer_ctx
);
2648 scan_omp (gimple_omp_body_ptr (input_stmt
), ctx
);
2650 ctx
= new_omp_context (scan_stmt
, outer_ctx
);
2651 scan_omp (gimple_omp_body_ptr (scan_stmt
), ctx
);
2653 maybe_lookup_ctx (new_stmt
)->for_simd_scan_phase
= true;
2656 /* Scan an OpenMP sections directive. */
2659 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2663 ctx
= new_omp_context (stmt
, outer_ctx
);
2664 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2665 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2668 /* Scan an OpenMP single directive. */
2671 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2676 ctx
= new_omp_context (stmt
, outer_ctx
);
2677 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2678 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2679 name
= create_tmp_var_name (".omp_copy_s");
2680 name
= build_decl (gimple_location (stmt
),
2681 TYPE_DECL
, name
, ctx
->record_type
);
2682 TYPE_NAME (ctx
->record_type
) = name
;
2684 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2685 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2687 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2688 ctx
->record_type
= NULL
;
2690 layout_type (ctx
->record_type
);
2693 /* Scan a GIMPLE_OMP_TARGET. */
2696 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
2700 bool offloaded
= is_gimple_omp_offloaded (stmt
);
2701 tree clauses
= gimple_omp_target_clauses (stmt
);
2703 ctx
= new_omp_context (stmt
, outer_ctx
);
2704 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2705 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2706 name
= create_tmp_var_name (".omp_data_t");
2707 name
= build_decl (gimple_location (stmt
),
2708 TYPE_DECL
, name
, ctx
->record_type
);
2709 DECL_ARTIFICIAL (name
) = 1;
2710 DECL_NAMELESS (name
) = 1;
2711 TYPE_NAME (ctx
->record_type
) = name
;
2712 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2716 create_omp_child_function (ctx
, false);
2717 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2720 scan_sharing_clauses (clauses
, ctx
);
2721 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2723 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2724 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2727 TYPE_FIELDS (ctx
->record_type
)
2728 = nreverse (TYPE_FIELDS (ctx
->record_type
));
2731 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
2732 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
2734 field
= DECL_CHAIN (field
))
2735 gcc_assert (DECL_ALIGN (field
) == align
);
2737 layout_type (ctx
->record_type
);
2739 fixup_child_record_type (ctx
);
2743 /* Scan an OpenMP teams directive. */
2746 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
2748 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
2750 if (!gimple_omp_teams_host (stmt
))
2752 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2753 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2756 taskreg_contexts
.safe_push (ctx
);
2757 gcc_assert (taskreg_nesting_level
== 1);
2758 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2759 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2760 tree name
= create_tmp_var_name (".omp_data_s");
2761 name
= build_decl (gimple_location (stmt
),
2762 TYPE_DECL
, name
, ctx
->record_type
);
2763 DECL_ARTIFICIAL (name
) = 1;
2764 DECL_NAMELESS (name
) = 1;
2765 TYPE_NAME (ctx
->record_type
) = name
;
2766 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2767 create_omp_child_function (ctx
, false);
2768 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2770 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2771 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2773 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2774 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2777 /* Check nesting restrictions. */
2779 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
2783 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2784 inside an OpenACC CTX. */
2785 if (!(is_gimple_omp (stmt
)
2786 && is_gimple_omp_oacc (stmt
))
2787 /* Except for atomic codes that we share with OpenMP. */
2788 && !(gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2789 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2791 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2793 error_at (gimple_location (stmt
),
2794 "non-OpenACC construct inside of OpenACC routine");
2798 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
2799 if (is_gimple_omp (octx
->stmt
)
2800 && is_gimple_omp_oacc (octx
->stmt
))
2802 error_at (gimple_location (stmt
),
2803 "non-OpenACC construct inside of OpenACC region");
2810 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
2812 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
2814 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
2815 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
2819 if (ctx
->order_concurrent
2820 && (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
2821 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2822 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2824 error_at (gimple_location (stmt
),
2825 "OpenMP constructs other than %<parallel%>, %<loop%>"
2826 " or %<simd%> may not be nested inside a region with"
2827 " the %<order(concurrent)%> clause");
2830 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
2832 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2833 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2835 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
2836 && (ctx
->outer
== NULL
2837 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
2838 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
2839 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
2840 != GF_OMP_FOR_KIND_FOR
)
2841 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
2843 error_at (gimple_location (stmt
),
2844 "%<ordered simd threads%> must be closely "
2845 "nested inside of %<for simd%> region");
2851 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2852 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
2853 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
2855 else if (gimple_code (stmt
) == GIMPLE_OMP_FOR
2856 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
2858 error_at (gimple_location (stmt
),
2859 "OpenMP constructs other than "
2860 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2861 "not be nested inside %<simd%> region");
2864 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2866 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
2867 || (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
2868 && omp_find_clause (gimple_omp_for_clauses (stmt
),
2869 OMP_CLAUSE_BIND
) == NULL_TREE
))
2870 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
2872 error_at (gimple_location (stmt
),
2873 "only %<distribute%>, %<parallel%> or %<loop%> "
2874 "regions are allowed to be strictly nested inside "
2875 "%<teams%> region");
2879 else if (ctx
->order_concurrent
2880 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
2881 && (gimple_code (stmt
) != GIMPLE_OMP_FOR
2882 || gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_SIMD
)
2883 && gimple_code (stmt
) != GIMPLE_OMP_SCAN
)
2886 error_at (gimple_location (stmt
),
2887 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2888 "%<simd%> may not be nested inside a %<loop%> region");
2890 error_at (gimple_location (stmt
),
2891 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2892 "%<simd%> may not be nested inside a region with "
2893 "the %<order(concurrent)%> clause");
2897 switch (gimple_code (stmt
))
2899 case GIMPLE_OMP_FOR
:
2900 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
)
2902 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
2904 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
2906 error_at (gimple_location (stmt
),
2907 "%<distribute%> region must be strictly nested "
2908 "inside %<teams%> construct");
2913 /* We split taskloop into task and nested taskloop in it. */
2914 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
2916 /* For now, hope this will change and loop bind(parallel) will not
2917 be allowed in lots of contexts. */
2918 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
2919 && omp_find_clause (gimple_omp_for_clauses (stmt
), OMP_CLAUSE_BIND
))
2921 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
2926 switch (gimple_code (ctx
->stmt
))
2928 case GIMPLE_OMP_FOR
:
2929 ok
= (gimple_omp_for_kind (ctx
->stmt
)
2930 == GF_OMP_FOR_KIND_OACC_LOOP
);
2933 case GIMPLE_OMP_TARGET
:
2934 switch (gimple_omp_target_kind (ctx
->stmt
))
2936 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
2937 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
2938 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
2949 else if (oacc_get_fn_attrib (current_function_decl
))
2953 error_at (gimple_location (stmt
),
2954 "OpenACC loop directive must be associated with"
2955 " an OpenACC compute region");
2961 if (is_gimple_call (stmt
)
2962 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2963 == BUILT_IN_GOMP_CANCEL
2964 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2965 == BUILT_IN_GOMP_CANCELLATION_POINT
))
2967 const char *bad
= NULL
;
2968 const char *kind
= NULL
;
2969 const char *construct
2970 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2971 == BUILT_IN_GOMP_CANCEL
)
2973 : "cancellation point";
2976 error_at (gimple_location (stmt
), "orphaned %qs construct",
2980 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
2981 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
2985 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
2987 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2988 == BUILT_IN_GOMP_CANCEL
2989 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2990 ctx
->cancellable
= true;
2994 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2995 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
2997 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2998 == BUILT_IN_GOMP_CANCEL
2999 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3001 ctx
->cancellable
= true;
3002 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3004 warning_at (gimple_location (stmt
), 0,
3005 "%<cancel for%> inside "
3006 "%<nowait%> for construct");
3007 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3008 OMP_CLAUSE_ORDERED
))
3009 warning_at (gimple_location (stmt
), 0,
3010 "%<cancel for%> inside "
3011 "%<ordered%> for construct");
3016 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
3017 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
3019 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3020 == BUILT_IN_GOMP_CANCEL
3021 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3023 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
3025 ctx
->cancellable
= true;
3026 if (omp_find_clause (gimple_omp_sections_clauses
3029 warning_at (gimple_location (stmt
), 0,
3030 "%<cancel sections%> inside "
3031 "%<nowait%> sections construct");
3035 gcc_assert (ctx
->outer
3036 && gimple_code (ctx
->outer
->stmt
)
3037 == GIMPLE_OMP_SECTIONS
);
3038 ctx
->outer
->cancellable
= true;
3039 if (omp_find_clause (gimple_omp_sections_clauses
3042 warning_at (gimple_location (stmt
), 0,
3043 "%<cancel sections%> inside "
3044 "%<nowait%> sections construct");
3050 if (!is_task_ctx (ctx
)
3051 && (!is_taskloop_ctx (ctx
)
3052 || ctx
->outer
== NULL
3053 || !is_task_ctx (ctx
->outer
)))
3057 for (omp_context
*octx
= ctx
->outer
;
3058 octx
; octx
= octx
->outer
)
3060 switch (gimple_code (octx
->stmt
))
3062 case GIMPLE_OMP_TASKGROUP
:
3064 case GIMPLE_OMP_TARGET
:
3065 if (gimple_omp_target_kind (octx
->stmt
)
3066 != GF_OMP_TARGET_KIND_REGION
)
3069 case GIMPLE_OMP_PARALLEL
:
3070 case GIMPLE_OMP_TEAMS
:
3071 error_at (gimple_location (stmt
),
3072 "%<%s taskgroup%> construct not closely "
3073 "nested inside of %<taskgroup%> region",
3076 case GIMPLE_OMP_TASK
:
3077 if (gimple_omp_task_taskloop_p (octx
->stmt
)
3079 && is_taskloop_ctx (octx
->outer
))
3082 = gimple_omp_for_clauses (octx
->outer
->stmt
);
3083 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
3092 ctx
->cancellable
= true;
3097 error_at (gimple_location (stmt
), "invalid arguments");
3102 error_at (gimple_location (stmt
),
3103 "%<%s %s%> construct not closely nested inside of %qs",
3104 construct
, kind
, bad
);
3109 case GIMPLE_OMP_SECTIONS
:
3110 case GIMPLE_OMP_SINGLE
:
3111 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3112 switch (gimple_code (ctx
->stmt
))
3114 case GIMPLE_OMP_FOR
:
3115 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3116 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3119 case GIMPLE_OMP_SECTIONS
:
3120 case GIMPLE_OMP_SINGLE
:
3121 case GIMPLE_OMP_ORDERED
:
3122 case GIMPLE_OMP_MASTER
:
3123 case GIMPLE_OMP_TASK
:
3124 case GIMPLE_OMP_CRITICAL
:
3125 if (is_gimple_call (stmt
))
3127 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3128 != BUILT_IN_GOMP_BARRIER
)
3130 error_at (gimple_location (stmt
),
3131 "barrier region may not be closely nested inside "
3132 "of work-sharing, %<loop%>, %<critical%>, "
3133 "%<ordered%>, %<master%>, explicit %<task%> or "
3134 "%<taskloop%> region");
3137 error_at (gimple_location (stmt
),
3138 "work-sharing region may not be closely nested inside "
3139 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3140 "%<master%>, explicit %<task%> or %<taskloop%> region");
3142 case GIMPLE_OMP_PARALLEL
:
3143 case GIMPLE_OMP_TEAMS
:
3145 case GIMPLE_OMP_TARGET
:
3146 if (gimple_omp_target_kind (ctx
->stmt
)
3147 == GF_OMP_TARGET_KIND_REGION
)
3154 case GIMPLE_OMP_MASTER
:
3155 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3156 switch (gimple_code (ctx
->stmt
))
3158 case GIMPLE_OMP_FOR
:
3159 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3160 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3163 case GIMPLE_OMP_SECTIONS
:
3164 case GIMPLE_OMP_SINGLE
:
3165 case GIMPLE_OMP_TASK
:
3166 error_at (gimple_location (stmt
),
3167 "%<master%> region may not be closely nested inside "
3168 "of work-sharing, %<loop%>, explicit %<task%> or "
3169 "%<taskloop%> region");
3171 case GIMPLE_OMP_PARALLEL
:
3172 case GIMPLE_OMP_TEAMS
:
3174 case GIMPLE_OMP_TARGET
:
3175 if (gimple_omp_target_kind (ctx
->stmt
)
3176 == GF_OMP_TARGET_KIND_REGION
)
3183 case GIMPLE_OMP_TASK
:
3184 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3185 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3186 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3187 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3189 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3190 error_at (OMP_CLAUSE_LOCATION (c
),
3191 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3192 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3196 case GIMPLE_OMP_ORDERED
:
3197 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3198 c
; c
= OMP_CLAUSE_CHAIN (c
))
3200 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
3202 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3203 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3206 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3207 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
3208 || kind
== OMP_CLAUSE_DEPEND_SINK
)
3211 /* Look for containing ordered(N) loop. */
3213 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3215 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3216 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3218 error_at (OMP_CLAUSE_LOCATION (c
),
3219 "%<ordered%> construct with %<depend%> clause "
3220 "must be closely nested inside an %<ordered%> "
3224 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
3226 error_at (OMP_CLAUSE_LOCATION (c
),
3227 "%<ordered%> construct with %<depend%> clause "
3228 "must be closely nested inside a loop with "
3229 "%<ordered%> clause with a parameter");
3235 error_at (OMP_CLAUSE_LOCATION (c
),
3236 "invalid depend kind in omp %<ordered%> %<depend%>");
3240 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3241 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3243 /* ordered simd must be closely nested inside of simd region,
3244 and simd region must not encounter constructs other than
3245 ordered simd, therefore ordered simd may be either orphaned,
3246 or ctx->stmt must be simd. The latter case is handled already
3250 error_at (gimple_location (stmt
),
3251 "%<ordered%> %<simd%> must be closely nested inside "
3256 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3257 switch (gimple_code (ctx
->stmt
))
3259 case GIMPLE_OMP_CRITICAL
:
3260 case GIMPLE_OMP_TASK
:
3261 case GIMPLE_OMP_ORDERED
:
3262 ordered_in_taskloop
:
3263 error_at (gimple_location (stmt
),
3264 "%<ordered%> region may not be closely nested inside "
3265 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3266 "%<taskloop%> region");
3268 case GIMPLE_OMP_FOR
:
3269 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3270 goto ordered_in_taskloop
;
3272 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3273 OMP_CLAUSE_ORDERED
);
3276 error_at (gimple_location (stmt
),
3277 "%<ordered%> region must be closely nested inside "
3278 "a loop region with an %<ordered%> clause");
3281 if (OMP_CLAUSE_ORDERED_EXPR (o
) != NULL_TREE
3282 && omp_find_clause (c
, OMP_CLAUSE_DEPEND
) == NULL_TREE
)
3284 error_at (gimple_location (stmt
),
3285 "%<ordered%> region without %<depend%> clause may "
3286 "not be closely nested inside a loop region with "
3287 "an %<ordered%> clause with a parameter");
3291 case GIMPLE_OMP_TARGET
:
3292 if (gimple_omp_target_kind (ctx
->stmt
)
3293 != GF_OMP_TARGET_KIND_REGION
)
3296 case GIMPLE_OMP_PARALLEL
:
3297 case GIMPLE_OMP_TEAMS
:
3298 error_at (gimple_location (stmt
),
3299 "%<ordered%> region must be closely nested inside "
3300 "a loop region with an %<ordered%> clause");
3306 case GIMPLE_OMP_CRITICAL
:
3309 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3310 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3311 if (gomp_critical
*other_crit
3312 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3313 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3315 error_at (gimple_location (stmt
),
3316 "%<critical%> region may not be nested inside "
3317 "a %<critical%> region with the same name");
3322 case GIMPLE_OMP_TEAMS
:
3325 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3326 || (gimple_omp_target_kind (ctx
->stmt
)
3327 != GF_OMP_TARGET_KIND_REGION
))
3329 /* Teams construct can appear either strictly nested inside of
3330 target construct with no intervening stmts, or can be encountered
3331 only by initial task (so must not appear inside any OpenMP
3333 error_at (gimple_location (stmt
),
3334 "%<teams%> construct must be closely nested inside of "
3335 "%<target%> construct or not nested in any OpenMP "
3340 case GIMPLE_OMP_TARGET
:
3341 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3342 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3343 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3344 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3346 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3347 error_at (OMP_CLAUSE_LOCATION (c
),
3348 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3349 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3352 if (is_gimple_omp_offloaded (stmt
)
3353 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3355 error_at (gimple_location (stmt
),
3356 "OpenACC region inside of OpenACC routine, nested "
3357 "parallelism not supported yet");
3360 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3362 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3364 if (is_gimple_omp (stmt
)
3365 && is_gimple_omp_oacc (stmt
)
3366 && is_gimple_omp (ctx
->stmt
))
3368 error_at (gimple_location (stmt
),
3369 "OpenACC construct inside of non-OpenACC region");
3375 const char *stmt_name
, *ctx_stmt_name
;
3376 switch (gimple_omp_target_kind (stmt
))
3378 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3379 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3380 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3381 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3382 stmt_name
= "target enter data"; break;
3383 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3384 stmt_name
= "target exit data"; break;
3385 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3386 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3387 case GF_OMP_TARGET_KIND_OACC_SERIAL
: stmt_name
= "serial"; break;
3388 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3389 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3390 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
3391 stmt_name
= "enter/exit data"; break;
3392 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3393 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3395 default: gcc_unreachable ();
3397 switch (gimple_omp_target_kind (ctx
->stmt
))
3399 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3400 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3401 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3402 ctx_stmt_name
= "parallel"; break;
3403 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3404 ctx_stmt_name
= "kernels"; break;
3405 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3406 ctx_stmt_name
= "serial"; break;
3407 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3408 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3409 ctx_stmt_name
= "host_data"; break;
3410 default: gcc_unreachable ();
3413 /* OpenACC/OpenMP mismatch? */
3414 if (is_gimple_omp_oacc (stmt
)
3415 != is_gimple_omp_oacc (ctx
->stmt
))
3417 error_at (gimple_location (stmt
),
3418 "%s %qs construct inside of %s %qs region",
3419 (is_gimple_omp_oacc (stmt
)
3420 ? "OpenACC" : "OpenMP"), stmt_name
,
3421 (is_gimple_omp_oacc (ctx
->stmt
)
3422 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3425 if (is_gimple_omp_offloaded (ctx
->stmt
))
3427 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3428 if (is_gimple_omp_oacc (ctx
->stmt
))
3430 error_at (gimple_location (stmt
),
3431 "%qs construct inside of %qs region",
3432 stmt_name
, ctx_stmt_name
);
3437 warning_at (gimple_location (stmt
), 0,
3438 "%qs construct inside of %qs region",
3439 stmt_name
, ctx_stmt_name
);
3451 /* Helper function scan_omp.
3453 Callback for walk_tree or operators in walk_gimple_stmt used to
3454 scan for OMP directives in TP. */
3457 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3459 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3460 omp_context
*ctx
= (omp_context
*) wi
->info
;
3463 switch (TREE_CODE (t
))
3471 tree repl
= remap_decl (t
, &ctx
->cb
);
3472 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3478 if (ctx
&& TYPE_P (t
))
3479 *tp
= remap_type (t
, &ctx
->cb
);
3480 else if (!DECL_P (t
))
3485 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3486 if (tem
!= TREE_TYPE (t
))
3488 if (TREE_CODE (t
) == INTEGER_CST
)
3489 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3491 TREE_TYPE (t
) = tem
;
3501 /* Return true if FNDECL is a setjmp or a longjmp. */
3504 setjmp_or_longjmp_p (const_tree fndecl
)
3506 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3507 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
3510 tree declname
= DECL_NAME (fndecl
);
3512 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3513 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3514 || !TREE_PUBLIC (fndecl
))
3517 const char *name
= IDENTIFIER_POINTER (declname
);
3518 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
3521 /* Return true if FNDECL is an omp_* runtime API call. */
3524 omp_runtime_api_call (const_tree fndecl
)
3526 tree declname
= DECL_NAME (fndecl
);
3528 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3529 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3530 || !TREE_PUBLIC (fndecl
))
3533 const char *name
= IDENTIFIER_POINTER (declname
);
3534 if (strncmp (name
, "omp_", 4) != 0)
3537 static const char *omp_runtime_apis
[] =
3539 /* This array has 3 sections. First omp_* calls that don't
3540 have any suffixes. */
3542 "target_associate_ptr",
3543 "target_disassociate_ptr",
3545 "target_is_present",
3547 "target_memcpy_rect",
3549 /* Now omp_* calls that are available as omp_* and omp_*_. */
3552 "destroy_nest_lock",
3555 "get_affinity_format",
3557 "get_default_device",
3559 "get_initial_device",
3561 "get_max_active_levels",
3562 "get_max_task_priority",
3570 "get_partition_num_places",
3582 "is_initial_device",
3584 "pause_resource_all",
3585 "set_affinity_format",
3593 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3594 "get_ancestor_thread_num",
3595 "get_partition_place_nums",
3596 "get_place_num_procs",
3597 "get_place_proc_ids",
3600 "set_default_device",
3602 "set_max_active_levels",
3609 for (unsigned i
= 0; i
< ARRAY_SIZE (omp_runtime_apis
); i
++)
3611 if (omp_runtime_apis
[i
] == NULL
)
3616 size_t len
= strlen (omp_runtime_apis
[i
]);
3617 if (strncmp (name
+ 4, omp_runtime_apis
[i
], len
) == 0
3618 && (name
[4 + len
] == '\0'
3620 && name
[4 + len
] == '_'
3621 && (name
[4 + len
+ 1] == '\0'
3623 && strcmp (name
+ 4 + len
+ 1, "8_") == 0)))))
3629 /* Helper function for scan_omp.
3631 Callback for walk_gimple_stmt used to scan for OMP directives in
3632 the current statement in GSI. */
3635 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3636 struct walk_stmt_info
*wi
)
3638 gimple
*stmt
= gsi_stmt (*gsi
);
3639 omp_context
*ctx
= (omp_context
*) wi
->info
;
3641 if (gimple_has_location (stmt
))
3642 input_location
= gimple_location (stmt
);
3644 /* Check the nesting restrictions. */
3645 bool remove
= false;
3646 if (is_gimple_omp (stmt
))
3647 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3648 else if (is_gimple_call (stmt
))
3650 tree fndecl
= gimple_call_fndecl (stmt
);
3654 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3655 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3656 && setjmp_or_longjmp_p (fndecl
)
3660 error_at (gimple_location (stmt
),
3661 "setjmp/longjmp inside %<simd%> construct");
3663 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3664 switch (DECL_FUNCTION_CODE (fndecl
))
3666 case BUILT_IN_GOMP_BARRIER
:
3667 case BUILT_IN_GOMP_CANCEL
:
3668 case BUILT_IN_GOMP_CANCELLATION_POINT
:
3669 case BUILT_IN_GOMP_TASKYIELD
:
3670 case BUILT_IN_GOMP_TASKWAIT
:
3671 case BUILT_IN_GOMP_TASKGROUP_START
:
3672 case BUILT_IN_GOMP_TASKGROUP_END
:
3673 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3680 omp_context
*octx
= ctx
;
3681 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
&& ctx
->outer
)
3683 if (octx
->order_concurrent
&& omp_runtime_api_call (fndecl
))
3686 error_at (gimple_location (stmt
),
3687 "OpenMP runtime API call %qD in a region with "
3688 "%<order(concurrent)%> clause", fndecl
);
3695 stmt
= gimple_build_nop ();
3696 gsi_replace (gsi
, stmt
, false);
3699 *handled_ops_p
= true;
3701 switch (gimple_code (stmt
))
3703 case GIMPLE_OMP_PARALLEL
:
3704 taskreg_nesting_level
++;
3705 scan_omp_parallel (gsi
, ctx
);
3706 taskreg_nesting_level
--;
3709 case GIMPLE_OMP_TASK
:
3710 taskreg_nesting_level
++;
3711 scan_omp_task (gsi
, ctx
);
3712 taskreg_nesting_level
--;
3715 case GIMPLE_OMP_FOR
:
3716 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3717 == GF_OMP_FOR_KIND_SIMD
)
3718 && gimple_omp_for_combined_into_p (stmt
)
3719 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SCAN
)
3721 tree clauses
= gimple_omp_for_clauses (as_a
<gomp_for
*> (stmt
));
3722 tree c
= omp_find_clause (clauses
, OMP_CLAUSE_REDUCTION
);
3723 if (c
&& OMP_CLAUSE_REDUCTION_INSCAN (c
) && !seen_error ())
3725 scan_omp_simd_scan (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3729 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3730 == GF_OMP_FOR_KIND_SIMD
)
3731 && omp_maybe_offloaded_ctx (ctx
)
3732 && omp_max_simt_vf ()
3733 && gimple_omp_for_collapse (stmt
) == 1)
3734 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3736 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
3739 case GIMPLE_OMP_SECTIONS
:
3740 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
3743 case GIMPLE_OMP_SINGLE
:
3744 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
3747 case GIMPLE_OMP_SCAN
:
3748 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
3750 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
3751 ctx
->scan_inclusive
= true;
3752 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
3753 ctx
->scan_exclusive
= true;
3756 case GIMPLE_OMP_SECTION
:
3757 case GIMPLE_OMP_MASTER
:
3758 case GIMPLE_OMP_ORDERED
:
3759 case GIMPLE_OMP_CRITICAL
:
3760 ctx
= new_omp_context (stmt
, ctx
);
3761 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3764 case GIMPLE_OMP_TASKGROUP
:
3765 ctx
= new_omp_context (stmt
, ctx
);
3766 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
3767 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3770 case GIMPLE_OMP_TARGET
:
3771 if (is_gimple_omp_offloaded (stmt
))
3773 taskreg_nesting_level
++;
3774 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3775 taskreg_nesting_level
--;
3778 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3781 case GIMPLE_OMP_TEAMS
:
3782 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
3784 taskreg_nesting_level
++;
3785 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3786 taskreg_nesting_level
--;
3789 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3796 *handled_ops_p
= false;
3798 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
3800 var
= DECL_CHAIN (var
))
3801 insert_decl_map (&ctx
->cb
, var
, var
);
3805 *handled_ops_p
= false;
3813 /* Scan all the statements starting at the current statement. CTX
3814 contains context information about the OMP directives and
3815 clauses found during the scan. */
3818 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
3820 location_t saved_location
;
3821 struct walk_stmt_info wi
;
3823 memset (&wi
, 0, sizeof (wi
));
3825 wi
.want_locations
= true;
3827 saved_location
= input_location
;
3828 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
3829 input_location
= saved_location
;
3832 /* Re-gimplification and code generation routines. */
3834 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3835 of BIND if in a method. */
3838 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
3840 if (DECL_ARGUMENTS (current_function_decl
)
3841 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
3842 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
3845 tree vars
= gimple_bind_vars (bind
);
3846 for (tree
*pvar
= &vars
; *pvar
; )
3847 if (omp_member_access_dummy_var (*pvar
))
3848 *pvar
= DECL_CHAIN (*pvar
);
3850 pvar
= &DECL_CHAIN (*pvar
);
3851 gimple_bind_set_vars (bind
, vars
);
3855 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3856 block and its subblocks. */
3859 remove_member_access_dummy_vars (tree block
)
3861 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
3862 if (omp_member_access_dummy_var (*pvar
))
3863 *pvar
= DECL_CHAIN (*pvar
);
3865 pvar
= &DECL_CHAIN (*pvar
);
3867 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
3868 remove_member_access_dummy_vars (block
);
3871 /* If a context was created for STMT when it was scanned, return it. */
3873 static omp_context
*
3874 maybe_lookup_ctx (gimple
*stmt
)
3877 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
3878 return n
? (omp_context
*) n
->value
: NULL
;
3882 /* Find the mapping for DECL in CTX or the immediately enclosing
3883 context that has a mapping for DECL.
3885 If CTX is a nested parallel directive, we may have to use the decl
3886 mappings created in CTX's parent context. Suppose that we have the
3887 following parallel nesting (variable UIDs showed for clarity):
3890 #omp parallel shared(iD.1562) -> outer parallel
3891 iD.1562 = iD.1562 + 1;
3893 #omp parallel shared (iD.1562) -> inner parallel
3894 iD.1562 = iD.1562 - 1;
3896 Each parallel structure will create a distinct .omp_data_s structure
3897 for copying iD.1562 in/out of the directive:
3899 outer parallel .omp_data_s.1.i -> iD.1562
3900 inner parallel .omp_data_s.2.i -> iD.1562
3902 A shared variable mapping will produce a copy-out operation before
3903 the parallel directive and a copy-in operation after it. So, in
3904 this case we would have:
3907 .omp_data_o.1.i = iD.1562;
3908 #omp parallel shared(iD.1562) -> outer parallel
3909 .omp_data_i.1 = &.omp_data_o.1
3910 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3912 .omp_data_o.2.i = iD.1562; -> **
3913 #omp parallel shared(iD.1562) -> inner parallel
3914 .omp_data_i.2 = &.omp_data_o.2
3915 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3918 ** This is a problem. The symbol iD.1562 cannot be referenced
3919 inside the body of the outer parallel region. But since we are
3920 emitting this copy operation while expanding the inner parallel
3921 directive, we need to access the CTX structure of the outer
3922 parallel directive to get the correct mapping:
3924 .omp_data_o.2.i = .omp_data_i.1->i
3926 Since there may be other workshare or parallel directives enclosing
3927 the parallel directive, it may be necessary to walk up the context
3928 parent chain. This is not a problem in general because nested
3929 parallelism happens only rarely. */
3932 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3937 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3938 t
= maybe_lookup_decl (decl
, up
);
3940 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
3942 return t
? t
: decl
;
3946 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3947 in outer contexts. */
3950 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3955 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3956 t
= maybe_lookup_decl (decl
, up
);
3958 return t
? t
: decl
;
3962 /* Construct the initialization value for reduction operation OP. */
3965 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
3974 case TRUTH_ORIF_EXPR
:
3975 case TRUTH_XOR_EXPR
:
3977 return build_zero_cst (type
);
3980 case TRUTH_AND_EXPR
:
3981 case TRUTH_ANDIF_EXPR
:
3983 return fold_convert_loc (loc
, type
, integer_one_node
);
3986 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
3989 if (SCALAR_FLOAT_TYPE_P (type
))
3991 REAL_VALUE_TYPE max
, min
;
3992 if (HONOR_INFINITIES (type
))
3995 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
3998 real_maxval (&min
, 1, TYPE_MODE (type
));
3999 return build_real (type
, min
);
4001 else if (POINTER_TYPE_P (type
))
4004 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4005 return wide_int_to_tree (type
, min
);
4009 gcc_assert (INTEGRAL_TYPE_P (type
));
4010 return TYPE_MIN_VALUE (type
);
4014 if (SCALAR_FLOAT_TYPE_P (type
))
4016 REAL_VALUE_TYPE max
;
4017 if (HONOR_INFINITIES (type
))
4020 real_maxval (&max
, 0, TYPE_MODE (type
));
4021 return build_real (type
, max
);
4023 else if (POINTER_TYPE_P (type
))
4026 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4027 return wide_int_to_tree (type
, max
);
4031 gcc_assert (INTEGRAL_TYPE_P (type
));
4032 return TYPE_MAX_VALUE (type
);
4040 /* Construct the initialization value for reduction CLAUSE. */
4043 omp_reduction_init (tree clause
, tree type
)
4045 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
4046 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
4049 /* Return alignment to be assumed for var in CLAUSE, which should be
4050 OMP_CLAUSE_ALIGNED. */
4053 omp_clause_aligned_alignment (tree clause
)
4055 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
4056 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
4058 /* Otherwise return implementation defined alignment. */
4059 unsigned int al
= 1;
4060 opt_scalar_mode mode_iter
;
4061 auto_vector_modes modes
;
4062 targetm
.vectorize
.autovectorize_vector_modes (&modes
, true);
4063 static enum mode_class classes
[]
4064 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
4065 for (int i
= 0; i
< 4; i
+= 2)
4066 /* The for loop above dictates that we only walk through scalar classes. */
4067 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
4069 scalar_mode mode
= mode_iter
.require ();
4070 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
4071 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
4073 machine_mode alt_vmode
;
4074 for (unsigned int j
= 0; j
< modes
.length (); ++j
)
4075 if (related_vector_mode (modes
[j
], mode
).exists (&alt_vmode
)
4076 && known_ge (GET_MODE_SIZE (alt_vmode
), GET_MODE_SIZE (vmode
)))
4079 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
4080 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
4082 type
= build_vector_type_for_mode (type
, vmode
);
4083 if (TYPE_MODE (type
) != vmode
)
4085 if (TYPE_ALIGN_UNIT (type
) > al
)
4086 al
= TYPE_ALIGN_UNIT (type
);
4088 return build_int_cst (integer_type_node
, al
);
4092 /* This structure is part of the interface between lower_rec_simd_input_clauses
4093 and lower_rec_input_clauses. */
4095 class omplow_simd_context
{
4097 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4101 vec
<tree
, va_heap
> simt_eargs
;
4102 gimple_seq simt_dlist
;
4103 poly_uint64_pod max_vf
;
4107 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4111 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
4112 omplow_simd_context
*sctx
, tree
&ivar
,
4113 tree
&lvar
, tree
*rvar
= NULL
,
4116 if (known_eq (sctx
->max_vf
, 0U))
4118 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
4119 if (maybe_gt (sctx
->max_vf
, 1U))
4121 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4122 OMP_CLAUSE_SAFELEN
);
4125 poly_uint64 safe_len
;
4126 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4127 || maybe_lt (safe_len
, 1U))
4130 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
4133 if (maybe_gt (sctx
->max_vf
, 1U))
4135 sctx
->idx
= create_tmp_var (unsigned_type_node
);
4136 sctx
->lane
= create_tmp_var (unsigned_type_node
);
4139 if (known_eq (sctx
->max_vf
, 1U))
4144 if (is_gimple_reg (new_var
))
4146 ivar
= lvar
= new_var
;
4149 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
4150 ivar
= lvar
= create_tmp_var (type
);
4151 TREE_ADDRESSABLE (ivar
) = 1;
4152 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
4153 NULL
, DECL_ATTRIBUTES (ivar
));
4154 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
4155 tree clobber
= build_clobber (type
);
4156 gimple
*g
= gimple_build_assign (ivar
, clobber
);
4157 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
4161 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
4162 tree avar
= create_tmp_var_raw (atype
);
4163 if (TREE_ADDRESSABLE (new_var
))
4164 TREE_ADDRESSABLE (avar
) = 1;
4165 DECL_ATTRIBUTES (avar
)
4166 = tree_cons (get_identifier ("omp simd array"), NULL
,
4167 DECL_ATTRIBUTES (avar
));
4168 gimple_add_tmp_var (avar
);
4170 if (rvar
&& !ctx
->for_simd_scan_phase
)
4172 /* For inscan reductions, create another array temporary,
4173 which will hold the reduced value. */
4174 iavar
= create_tmp_var_raw (atype
);
4175 if (TREE_ADDRESSABLE (new_var
))
4176 TREE_ADDRESSABLE (iavar
) = 1;
4177 DECL_ATTRIBUTES (iavar
)
4178 = tree_cons (get_identifier ("omp simd array"), NULL
,
4179 tree_cons (get_identifier ("omp simd inscan"), NULL
,
4180 DECL_ATTRIBUTES (iavar
)));
4181 gimple_add_tmp_var (iavar
);
4182 ctx
->cb
.decl_map
->put (avar
, iavar
);
4183 if (sctx
->lastlane
== NULL_TREE
)
4184 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
4185 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
4186 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
4187 TREE_THIS_NOTRAP (*rvar
) = 1;
4189 if (ctx
->scan_exclusive
)
4191 /* And for exclusive scan yet another one, which will
4192 hold the value during the scan phase. */
4193 tree savar
= create_tmp_var_raw (atype
);
4194 if (TREE_ADDRESSABLE (new_var
))
4195 TREE_ADDRESSABLE (savar
) = 1;
4196 DECL_ATTRIBUTES (savar
)
4197 = tree_cons (get_identifier ("omp simd array"), NULL
,
4198 tree_cons (get_identifier ("omp simd inscan "
4200 DECL_ATTRIBUTES (savar
)));
4201 gimple_add_tmp_var (savar
);
4202 ctx
->cb
.decl_map
->put (iavar
, savar
);
4203 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
4204 sctx
->idx
, NULL_TREE
, NULL_TREE
);
4205 TREE_THIS_NOTRAP (*rvar2
) = 1;
4208 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
4209 NULL_TREE
, NULL_TREE
);
4210 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
4211 NULL_TREE
, NULL_TREE
);
4212 TREE_THIS_NOTRAP (ivar
) = 1;
4213 TREE_THIS_NOTRAP (lvar
) = 1;
4215 if (DECL_P (new_var
))
4217 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4218 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4223 /* Helper function of lower_rec_input_clauses. For a reference
4224 in simd reduction, add an underlying variable it will reference. */
4227 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
4229 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
4230 if (TREE_CONSTANT (z
))
4232 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
4233 get_name (new_vard
));
4234 gimple_add_tmp_var (z
);
4235 TREE_ADDRESSABLE (z
) = 1;
4236 z
= build_fold_addr_expr_loc (loc
, z
);
4237 gimplify_assign (new_vard
, z
, ilist
);
4241 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4242 code to emit (type) (tskred_temp[idx]). */
4245 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
4248 unsigned HOST_WIDE_INT sz
4249 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
4250 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
4251 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
4253 tree v
= create_tmp_var (pointer_sized_int_node
);
4254 gimple
*g
= gimple_build_assign (v
, r
);
4255 gimple_seq_add_stmt (ilist
, g
);
4256 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
4258 v
= create_tmp_var (type
);
4259 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
4260 gimple_seq_add_stmt (ilist
, g
);
4265 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4266 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4267 private variables. Initialization statements go in ILIST, while calls
4268 to destructors go in DLIST. */
4271 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
4272 omp_context
*ctx
, struct omp_for_data
*fd
)
4274 tree c
, copyin_seq
, x
, ptr
;
4275 bool copyin_by_ref
= false;
4276 bool lastprivate_firstprivate
= false;
4277 bool reduction_omp_orig_ref
= false;
4279 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4280 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
4281 omplow_simd_context sctx
= omplow_simd_context ();
4282 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
4283 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
4284 gimple_seq llist
[4] = { };
4285 tree nonconst_simd_if
= NULL_TREE
;
4288 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
4290 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4291 with data sharing clauses referencing variable sized vars. That
4292 is unnecessarily hard to support and very unlikely to result in
4293 vectorized code anyway. */
4295 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4296 switch (OMP_CLAUSE_CODE (c
))
4298 case OMP_CLAUSE_LINEAR
:
4299 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4302 case OMP_CLAUSE_PRIVATE
:
4303 case OMP_CLAUSE_FIRSTPRIVATE
:
4304 case OMP_CLAUSE_LASTPRIVATE
:
4305 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
4307 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4309 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4310 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4314 case OMP_CLAUSE_REDUCTION
:
4315 case OMP_CLAUSE_IN_REDUCTION
:
4316 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4317 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4319 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4321 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4322 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4327 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4329 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4330 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4332 case OMP_CLAUSE_SIMDLEN
:
4333 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4336 case OMP_CLAUSE__CONDTEMP_
:
4337 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4345 /* Add a placeholder for simduid. */
4346 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
4347 sctx
.simt_eargs
.safe_push (NULL_TREE
);
4349 unsigned task_reduction_cnt
= 0;
4350 unsigned task_reduction_cntorig
= 0;
4351 unsigned task_reduction_cnt_full
= 0;
4352 unsigned task_reduction_cntorig_full
= 0;
4353 unsigned task_reduction_other_cnt
= 0;
4354 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
4355 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
4356 /* Do all the fixed sized types in the first pass, and the variable sized
4357 types in the second pass. This makes sure that the scalar arguments to
4358 the variable sized types are processed before we use them in the
4359 variable sized operations. For task reductions we use 4 passes, in the
4360 first two we ignore them, in the third one gather arguments for
4361 GOMP_task_reduction_remap call and in the last pass actually handle
4362 the task reductions. */
4363 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
4366 if (pass
== 2 && task_reduction_cnt
)
4369 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
4370 + task_reduction_cntorig
);
4371 tskred_avar
= create_tmp_var_raw (tskred_atype
);
4372 gimple_add_tmp_var (tskred_avar
);
4373 TREE_ADDRESSABLE (tskred_avar
) = 1;
4374 task_reduction_cnt_full
= task_reduction_cnt
;
4375 task_reduction_cntorig_full
= task_reduction_cntorig
;
4377 else if (pass
== 3 && task_reduction_cnt
)
4379 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
4381 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
4382 size_int (task_reduction_cntorig
),
4383 build_fold_addr_expr (tskred_avar
));
4384 gimple_seq_add_stmt (ilist
, g
);
4386 if (pass
== 3 && task_reduction_other_cnt
)
4388 /* For reduction clauses, build
4389 tskred_base = (void *) tskred_temp[2]
4390 + omp_get_thread_num () * tskred_temp[1]
4391 or if tskred_temp[1] is known to be constant, that constant
4392 directly. This is the start of the private reduction copy block
4393 for the current thread. */
4394 tree v
= create_tmp_var (integer_type_node
);
4395 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
4396 gimple
*g
= gimple_build_call (x
, 0);
4397 gimple_call_set_lhs (g
, v
);
4398 gimple_seq_add_stmt (ilist
, g
);
4399 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
4400 tskred_temp
= OMP_CLAUSE_DECL (c
);
4401 if (is_taskreg_ctx (ctx
))
4402 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
4403 tree v2
= create_tmp_var (sizetype
);
4404 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
4405 gimple_seq_add_stmt (ilist
, g
);
4406 if (ctx
->task_reductions
[0])
4407 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
4409 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
4410 tree v3
= create_tmp_var (sizetype
);
4411 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
4412 gimple_seq_add_stmt (ilist
, g
);
4413 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
4414 tskred_base
= create_tmp_var (ptr_type_node
);
4415 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
4416 gimple_seq_add_stmt (ilist
, g
);
4418 task_reduction_cnt
= 0;
4419 task_reduction_cntorig
= 0;
4420 task_reduction_other_cnt
= 0;
4421 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4423 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
4426 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4427 bool task_reduction_p
= false;
4428 bool task_reduction_needs_orig_p
= false;
4429 tree cond
= NULL_TREE
;
4433 case OMP_CLAUSE_PRIVATE
:
4434 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
4437 case OMP_CLAUSE_SHARED
:
4438 /* Ignore shared directives in teams construct inside
4439 of target construct. */
4440 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4441 && !is_host_teams_ctx (ctx
))
4443 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
4445 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
4446 || is_global_var (OMP_CLAUSE_DECL (c
)));
4449 case OMP_CLAUSE_FIRSTPRIVATE
:
4450 case OMP_CLAUSE_COPYIN
:
4452 case OMP_CLAUSE_LINEAR
:
4453 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
4454 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4455 lastprivate_firstprivate
= true;
4457 case OMP_CLAUSE_REDUCTION
:
4458 case OMP_CLAUSE_IN_REDUCTION
:
4459 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
4461 task_reduction_p
= true;
4462 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
4464 task_reduction_other_cnt
++;
4469 task_reduction_cnt
++;
4470 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4472 var
= OMP_CLAUSE_DECL (c
);
4473 /* If var is a global variable that isn't privatized
4474 in outer contexts, we don't need to look up the
4475 original address, it is always the address of the
4476 global variable itself. */
4478 || omp_is_reference (var
)
4480 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4482 task_reduction_needs_orig_p
= true;
4483 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4484 task_reduction_cntorig
++;
4488 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4489 reduction_omp_orig_ref
= true;
4491 case OMP_CLAUSE__REDUCTEMP_
:
4492 if (!is_taskreg_ctx (ctx
))
4495 case OMP_CLAUSE__LOOPTEMP_
:
4496 /* Handle _looptemp_/_reductemp_ clauses only on
4501 case OMP_CLAUSE_LASTPRIVATE
:
4502 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4504 lastprivate_firstprivate
= true;
4505 if (pass
!= 0 || is_taskloop_ctx (ctx
))
4508 /* Even without corresponding firstprivate, if
4509 decl is Fortran allocatable, it needs outer var
4512 && lang_hooks
.decls
.omp_private_outer_ref
4513 (OMP_CLAUSE_DECL (c
)))
4514 lastprivate_firstprivate
= true;
4516 case OMP_CLAUSE_ALIGNED
:
4519 var
= OMP_CLAUSE_DECL (c
);
4520 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
4521 && !is_global_var (var
))
4523 new_var
= maybe_lookup_decl (var
, ctx
);
4524 if (new_var
== NULL_TREE
)
4525 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4526 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4527 tree alarg
= omp_clause_aligned_alignment (c
);
4528 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4529 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
4530 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4531 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4532 gimplify_and_add (x
, ilist
);
4534 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
4535 && is_global_var (var
))
4537 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
4538 new_var
= lookup_decl (var
, ctx
);
4539 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4540 t
= build_fold_addr_expr_loc (clause_loc
, t
);
4541 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4542 tree alarg
= omp_clause_aligned_alignment (c
);
4543 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4544 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
4545 t
= fold_convert_loc (clause_loc
, ptype
, t
);
4546 x
= create_tmp_var (ptype
);
4547 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
4548 gimplify_and_add (t
, ilist
);
4549 t
= build_simple_mem_ref_loc (clause_loc
, x
);
4550 SET_DECL_VALUE_EXPR (new_var
, t
);
4551 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4554 case OMP_CLAUSE__CONDTEMP_
:
4555 if (is_parallel_ctx (ctx
)
4556 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
4563 if (task_reduction_p
!= (pass
>= 2))
4566 new_var
= var
= OMP_CLAUSE_DECL (c
);
4567 if ((c_kind
== OMP_CLAUSE_REDUCTION
4568 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4569 && TREE_CODE (var
) == MEM_REF
)
4571 var
= TREE_OPERAND (var
, 0);
4572 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
4573 var
= TREE_OPERAND (var
, 0);
4574 if (TREE_CODE (var
) == INDIRECT_REF
4575 || TREE_CODE (var
) == ADDR_EXPR
)
4576 var
= TREE_OPERAND (var
, 0);
4577 if (is_variable_sized (var
))
4579 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
4580 var
= DECL_VALUE_EXPR (var
);
4581 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
4582 var
= TREE_OPERAND (var
, 0);
4583 gcc_assert (DECL_P (var
));
4587 if (c_kind
!= OMP_CLAUSE_COPYIN
)
4588 new_var
= lookup_decl (var
, ctx
);
4590 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
4595 /* C/C++ array section reductions. */
4596 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4597 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4598 && var
!= OMP_CLAUSE_DECL (c
))
4603 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
4604 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
4606 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
4608 tree b
= TREE_OPERAND (orig_var
, 1);
4609 b
= maybe_lookup_decl (b
, ctx
);
4612 b
= TREE_OPERAND (orig_var
, 1);
4613 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
4615 if (integer_zerop (bias
))
4619 bias
= fold_convert_loc (clause_loc
,
4620 TREE_TYPE (b
), bias
);
4621 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4622 TREE_TYPE (b
), b
, bias
);
4624 orig_var
= TREE_OPERAND (orig_var
, 0);
4628 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4629 if (is_global_var (out
)
4630 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
4631 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
4632 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
4637 bool by_ref
= use_pointer_for_field (var
, NULL
);
4638 x
= build_receiver_ref (var
, by_ref
, ctx
);
4639 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
4640 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
4642 x
= build_fold_addr_expr (x
);
4644 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
4645 x
= build_simple_mem_ref (x
);
4646 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
4648 if (var
== TREE_OPERAND (orig_var
, 0))
4649 x
= build_fold_addr_expr (x
);
4651 bias
= fold_convert (sizetype
, bias
);
4652 x
= fold_convert (ptr_type_node
, x
);
4653 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
4654 TREE_TYPE (x
), x
, bias
);
4655 unsigned cnt
= task_reduction_cnt
- 1;
4656 if (!task_reduction_needs_orig_p
)
4657 cnt
+= (task_reduction_cntorig_full
4658 - task_reduction_cntorig
);
4660 cnt
= task_reduction_cntorig
- 1;
4661 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4662 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4663 gimplify_assign (r
, x
, ilist
);
4667 if (TREE_CODE (orig_var
) == INDIRECT_REF
4668 || TREE_CODE (orig_var
) == ADDR_EXPR
)
4669 orig_var
= TREE_OPERAND (orig_var
, 0);
4670 tree d
= OMP_CLAUSE_DECL (c
);
4671 tree type
= TREE_TYPE (d
);
4672 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
4673 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
4674 const char *name
= get_name (orig_var
);
4677 tree xv
= create_tmp_var (ptr_type_node
);
4678 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4680 unsigned cnt
= task_reduction_cnt
- 1;
4681 if (!task_reduction_needs_orig_p
)
4682 cnt
+= (task_reduction_cntorig_full
4683 - task_reduction_cntorig
);
4685 cnt
= task_reduction_cntorig
- 1;
4686 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4687 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4689 gimple
*g
= gimple_build_assign (xv
, x
);
4690 gimple_seq_add_stmt (ilist
, g
);
4694 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4696 if (ctx
->task_reductions
[1 + idx
])
4697 off
= fold_convert (sizetype
,
4698 ctx
->task_reductions
[1 + idx
]);
4700 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4702 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
4704 gimple_seq_add_stmt (ilist
, g
);
4706 x
= fold_convert (build_pointer_type (boolean_type_node
),
4708 if (TREE_CONSTANT (v
))
4709 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
4710 TYPE_SIZE_UNIT (type
));
4713 tree t
= maybe_lookup_decl (v
, ctx
);
4717 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4718 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
4720 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4722 build_int_cst (TREE_TYPE (v
), 1));
4723 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4725 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4726 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4728 cond
= create_tmp_var (TREE_TYPE (x
));
4729 gimplify_assign (cond
, x
, ilist
);
4732 else if (TREE_CONSTANT (v
))
4734 x
= create_tmp_var_raw (type
, name
);
4735 gimple_add_tmp_var (x
);
4736 TREE_ADDRESSABLE (x
) = 1;
4737 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4742 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4743 tree t
= maybe_lookup_decl (v
, ctx
);
4747 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4748 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
4749 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4751 build_int_cst (TREE_TYPE (v
), 1));
4752 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4754 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4755 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
4756 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
4759 tree ptype
= build_pointer_type (TREE_TYPE (type
));
4760 x
= fold_convert_loc (clause_loc
, ptype
, x
);
4761 tree y
= create_tmp_var (ptype
, name
);
4762 gimplify_assign (y
, x
, ilist
);
4766 if (!integer_zerop (bias
))
4768 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4770 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4772 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
4773 pointer_sized_int_node
, yb
, bias
);
4774 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
4775 yb
= create_tmp_var (ptype
, name
);
4776 gimplify_assign (yb
, x
, ilist
);
4780 d
= TREE_OPERAND (d
, 0);
4781 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
4782 d
= TREE_OPERAND (d
, 0);
4783 if (TREE_CODE (d
) == ADDR_EXPR
)
4785 if (orig_var
!= var
)
4787 gcc_assert (is_variable_sized (orig_var
));
4788 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
4790 gimplify_assign (new_var
, x
, ilist
);
4791 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
4792 tree t
= build_fold_indirect_ref (new_var
);
4793 DECL_IGNORED_P (new_var
) = 0;
4794 TREE_THIS_NOTRAP (t
) = 1;
4795 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
4796 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
4800 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
4801 build_int_cst (ptype
, 0));
4802 SET_DECL_VALUE_EXPR (new_var
, x
);
4803 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4808 gcc_assert (orig_var
== var
);
4809 if (TREE_CODE (d
) == INDIRECT_REF
)
4811 x
= create_tmp_var (ptype
, name
);
4812 TREE_ADDRESSABLE (x
) = 1;
4813 gimplify_assign (x
, yb
, ilist
);
4814 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4816 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4817 gimplify_assign (new_var
, x
, ilist
);
4819 /* GOMP_taskgroup_reduction_register memsets the whole
4820 array to zero. If the initializer is zero, we don't
4821 need to initialize it again, just mark it as ever
4822 used unconditionally, i.e. cond = true. */
4824 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
4825 && initializer_zerop (omp_reduction_init (c
,
4828 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
4830 gimple_seq_add_stmt (ilist
, g
);
4833 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4837 if (!is_parallel_ctx (ctx
))
4839 tree condv
= create_tmp_var (boolean_type_node
);
4840 g
= gimple_build_assign (condv
,
4841 build_simple_mem_ref (cond
));
4842 gimple_seq_add_stmt (ilist
, g
);
4843 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
4844 g
= gimple_build_cond (NE_EXPR
, condv
,
4845 boolean_false_node
, end
, lab1
);
4846 gimple_seq_add_stmt (ilist
, g
);
4847 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
4849 g
= gimple_build_assign (build_simple_mem_ref (cond
),
4851 gimple_seq_add_stmt (ilist
, g
);
4854 tree y1
= create_tmp_var (ptype
);
4855 gimplify_assign (y1
, y
, ilist
);
4856 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
4857 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
4858 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
4859 if (task_reduction_needs_orig_p
)
4861 y3
= create_tmp_var (ptype
);
4863 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4864 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4865 size_int (task_reduction_cnt_full
4866 + task_reduction_cntorig
- 1),
4867 NULL_TREE
, NULL_TREE
);
4870 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4871 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
4874 gimplify_assign (y3
, ref
, ilist
);
4876 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
4880 y2
= create_tmp_var (ptype
);
4881 gimplify_assign (y2
, y
, ilist
);
4883 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4885 tree ref
= build_outer_var_ref (var
, ctx
);
4886 /* For ref build_outer_var_ref already performs this. */
4887 if (TREE_CODE (d
) == INDIRECT_REF
)
4888 gcc_assert (omp_is_reference (var
));
4889 else if (TREE_CODE (d
) == ADDR_EXPR
)
4890 ref
= build_fold_addr_expr (ref
);
4891 else if (omp_is_reference (var
))
4892 ref
= build_fold_addr_expr (ref
);
4893 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
4894 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
4895 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4897 y3
= create_tmp_var (ptype
);
4898 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
4902 y4
= create_tmp_var (ptype
);
4903 gimplify_assign (y4
, ref
, dlist
);
4907 tree i
= create_tmp_var (TREE_TYPE (v
));
4908 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
4909 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4910 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
4913 i2
= create_tmp_var (TREE_TYPE (v
));
4914 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
4915 body2
= create_artificial_label (UNKNOWN_LOCATION
);
4916 end2
= create_artificial_label (UNKNOWN_LOCATION
);
4917 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
4919 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4921 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
4922 tree decl_placeholder
4923 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
4924 SET_DECL_VALUE_EXPR (decl_placeholder
,
4925 build_simple_mem_ref (y1
));
4926 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
4927 SET_DECL_VALUE_EXPR (placeholder
,
4928 y3
? build_simple_mem_ref (y3
)
4930 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
4931 x
= lang_hooks
.decls
.omp_clause_default_ctor
4932 (c
, build_simple_mem_ref (y1
),
4933 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
4935 gimplify_and_add (x
, ilist
);
4936 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4938 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4939 lower_omp (&tseq
, ctx
);
4940 gimple_seq_add_seq (ilist
, tseq
);
4942 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4945 SET_DECL_VALUE_EXPR (decl_placeholder
,
4946 build_simple_mem_ref (y2
));
4947 SET_DECL_VALUE_EXPR (placeholder
,
4948 build_simple_mem_ref (y4
));
4949 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4950 lower_omp (&tseq
, ctx
);
4951 gimple_seq_add_seq (dlist
, tseq
);
4952 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4954 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4955 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
4958 x
= lang_hooks
.decls
.omp_clause_dtor
4959 (c
, build_simple_mem_ref (y2
));
4961 gimplify_and_add (x
, dlist
);
4966 x
= omp_reduction_init (c
, TREE_TYPE (type
));
4967 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
4969 /* reduction(-:var) sums up the partial results, so it
4970 acts identically to reduction(+:var). */
4971 if (code
== MINUS_EXPR
)
4974 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
4977 x
= build2 (code
, TREE_TYPE (type
),
4978 build_simple_mem_ref (y4
),
4979 build_simple_mem_ref (y2
));
4980 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
4984 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
4985 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4986 gimple_seq_add_stmt (ilist
, g
);
4989 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
4990 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4991 gimple_seq_add_stmt (ilist
, g
);
4993 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
4994 build_int_cst (TREE_TYPE (i
), 1));
4995 gimple_seq_add_stmt (ilist
, g
);
4996 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
4997 gimple_seq_add_stmt (ilist
, g
);
4998 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
5001 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
5002 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5003 gimple_seq_add_stmt (dlist
, g
);
5006 g
= gimple_build_assign
5007 (y4
, POINTER_PLUS_EXPR
, y4
,
5008 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5009 gimple_seq_add_stmt (dlist
, g
);
5011 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
5012 build_int_cst (TREE_TYPE (i2
), 1));
5013 gimple_seq_add_stmt (dlist
, g
);
5014 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
5015 gimple_seq_add_stmt (dlist
, g
);
5016 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
5022 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
5026 bool by_ref
= use_pointer_for_field (var
, ctx
);
5027 x
= build_receiver_ref (var
, by_ref
, ctx
);
5029 if (!omp_is_reference (var
))
5030 x
= build_fold_addr_expr (x
);
5031 x
= fold_convert (ptr_type_node
, x
);
5032 unsigned cnt
= task_reduction_cnt
- 1;
5033 if (!task_reduction_needs_orig_p
)
5034 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
5036 cnt
= task_reduction_cntorig
- 1;
5037 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5038 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5039 gimplify_assign (r
, x
, ilist
);
5044 tree type
= TREE_TYPE (new_var
);
5045 if (!omp_is_reference (var
))
5046 type
= build_pointer_type (type
);
5047 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5049 unsigned cnt
= task_reduction_cnt
- 1;
5050 if (!task_reduction_needs_orig_p
)
5051 cnt
+= (task_reduction_cntorig_full
5052 - task_reduction_cntorig
);
5054 cnt
= task_reduction_cntorig
- 1;
5055 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5056 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5060 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5062 if (ctx
->task_reductions
[1 + idx
])
5063 off
= fold_convert (sizetype
,
5064 ctx
->task_reductions
[1 + idx
]);
5066 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5068 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
5071 x
= fold_convert (type
, x
);
5073 if (omp_is_reference (var
))
5075 gimplify_assign (new_var
, x
, ilist
);
5077 new_var
= build_simple_mem_ref (new_var
);
5081 t
= create_tmp_var (type
);
5082 gimplify_assign (t
, x
, ilist
);
5083 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
5084 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5086 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
5087 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
5088 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5089 cond
= create_tmp_var (TREE_TYPE (t
));
5090 gimplify_assign (cond
, t
, ilist
);
5092 else if (is_variable_sized (var
))
5094 /* For variable sized types, we need to allocate the
5095 actual storage here. Call alloca and store the
5096 result in the pointer decl that we created elsewhere. */
5100 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
5105 ptr
= DECL_VALUE_EXPR (new_var
);
5106 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
5107 ptr
= TREE_OPERAND (ptr
, 0);
5108 gcc_assert (DECL_P (ptr
));
5109 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
5111 /* void *tmp = __builtin_alloca */
5112 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5113 stmt
= gimple_build_call (atmp
, 2, x
,
5114 size_int (DECL_ALIGN (var
)));
5115 tmp
= create_tmp_var_raw (ptr_type_node
);
5116 gimple_add_tmp_var (tmp
);
5117 gimple_call_set_lhs (stmt
, tmp
);
5119 gimple_seq_add_stmt (ilist
, stmt
);
5121 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
5122 gimplify_assign (ptr
, x
, ilist
);
5125 else if (omp_is_reference (var
)
5126 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
5127 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
5129 /* For references that are being privatized for Fortran,
5130 allocate new backing storage for the new pointer
5131 variable. This allows us to avoid changing all the
5132 code that expects a pointer to something that expects
5133 a direct variable. */
5137 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
5138 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
5140 x
= build_receiver_ref (var
, false, ctx
);
5141 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5143 else if (TREE_CONSTANT (x
))
5145 /* For reduction in SIMD loop, defer adding the
5146 initialization of the reference, because if we decide
5147 to use SIMD array for it, the initilization could cause
5148 expansion ICE. Ditto for other privatization clauses. */
5153 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
5155 gimple_add_tmp_var (x
);
5156 TREE_ADDRESSABLE (x
) = 1;
5157 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5163 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5164 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
5165 tree al
= size_int (TYPE_ALIGN (rtype
));
5166 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
5171 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5172 gimplify_assign (new_var
, x
, ilist
);
5175 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5177 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5178 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5179 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5187 switch (OMP_CLAUSE_CODE (c
))
5189 case OMP_CLAUSE_SHARED
:
5190 /* Ignore shared directives in teams construct inside
5191 target construct. */
5192 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5193 && !is_host_teams_ctx (ctx
))
5195 /* Shared global vars are just accessed directly. */
5196 if (is_global_var (new_var
))
5198 /* For taskloop firstprivate/lastprivate, represented
5199 as firstprivate and shared clause on the task, new_var
5200 is the firstprivate var. */
5201 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5203 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5204 needs to be delayed until after fixup_child_record_type so
5205 that we get the correct type during the dereference. */
5206 by_ref
= use_pointer_for_field (var
, ctx
);
5207 x
= build_receiver_ref (var
, by_ref
, ctx
);
5208 SET_DECL_VALUE_EXPR (new_var
, x
);
5209 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5211 /* ??? If VAR is not passed by reference, and the variable
5212 hasn't been initialized yet, then we'll get a warning for
5213 the store into the omp_data_s structure. Ideally, we'd be
5214 able to notice this and not store anything at all, but
5215 we're generating code too early. Suppress the warning. */
5217 TREE_NO_WARNING (var
) = 1;
5220 case OMP_CLAUSE__CONDTEMP_
:
5221 if (is_parallel_ctx (ctx
))
5223 x
= build_receiver_ref (var
, false, ctx
);
5224 SET_DECL_VALUE_EXPR (new_var
, x
);
5225 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5227 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5229 x
= build_zero_cst (TREE_TYPE (var
));
5234 case OMP_CLAUSE_LASTPRIVATE
:
5235 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5239 case OMP_CLAUSE_PRIVATE
:
5240 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5241 x
= build_outer_var_ref (var
, ctx
);
5242 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5244 if (is_task_ctx (ctx
))
5245 x
= build_receiver_ref (var
, false, ctx
);
5247 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5255 nx
= unshare_expr (new_var
);
5257 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5258 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
5261 nx
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, nx
, x
);
5263 nx
= lang_hooks
.decls
.omp_clause_default_ctor (c
, nx
, x
);
5266 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5267 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5268 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5269 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5270 || (gimple_omp_for_index (ctx
->stmt
, 0)
5272 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5273 || omp_is_reference (var
))
5274 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5277 if (omp_is_reference (var
))
5279 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5280 tree new_vard
= TREE_OPERAND (new_var
, 0);
5281 gcc_assert (DECL_P (new_vard
));
5282 SET_DECL_VALUE_EXPR (new_vard
,
5283 build_fold_addr_expr (lvar
));
5284 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5289 tree iv
= unshare_expr (ivar
);
5291 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
,
5294 x
= lang_hooks
.decls
.omp_clause_default_ctor (c
,
5298 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
5300 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
5301 unshare_expr (ivar
), x
);
5305 gimplify_and_add (x
, &llist
[0]);
5306 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5307 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5312 gcc_assert (TREE_CODE (v
) == MEM_REF
);
5313 v
= TREE_OPERAND (v
, 0);
5314 gcc_assert (DECL_P (v
));
5316 v
= *ctx
->lastprivate_conditional_map
->get (v
);
5317 tree t
= create_tmp_var (TREE_TYPE (v
));
5318 tree z
= build_zero_cst (TREE_TYPE (v
));
5320 = build_outer_var_ref (var
, ctx
,
5321 OMP_CLAUSE_LASTPRIVATE
);
5322 gimple_seq_add_stmt (dlist
,
5323 gimple_build_assign (t
, z
));
5324 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
5325 tree civar
= DECL_VALUE_EXPR (v
);
5326 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
5327 civar
= unshare_expr (civar
);
5328 TREE_OPERAND (civar
, 1) = sctx
.idx
;
5329 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
5330 unshare_expr (civar
));
5331 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
5332 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
5333 orig_v
, unshare_expr (ivar
)));
5334 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
5336 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
5338 gimple_seq tseq
= NULL
;
5339 gimplify_and_add (x
, &tseq
);
5341 lower_omp (&tseq
, ctx
->outer
);
5342 gimple_seq_add_seq (&llist
[1], tseq
);
5344 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5345 && ctx
->for_simd_scan_phase
)
5347 x
= unshare_expr (ivar
);
5349 = build_outer_var_ref (var
, ctx
,
5350 OMP_CLAUSE_LASTPRIVATE
);
5351 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5353 gimplify_and_add (x
, &llist
[0]);
5357 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5359 gimplify_and_add (y
, &llist
[1]);
5363 if (omp_is_reference (var
))
5365 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5366 tree new_vard
= TREE_OPERAND (new_var
, 0);
5367 gcc_assert (DECL_P (new_vard
));
5368 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5369 x
= TYPE_SIZE_UNIT (type
);
5370 if (TREE_CONSTANT (x
))
5372 x
= create_tmp_var_raw (type
, get_name (var
));
5373 gimple_add_tmp_var (x
);
5374 TREE_ADDRESSABLE (x
) = 1;
5375 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5376 x
= fold_convert_loc (clause_loc
,
5377 TREE_TYPE (new_vard
), x
);
5378 gimplify_assign (new_vard
, x
, ilist
);
5383 gimplify_and_add (nx
, ilist
);
5384 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5386 && ctx
->for_simd_scan_phase
)
5388 tree orig_v
= build_outer_var_ref (var
, ctx
,
5389 OMP_CLAUSE_LASTPRIVATE
);
5390 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
5392 gimplify_and_add (x
, ilist
);
5397 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5399 gimplify_and_add (x
, dlist
);
5402 case OMP_CLAUSE_LINEAR
:
5403 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
5404 goto do_firstprivate
;
5405 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
5408 x
= build_outer_var_ref (var
, ctx
);
5411 case OMP_CLAUSE_FIRSTPRIVATE
:
5412 if (is_task_ctx (ctx
))
5414 if ((omp_is_reference (var
)
5415 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
5416 || is_variable_sized (var
))
5418 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
5420 || use_pointer_for_field (var
, NULL
))
5422 x
= build_receiver_ref (var
, false, ctx
);
5423 SET_DECL_VALUE_EXPR (new_var
, x
);
5424 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5428 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
5429 && omp_is_reference (var
))
5431 x
= build_outer_var_ref (var
, ctx
);
5432 gcc_assert (TREE_CODE (x
) == MEM_REF
5433 && integer_zerop (TREE_OPERAND (x
, 1)));
5434 x
= TREE_OPERAND (x
, 0);
5435 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5436 (c
, unshare_expr (new_var
), x
);
5437 gimplify_and_add (x
, ilist
);
5441 x
= build_outer_var_ref (var
, ctx
);
5444 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5445 && gimple_omp_for_combined_into_p (ctx
->stmt
))
5447 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5448 tree stept
= TREE_TYPE (t
);
5449 tree ct
= omp_find_clause (clauses
,
5450 OMP_CLAUSE__LOOPTEMP_
);
5452 tree l
= OMP_CLAUSE_DECL (ct
);
5453 tree n1
= fd
->loop
.n1
;
5454 tree step
= fd
->loop
.step
;
5455 tree itype
= TREE_TYPE (l
);
5456 if (POINTER_TYPE_P (itype
))
5457 itype
= signed_type_for (itype
);
5458 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
5459 if (TYPE_UNSIGNED (itype
)
5460 && fd
->loop
.cond_code
== GT_EXPR
)
5461 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
5462 fold_build1 (NEGATE_EXPR
, itype
, l
),
5463 fold_build1 (NEGATE_EXPR
,
5466 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
5467 t
= fold_build2 (MULT_EXPR
, stept
,
5468 fold_convert (stept
, l
), t
);
5470 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
5472 if (omp_is_reference (var
))
5474 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5475 tree new_vard
= TREE_OPERAND (new_var
, 0);
5476 gcc_assert (DECL_P (new_vard
));
5477 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5478 nx
= TYPE_SIZE_UNIT (type
);
5479 if (TREE_CONSTANT (nx
))
5481 nx
= create_tmp_var_raw (type
,
5483 gimple_add_tmp_var (nx
);
5484 TREE_ADDRESSABLE (nx
) = 1;
5485 nx
= build_fold_addr_expr_loc (clause_loc
,
5487 nx
= fold_convert_loc (clause_loc
,
5488 TREE_TYPE (new_vard
),
5490 gimplify_assign (new_vard
, nx
, ilist
);
5494 x
= lang_hooks
.decls
.omp_clause_linear_ctor
5496 gimplify_and_add (x
, ilist
);
5500 if (POINTER_TYPE_P (TREE_TYPE (x
)))
5501 x
= fold_build2 (POINTER_PLUS_EXPR
,
5502 TREE_TYPE (x
), x
, t
);
5504 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5507 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
5508 || TREE_ADDRESSABLE (new_var
)
5509 || omp_is_reference (var
))
5510 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5513 if (omp_is_reference (var
))
5515 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5516 tree new_vard
= TREE_OPERAND (new_var
, 0);
5517 gcc_assert (DECL_P (new_vard
));
5518 SET_DECL_VALUE_EXPR (new_vard
,
5519 build_fold_addr_expr (lvar
));
5520 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5522 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
5524 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
5525 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
5526 gimplify_and_add (x
, ilist
);
5527 gimple_stmt_iterator gsi
5528 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
5530 = gimple_build_assign (unshare_expr (lvar
), iv
);
5531 gsi_insert_before_without_update (&gsi
, g
,
5533 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5534 enum tree_code code
= PLUS_EXPR
;
5535 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
5536 code
= POINTER_PLUS_EXPR
;
5537 g
= gimple_build_assign (iv
, code
, iv
, t
);
5538 gsi_insert_before_without_update (&gsi
, g
,
5542 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5543 (c
, unshare_expr (ivar
), x
);
5544 gimplify_and_add (x
, &llist
[0]);
5545 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5547 gimplify_and_add (x
, &llist
[1]);
5550 if (omp_is_reference (var
))
5552 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5553 tree new_vard
= TREE_OPERAND (new_var
, 0);
5554 gcc_assert (DECL_P (new_vard
));
5555 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5556 nx
= TYPE_SIZE_UNIT (type
);
5557 if (TREE_CONSTANT (nx
))
5559 nx
= create_tmp_var_raw (type
, get_name (var
));
5560 gimple_add_tmp_var (nx
);
5561 TREE_ADDRESSABLE (nx
) = 1;
5562 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
5563 nx
= fold_convert_loc (clause_loc
,
5564 TREE_TYPE (new_vard
), nx
);
5565 gimplify_assign (new_vard
, nx
, ilist
);
5569 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5570 (c
, unshare_expr (new_var
), x
);
5571 gimplify_and_add (x
, ilist
);
5574 case OMP_CLAUSE__LOOPTEMP_
:
5575 case OMP_CLAUSE__REDUCTEMP_
:
5576 gcc_assert (is_taskreg_ctx (ctx
));
5577 x
= build_outer_var_ref (var
, ctx
);
5578 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5579 gimplify_and_add (x
, ilist
);
5582 case OMP_CLAUSE_COPYIN
:
5583 by_ref
= use_pointer_for_field (var
, NULL
);
5584 x
= build_receiver_ref (var
, by_ref
, ctx
);
5585 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
5586 append_to_statement_list (x
, ©in_seq
);
5587 copyin_by_ref
|= by_ref
;
5590 case OMP_CLAUSE_REDUCTION
:
5591 case OMP_CLAUSE_IN_REDUCTION
:
5592 /* OpenACC reductions are initialized using the
5593 GOACC_REDUCTION internal function. */
5594 if (is_gimple_omp_oacc (ctx
->stmt
))
5596 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5598 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5600 tree ptype
= TREE_TYPE (placeholder
);
5603 x
= error_mark_node
;
5604 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
5605 && !task_reduction_needs_orig_p
)
5607 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5609 tree pptype
= build_pointer_type (ptype
);
5610 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5611 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5612 size_int (task_reduction_cnt_full
5613 + task_reduction_cntorig
- 1),
5614 NULL_TREE
, NULL_TREE
);
5618 = *ctx
->task_reduction_map
->get (c
);
5619 x
= task_reduction_read (ilist
, tskred_temp
,
5620 pptype
, 7 + 3 * idx
);
5622 x
= fold_convert (pptype
, x
);
5623 x
= build_simple_mem_ref (x
);
5628 x
= build_outer_var_ref (var
, ctx
);
5630 if (omp_is_reference (var
)
5631 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
5632 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5634 SET_DECL_VALUE_EXPR (placeholder
, x
);
5635 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5636 tree new_vard
= new_var
;
5637 if (omp_is_reference (var
))
5639 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5640 new_vard
= TREE_OPERAND (new_var
, 0);
5641 gcc_assert (DECL_P (new_vard
));
5643 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5645 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5646 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5649 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5653 if (new_vard
== new_var
)
5655 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
5656 SET_DECL_VALUE_EXPR (new_var
, ivar
);
5660 SET_DECL_VALUE_EXPR (new_vard
,
5661 build_fold_addr_expr (ivar
));
5662 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5664 x
= lang_hooks
.decls
.omp_clause_default_ctor
5665 (c
, unshare_expr (ivar
),
5666 build_outer_var_ref (var
, ctx
));
5667 if (rvarp
&& ctx
->for_simd_scan_phase
)
5670 gimplify_and_add (x
, &llist
[0]);
5671 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5673 gimplify_and_add (x
, &llist
[1]);
5680 gimplify_and_add (x
, &llist
[0]);
5682 tree ivar2
= unshare_expr (lvar
);
5683 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5684 x
= lang_hooks
.decls
.omp_clause_default_ctor
5685 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
5686 gimplify_and_add (x
, &llist
[0]);
5690 x
= lang_hooks
.decls
.omp_clause_default_ctor
5691 (c
, unshare_expr (rvar2
),
5692 build_outer_var_ref (var
, ctx
));
5693 gimplify_and_add (x
, &llist
[0]);
5696 /* For types that need construction, add another
5697 private var which will be default constructed
5698 and optionally initialized with
5699 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5700 loop we want to assign this value instead of
5701 constructing and destructing it in each
5703 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
5704 gimple_add_tmp_var (nv
);
5705 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
5709 x
= lang_hooks
.decls
.omp_clause_default_ctor
5710 (c
, nv
, build_outer_var_ref (var
, ctx
));
5711 gimplify_and_add (x
, ilist
);
5713 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5715 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5716 x
= DECL_VALUE_EXPR (new_vard
);
5718 if (new_vard
!= new_var
)
5719 vexpr
= build_fold_addr_expr (nv
);
5720 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5721 lower_omp (&tseq
, ctx
);
5722 SET_DECL_VALUE_EXPR (new_vard
, x
);
5723 gimple_seq_add_seq (ilist
, tseq
);
5724 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5727 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5729 gimplify_and_add (x
, dlist
);
5732 tree ref
= build_outer_var_ref (var
, ctx
);
5733 x
= unshare_expr (ivar
);
5734 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5736 gimplify_and_add (x
, &llist
[0]);
5738 ref
= build_outer_var_ref (var
, ctx
);
5739 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
5741 gimplify_and_add (x
, &llist
[3]);
5743 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5744 if (new_vard
== new_var
)
5745 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5747 SET_DECL_VALUE_EXPR (new_vard
,
5748 build_fold_addr_expr (lvar
));
5750 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5752 gimplify_and_add (x
, &llist
[1]);
5754 tree ivar2
= unshare_expr (lvar
);
5755 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5756 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
5758 gimplify_and_add (x
, &llist
[1]);
5762 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
5764 gimplify_and_add (x
, &llist
[1]);
5769 gimplify_and_add (x
, &llist
[0]);
5770 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5772 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5773 lower_omp (&tseq
, ctx
);
5774 gimple_seq_add_seq (&llist
[0], tseq
);
5776 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5777 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5778 lower_omp (&tseq
, ctx
);
5779 gimple_seq_add_seq (&llist
[1], tseq
);
5780 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5781 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5782 if (new_vard
== new_var
)
5783 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5785 SET_DECL_VALUE_EXPR (new_vard
,
5786 build_fold_addr_expr (lvar
));
5787 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5789 gimplify_and_add (x
, &llist
[1]);
5792 /* If this is a reference to constant size reduction var
5793 with placeholder, we haven't emitted the initializer
5794 for it because it is undesirable if SIMD arrays are used.
5795 But if they aren't used, we need to emit the deferred
5796 initialization now. */
5797 else if (omp_is_reference (var
) && is_simd
)
5798 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5800 tree lab2
= NULL_TREE
;
5804 if (!is_parallel_ctx (ctx
))
5806 tree condv
= create_tmp_var (boolean_type_node
);
5807 tree m
= build_simple_mem_ref (cond
);
5808 g
= gimple_build_assign (condv
, m
);
5809 gimple_seq_add_stmt (ilist
, g
);
5811 = create_artificial_label (UNKNOWN_LOCATION
);
5812 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5813 g
= gimple_build_cond (NE_EXPR
, condv
,
5816 gimple_seq_add_stmt (ilist
, g
);
5817 gimple_seq_add_stmt (ilist
,
5818 gimple_build_label (lab1
));
5820 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5822 gimple_seq_add_stmt (ilist
, g
);
5824 x
= lang_hooks
.decls
.omp_clause_default_ctor
5825 (c
, unshare_expr (new_var
),
5827 : build_outer_var_ref (var
, ctx
));
5829 gimplify_and_add (x
, ilist
);
5831 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5832 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5834 if (ctx
->for_simd_scan_phase
)
5837 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
5839 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
5840 gimple_add_tmp_var (nv
);
5841 ctx
->cb
.decl_map
->put (new_vard
, nv
);
5842 x
= lang_hooks
.decls
.omp_clause_default_ctor
5843 (c
, nv
, build_outer_var_ref (var
, ctx
));
5845 gimplify_and_add (x
, ilist
);
5846 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5848 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5850 if (new_vard
!= new_var
)
5851 vexpr
= build_fold_addr_expr (nv
);
5852 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5853 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5854 lower_omp (&tseq
, ctx
);
5855 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
5856 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
5857 gimple_seq_add_seq (ilist
, tseq
);
5859 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5860 if (is_simd
&& ctx
->scan_exclusive
)
5863 = create_tmp_var_raw (TREE_TYPE (new_var
));
5864 gimple_add_tmp_var (nv2
);
5865 ctx
->cb
.decl_map
->put (nv
, nv2
);
5866 x
= lang_hooks
.decls
.omp_clause_default_ctor
5867 (c
, nv2
, build_outer_var_ref (var
, ctx
));
5868 gimplify_and_add (x
, ilist
);
5869 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5871 gimplify_and_add (x
, dlist
);
5873 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5875 gimplify_and_add (x
, dlist
);
5878 && ctx
->scan_exclusive
5879 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
5881 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
5882 gimple_add_tmp_var (nv2
);
5883 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
5884 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5886 gimplify_and_add (x
, dlist
);
5888 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5892 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5894 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5895 lower_omp (&tseq
, ctx
);
5896 gimple_seq_add_seq (ilist
, tseq
);
5898 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5901 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5902 lower_omp (&tseq
, ctx
);
5903 gimple_seq_add_seq (dlist
, tseq
);
5904 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5906 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5910 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5917 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
5918 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
5919 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5924 tree lab2
= NULL_TREE
;
5925 /* GOMP_taskgroup_reduction_register memsets the whole
5926 array to zero. If the initializer is zero, we don't
5927 need to initialize it again, just mark it as ever
5928 used unconditionally, i.e. cond = true. */
5929 if (initializer_zerop (x
))
5931 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5933 gimple_seq_add_stmt (ilist
, g
);
5938 if (!cond) { cond = true; new_var = x; } */
5939 if (!is_parallel_ctx (ctx
))
5941 tree condv
= create_tmp_var (boolean_type_node
);
5942 tree m
= build_simple_mem_ref (cond
);
5943 g
= gimple_build_assign (condv
, m
);
5944 gimple_seq_add_stmt (ilist
, g
);
5946 = create_artificial_label (UNKNOWN_LOCATION
);
5947 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5948 g
= gimple_build_cond (NE_EXPR
, condv
,
5951 gimple_seq_add_stmt (ilist
, g
);
5952 gimple_seq_add_stmt (ilist
,
5953 gimple_build_label (lab1
));
5955 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5957 gimple_seq_add_stmt (ilist
, g
);
5958 gimplify_assign (new_var
, x
, ilist
);
5960 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5964 /* reduction(-:var) sums up the partial results, so it
5965 acts identically to reduction(+:var). */
5966 if (code
== MINUS_EXPR
)
5969 tree new_vard
= new_var
;
5970 if (is_simd
&& omp_is_reference (var
))
5972 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5973 new_vard
= TREE_OPERAND (new_var
, 0);
5974 gcc_assert (DECL_P (new_vard
));
5976 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5978 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5979 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5982 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5986 if (new_vard
!= new_var
)
5988 SET_DECL_VALUE_EXPR (new_vard
,
5989 build_fold_addr_expr (lvar
));
5990 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5993 tree ref
= build_outer_var_ref (var
, ctx
);
5997 if (ctx
->for_simd_scan_phase
)
5999 gimplify_assign (ivar
, ref
, &llist
[0]);
6000 ref
= build_outer_var_ref (var
, ctx
);
6001 gimplify_assign (ref
, rvar
, &llist
[3]);
6005 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
6010 simt_lane
= create_tmp_var (unsigned_type_node
);
6011 x
= build_call_expr_internal_loc
6012 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
6013 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
6014 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
6015 gimplify_assign (ivar
, x
, &llist
[2]);
6017 x
= build2 (code
, TREE_TYPE (ref
), ref
, ivar
);
6018 ref
= build_outer_var_ref (var
, ctx
);
6019 gimplify_assign (ref
, x
, &llist
[1]);
6024 if (omp_is_reference (var
) && is_simd
)
6025 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6026 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6027 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6029 gimplify_assign (new_var
, x
, ilist
);
6032 tree ref
= build_outer_var_ref (var
, ctx
);
6034 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
6035 ref
= build_outer_var_ref (var
, ctx
);
6036 gimplify_assign (ref
, x
, dlist
);
6049 tree clobber
= build_clobber (TREE_TYPE (tskred_avar
));
6050 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
6053 if (known_eq (sctx
.max_vf
, 1U))
6055 sctx
.is_simt
= false;
6056 if (ctx
->lastprivate_conditional_map
)
6058 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
6060 /* Signal to lower_omp_1 that it should use parent context. */
6061 ctx
->combined_into_simd_safelen1
= true;
6062 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6063 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6064 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6066 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6067 omp_context
*outer
= ctx
->outer
;
6068 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
6069 outer
= outer
->outer
;
6070 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
6071 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
6072 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
6078 /* When not vectorized, treat lastprivate(conditional:) like
6079 normal lastprivate, as there will be just one simd lane
6080 writing the privatized variable. */
6081 delete ctx
->lastprivate_conditional_map
;
6082 ctx
->lastprivate_conditional_map
= NULL
;
6087 if (nonconst_simd_if
)
6089 if (sctx
.lane
== NULL_TREE
)
6091 sctx
.idx
= create_tmp_var (unsigned_type_node
);
6092 sctx
.lane
= create_tmp_var (unsigned_type_node
);
6094 /* FIXME: For now. */
6095 sctx
.is_simt
= false;
6098 if (sctx
.lane
|| sctx
.is_simt
)
6100 uid
= create_tmp_var (ptr_type_node
, "simduid");
6101 /* Don't want uninit warnings on simduid, it is always uninitialized,
6102 but we use it not for the value, but for the DECL_UID only. */
6103 TREE_NO_WARNING (uid
) = 1;
6104 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
6105 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
6106 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6107 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6109 /* Emit calls denoting privatized variables and initializing a pointer to
6110 structure that holds private variables as fields after ompdevlow pass. */
6113 sctx
.simt_eargs
[0] = uid
;
6115 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
6116 gimple_call_set_lhs (g
, uid
);
6117 gimple_seq_add_stmt (ilist
, g
);
6118 sctx
.simt_eargs
.release ();
6120 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
6121 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
6122 gimple_call_set_lhs (g
, simtrec
);
6123 gimple_seq_add_stmt (ilist
, g
);
6127 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6128 2 + (nonconst_simd_if
!= NULL
),
6129 uid
, integer_zero_node
,
6131 gimple_call_set_lhs (g
, sctx
.lane
);
6132 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
6133 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6134 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6135 build_int_cst (unsigned_type_node
, 0));
6136 gimple_seq_add_stmt (ilist
, g
);
6139 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6141 gimple_call_set_lhs (g
, sctx
.lastlane
);
6142 gimple_seq_add_stmt (dlist
, g
);
6143 gimple_seq_add_seq (dlist
, llist
[3]);
6145 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6148 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6149 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6150 gimple_call_set_lhs (g
, simt_vf
);
6151 gimple_seq_add_stmt (dlist
, g
);
6153 tree t
= build_int_cst (unsigned_type_node
, 1);
6154 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6155 gimple_seq_add_stmt (dlist
, g
);
6157 t
= build_int_cst (unsigned_type_node
, 0);
6158 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6159 gimple_seq_add_stmt (dlist
, g
);
6161 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6162 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6163 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6164 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6165 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6167 gimple_seq_add_seq (dlist
, llist
[2]);
6169 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6170 gimple_seq_add_stmt (dlist
, g
);
6172 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6173 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6174 gimple_seq_add_stmt (dlist
, g
);
6176 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
6178 for (int i
= 0; i
< 2; i
++)
6181 tree vf
= create_tmp_var (unsigned_type_node
);
6182 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
6183 gimple_call_set_lhs (g
, vf
);
6184 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
6185 gimple_seq_add_stmt (seq
, g
);
6186 tree t
= build_int_cst (unsigned_type_node
, 0);
6187 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6188 gimple_seq_add_stmt (seq
, g
);
6189 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6190 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6191 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6192 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
6193 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
6194 gimple_seq_add_seq (seq
, llist
[i
]);
6195 t
= build_int_cst (unsigned_type_node
, 1);
6196 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
6197 gimple_seq_add_stmt (seq
, g
);
6198 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
6199 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
6200 gimple_seq_add_stmt (seq
, g
);
6201 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
6206 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
6208 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
6209 gimple_seq_add_stmt (dlist
, g
);
6212 /* The copyin sequence is not to be executed by the main thread, since
6213 that would result in self-copies. Perhaps not visible to scalars,
6214 but it certainly is to C++ operator=. */
6217 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
6219 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
6220 build_int_cst (TREE_TYPE (x
), 0));
6221 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
6222 gimplify_and_add (x
, ilist
);
6225 /* If any copyin variable is passed by reference, we must ensure the
6226 master thread doesn't modify it before it is copied over in all
6227 threads. Similarly for variables in both firstprivate and
6228 lastprivate clauses we need to ensure the lastprivate copying
6229 happens after firstprivate copying in all threads. And similarly
6230 for UDRs if initializer expression refers to omp_orig. */
6231 if (copyin_by_ref
|| lastprivate_firstprivate
6232 || (reduction_omp_orig_ref
6233 && !ctx
->scan_inclusive
6234 && !ctx
->scan_exclusive
))
6236 /* Don't add any barrier for #pragma omp simd or
6237 #pragma omp distribute. */
6238 if (!is_task_ctx (ctx
)
6239 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
6240 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
6241 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
6244 /* If max_vf is non-zero, then we can use only a vectorization factor
6245 up to the max_vf we chose. So stick it into the safelen clause. */
6246 if (maybe_ne (sctx
.max_vf
, 0U))
6248 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
6249 OMP_CLAUSE_SAFELEN
);
6250 poly_uint64 safe_len
;
6252 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
6253 && maybe_gt (safe_len
, sctx
.max_vf
)))
6255 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
6256 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
6258 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6259 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6264 /* Create temporary variables for lastprivate(conditional:) implementation
6265 in context CTX with CLAUSES. */
6268 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
6270 tree iter_type
= NULL_TREE
;
6271 tree cond_ptr
= NULL_TREE
;
6272 tree iter_var
= NULL_TREE
;
6273 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6274 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
6275 tree next
= *clauses
;
6276 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6277 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6278 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6282 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
6284 if (iter_type
== NULL_TREE
)
6286 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
6287 iter_var
= create_tmp_var_raw (iter_type
);
6288 DECL_CONTEXT (iter_var
) = current_function_decl
;
6289 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6290 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6291 ctx
->block_vars
= iter_var
;
6293 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6294 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6295 OMP_CLAUSE_DECL (c3
) = iter_var
;
6296 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
6298 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6300 next
= OMP_CLAUSE_CHAIN (cc
);
6301 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6302 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
6303 ctx
->lastprivate_conditional_map
->put (o
, v
);
6306 if (iter_type
== NULL
)
6308 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
6310 struct omp_for_data fd
;
6311 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
6313 iter_type
= unsigned_type_for (fd
.iter_type
);
6315 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
6316 iter_type
= unsigned_type_node
;
6317 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
6321 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
6322 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6326 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
6327 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
6328 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
6329 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
6330 ctx
->block_vars
= cond_ptr
;
6331 c2
= build_omp_clause (UNKNOWN_LOCATION
,
6332 OMP_CLAUSE__CONDTEMP_
);
6333 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6334 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
6337 iter_var
= create_tmp_var_raw (iter_type
);
6338 DECL_CONTEXT (iter_var
) = current_function_decl
;
6339 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6340 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6341 ctx
->block_vars
= iter_var
;
6343 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6344 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6345 OMP_CLAUSE_DECL (c3
) = iter_var
;
6346 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
6347 OMP_CLAUSE_CHAIN (c2
) = c3
;
6348 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6350 tree v
= create_tmp_var_raw (iter_type
);
6351 DECL_CONTEXT (v
) = current_function_decl
;
6352 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
6353 DECL_CHAIN (v
) = ctx
->block_vars
;
6354 ctx
->block_vars
= v
;
6355 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6356 ctx
->lastprivate_conditional_map
->put (o
, v
);
6361 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6362 both parallel and workshare constructs. PREDICATE may be NULL if it's
6363 always true. BODY_P is the sequence to insert early initialization
6364 if needed, STMT_LIST is where the non-conditional lastprivate handling
6365 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6369 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
6370 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
6373 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
6374 bool par_clauses
= false;
6375 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
6376 unsigned HOST_WIDE_INT conditional_off
= 0;
6377 gimple_seq post_stmt_list
= NULL
;
6379 /* Early exit if there are no lastprivate or linear clauses. */
6380 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
6381 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
6382 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
6383 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
6385 if (clauses
== NULL
)
6387 /* If this was a workshare clause, see if it had been combined
6388 with its parallel. In that case, look for the clauses on the
6389 parallel statement itself. */
6390 if (is_parallel_ctx (ctx
))
6394 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6397 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6398 OMP_CLAUSE_LASTPRIVATE
);
6399 if (clauses
== NULL
)
6404 bool maybe_simt
= false;
6405 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6406 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
6408 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
6409 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
6411 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
6417 tree label_true
, arm1
, arm2
;
6418 enum tree_code pred_code
= TREE_CODE (predicate
);
6420 label
= create_artificial_label (UNKNOWN_LOCATION
);
6421 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
6422 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
6424 arm1
= TREE_OPERAND (predicate
, 0);
6425 arm2
= TREE_OPERAND (predicate
, 1);
6426 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6427 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6432 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6433 arm2
= boolean_false_node
;
6434 pred_code
= NE_EXPR
;
6438 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
6439 c
= fold_convert (integer_type_node
, c
);
6440 simtcond
= create_tmp_var (integer_type_node
);
6441 gimplify_assign (simtcond
, c
, stmt_list
);
6442 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
6444 c
= create_tmp_var (integer_type_node
);
6445 gimple_call_set_lhs (g
, c
);
6446 gimple_seq_add_stmt (stmt_list
, g
);
6447 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
6451 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
6452 gimple_seq_add_stmt (stmt_list
, stmt
);
6453 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
6456 tree cond_ptr
= NULL_TREE
;
6457 for (c
= clauses
; c
;)
6460 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6461 gimple_seq
*this_stmt_list
= stmt_list
;
6462 tree lab2
= NULL_TREE
;
6464 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6465 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6466 && ctx
->lastprivate_conditional_map
6467 && !ctx
->combined_into_simd_safelen1
)
6469 gcc_assert (body_p
);
6472 if (cond_ptr
== NULL_TREE
)
6474 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
6475 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
6477 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
6478 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6479 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
6480 gimplify_assign (v
, build_zero_cst (type
), body_p
);
6481 this_stmt_list
= cstmt_list
;
6483 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
6485 mem
= build2 (MEM_REF
, type
, cond_ptr
,
6486 build_int_cst (TREE_TYPE (cond_ptr
),
6488 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
6491 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
6492 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
6493 tree mem2
= copy_node (mem
);
6494 gimple_seq seq
= NULL
;
6495 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
6496 gimple_seq_add_seq (this_stmt_list
, seq
);
6497 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
6498 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6499 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
6500 gimple_seq_add_stmt (this_stmt_list
, g
);
6501 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
6502 gimplify_assign (mem2
, v
, this_stmt_list
);
6505 && ctx
->combined_into_simd_safelen1
6506 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6507 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6508 && ctx
->lastprivate_conditional_map
)
6509 this_stmt_list
= &post_stmt_list
;
6511 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6512 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6513 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
6515 var
= OMP_CLAUSE_DECL (c
);
6516 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6517 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
6518 && is_taskloop_ctx (ctx
))
6520 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
6521 new_var
= lookup_decl (var
, ctx
->outer
);
6525 new_var
= lookup_decl (var
, ctx
);
6526 /* Avoid uninitialized warnings for lastprivate and
6527 for linear iterators. */
6529 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6530 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
6531 TREE_NO_WARNING (new_var
) = 1;
6534 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
6536 tree val
= DECL_VALUE_EXPR (new_var
);
6537 if (TREE_CODE (val
) == ARRAY_REF
6538 && VAR_P (TREE_OPERAND (val
, 0))
6539 && lookup_attribute ("omp simd array",
6540 DECL_ATTRIBUTES (TREE_OPERAND (val
,
6543 if (lastlane
== NULL
)
6545 lastlane
= create_tmp_var (unsigned_type_node
);
6547 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6549 TREE_OPERAND (val
, 1));
6550 gimple_call_set_lhs (g
, lastlane
);
6551 gimple_seq_add_stmt (this_stmt_list
, g
);
6553 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
6554 TREE_OPERAND (val
, 0), lastlane
,
6555 NULL_TREE
, NULL_TREE
);
6556 TREE_THIS_NOTRAP (new_var
) = 1;
6559 else if (maybe_simt
)
6561 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
6562 ? DECL_VALUE_EXPR (new_var
)
6564 if (simtlast
== NULL
)
6566 simtlast
= create_tmp_var (unsigned_type_node
);
6567 gcall
*g
= gimple_build_call_internal
6568 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
6569 gimple_call_set_lhs (g
, simtlast
);
6570 gimple_seq_add_stmt (this_stmt_list
, g
);
6572 x
= build_call_expr_internal_loc
6573 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
6574 TREE_TYPE (val
), 2, val
, simtlast
);
6575 new_var
= unshare_expr (new_var
);
6576 gimplify_assign (new_var
, x
, this_stmt_list
);
6577 new_var
= unshare_expr (new_var
);
6580 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6581 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
6583 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
6584 gimple_seq_add_seq (this_stmt_list
,
6585 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
6586 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
6588 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6589 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
6591 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
6592 gimple_seq_add_seq (this_stmt_list
,
6593 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
6594 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
6598 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6599 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)
6600 && is_taskloop_ctx (ctx
))
6602 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
6604 if (is_global_var (ovar
))
6608 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
6609 if (omp_is_reference (var
))
6610 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6611 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
6612 gimplify_and_add (x
, this_stmt_list
);
6615 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
6619 c
= OMP_CLAUSE_CHAIN (c
);
6620 if (c
== NULL
&& !par_clauses
)
6622 /* If this was a workshare clause, see if it had been combined
6623 with its parallel. In that case, continue looking for the
6624 clauses also on the parallel statement itself. */
6625 if (is_parallel_ctx (ctx
))
6629 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6632 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6633 OMP_CLAUSE_LASTPRIVATE
);
6639 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
6640 gimple_seq_add_seq (stmt_list
, post_stmt_list
);
6643 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6644 (which might be a placeholder). INNER is true if this is an inner
6645 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6646 join markers. Generate the before-loop forking sequence in
6647 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6648 general form of these sequences is
6650 GOACC_REDUCTION_SETUP
6652 GOACC_REDUCTION_INIT
6654 GOACC_REDUCTION_FINI
6656 GOACC_REDUCTION_TEARDOWN. */
6659 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
6660 gcall
*fork
, gcall
*join
, gimple_seq
*fork_seq
,
6661 gimple_seq
*join_seq
, omp_context
*ctx
)
6663 gimple_seq before_fork
= NULL
;
6664 gimple_seq after_fork
= NULL
;
6665 gimple_seq before_join
= NULL
;
6666 gimple_seq after_join
= NULL
;
6667 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
6668 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
6669 unsigned offset
= 0;
6671 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6672 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
6674 tree orig
= OMP_CLAUSE_DECL (c
);
6675 tree var
= maybe_lookup_decl (orig
, ctx
);
6676 tree ref_to_res
= NULL_TREE
;
6677 tree incoming
, outgoing
, v1
, v2
, v3
;
6678 bool is_private
= false;
6680 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
6681 if (rcode
== MINUS_EXPR
)
6683 else if (rcode
== TRUTH_ANDIF_EXPR
)
6684 rcode
= BIT_AND_EXPR
;
6685 else if (rcode
== TRUTH_ORIF_EXPR
)
6686 rcode
= BIT_IOR_EXPR
;
6687 tree op
= build_int_cst (unsigned_type_node
, rcode
);
6692 incoming
= outgoing
= var
;
6696 /* See if an outer construct also reduces this variable. */
6697 omp_context
*outer
= ctx
;
6699 while (omp_context
*probe
= outer
->outer
)
6701 enum gimple_code type
= gimple_code (probe
->stmt
);
6706 case GIMPLE_OMP_FOR
:
6707 cls
= gimple_omp_for_clauses (probe
->stmt
);
6710 case GIMPLE_OMP_TARGET
:
6711 if ((gimple_omp_target_kind (probe
->stmt
)
6712 != GF_OMP_TARGET_KIND_OACC_PARALLEL
)
6713 && (gimple_omp_target_kind (probe
->stmt
)
6714 != GF_OMP_TARGET_KIND_OACC_SERIAL
))
6717 cls
= gimple_omp_target_clauses (probe
->stmt
);
6725 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
6726 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
6727 && orig
== OMP_CLAUSE_DECL (cls
))
6729 incoming
= outgoing
= lookup_decl (orig
, probe
);
6730 goto has_outer_reduction
;
6732 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
6733 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
6734 && orig
== OMP_CLAUSE_DECL (cls
))
6742 /* This is the outermost construct with this reduction,
6743 see if there's a mapping for it. */
6744 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
6745 && maybe_lookup_field (orig
, outer
) && !is_private
)
6747 ref_to_res
= build_receiver_ref (orig
, false, outer
);
6748 if (omp_is_reference (orig
))
6749 ref_to_res
= build_simple_mem_ref (ref_to_res
);
6751 tree type
= TREE_TYPE (var
);
6752 if (POINTER_TYPE_P (type
))
6753 type
= TREE_TYPE (type
);
6756 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
6760 /* Try to look at enclosing contexts for reduction var,
6761 use original if no mapping found. */
6763 omp_context
*c
= ctx
->outer
;
6766 t
= maybe_lookup_decl (orig
, c
);
6769 incoming
= outgoing
= (t
? t
: orig
);
6772 has_outer_reduction
:;
6776 ref_to_res
= integer_zero_node
;
6778 if (omp_is_reference (orig
))
6780 tree type
= TREE_TYPE (var
);
6781 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
6785 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
6786 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
6789 v1
= create_tmp_var (type
, id
);
6790 v2
= create_tmp_var (type
, id
);
6791 v3
= create_tmp_var (type
, id
);
6793 gimplify_assign (v1
, var
, fork_seq
);
6794 gimplify_assign (v2
, var
, fork_seq
);
6795 gimplify_assign (v3
, var
, fork_seq
);
6797 var
= build_simple_mem_ref (var
);
6798 v1
= build_simple_mem_ref (v1
);
6799 v2
= build_simple_mem_ref (v2
);
6800 v3
= build_simple_mem_ref (v3
);
6801 outgoing
= build_simple_mem_ref (outgoing
);
6803 if (!TREE_CONSTANT (incoming
))
6804 incoming
= build_simple_mem_ref (incoming
);
6809 /* Determine position in reduction buffer, which may be used
6810 by target. The parser has ensured that this is not a
6811 variable-sized type. */
6812 fixed_size_mode mode
6813 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
6814 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
6815 offset
= (offset
+ align
- 1) & ~(align
- 1);
6816 tree off
= build_int_cst (sizetype
, offset
);
6817 offset
+= GET_MODE_SIZE (mode
);
6821 init_code
= build_int_cst (integer_type_node
,
6822 IFN_GOACC_REDUCTION_INIT
);
6823 fini_code
= build_int_cst (integer_type_node
,
6824 IFN_GOACC_REDUCTION_FINI
);
6825 setup_code
= build_int_cst (integer_type_node
,
6826 IFN_GOACC_REDUCTION_SETUP
);
6827 teardown_code
= build_int_cst (integer_type_node
,
6828 IFN_GOACC_REDUCTION_TEARDOWN
);
6832 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6833 TREE_TYPE (var
), 6, setup_code
,
6834 unshare_expr (ref_to_res
),
6835 incoming
, level
, op
, off
);
6837 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6838 TREE_TYPE (var
), 6, init_code
,
6839 unshare_expr (ref_to_res
),
6840 v1
, level
, op
, off
);
6842 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6843 TREE_TYPE (var
), 6, fini_code
,
6844 unshare_expr (ref_to_res
),
6845 v2
, level
, op
, off
);
6847 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6848 TREE_TYPE (var
), 6, teardown_code
,
6849 ref_to_res
, v3
, level
, op
, off
);
6851 gimplify_assign (v1
, setup_call
, &before_fork
);
6852 gimplify_assign (v2
, init_call
, &after_fork
);
6853 gimplify_assign (v3
, fini_call
, &before_join
);
6854 gimplify_assign (outgoing
, teardown_call
, &after_join
);
6857 /* Now stitch things together. */
6858 gimple_seq_add_seq (fork_seq
, before_fork
);
6860 gimple_seq_add_stmt (fork_seq
, fork
);
6861 gimple_seq_add_seq (fork_seq
, after_fork
);
6863 gimple_seq_add_seq (join_seq
, before_join
);
6865 gimple_seq_add_stmt (join_seq
, join
);
6866 gimple_seq_add_seq (join_seq
, after_join
);
6869 /* Generate code to implement the REDUCTION clauses, append it
6870 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6871 that should be emitted also inside of the critical section,
6872 in that case clear *CLIST afterwards, otherwise leave it as is
6873 and let the caller emit it itself. */
6876 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
6877 gimple_seq
*clist
, omp_context
*ctx
)
6879 gimple_seq sub_seq
= NULL
;
6884 /* OpenACC loop reductions are handled elsewhere. */
6885 if (is_gimple_omp_oacc (ctx
->stmt
))
6888 /* SIMD reductions are handled in lower_rec_input_clauses. */
6889 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6890 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
6893 /* inscan reductions are handled elsewhere. */
6894 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
6897 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6898 update in that case, otherwise use a lock. */
6899 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
6900 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6901 && !OMP_CLAUSE_REDUCTION_TASK (c
))
6903 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
6904 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6906 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6916 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6918 tree var
, ref
, new_var
, orig_var
;
6919 enum tree_code code
;
6920 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6922 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
6923 || OMP_CLAUSE_REDUCTION_TASK (c
))
6926 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
6927 orig_var
= var
= OMP_CLAUSE_DECL (c
);
6928 if (TREE_CODE (var
) == MEM_REF
)
6930 var
= TREE_OPERAND (var
, 0);
6931 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
6932 var
= TREE_OPERAND (var
, 0);
6933 if (TREE_CODE (var
) == ADDR_EXPR
)
6934 var
= TREE_OPERAND (var
, 0);
6937 /* If this is a pointer or referenced based array
6938 section, the var could be private in the outer
6939 context e.g. on orphaned loop construct. Pretend this
6940 is private variable's outer reference. */
6941 ccode
= OMP_CLAUSE_PRIVATE
;
6942 if (TREE_CODE (var
) == INDIRECT_REF
)
6943 var
= TREE_OPERAND (var
, 0);
6946 if (is_variable_sized (var
))
6948 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
6949 var
= DECL_VALUE_EXPR (var
);
6950 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
6951 var
= TREE_OPERAND (var
, 0);
6952 gcc_assert (DECL_P (var
));
6955 new_var
= lookup_decl (var
, ctx
);
6956 if (var
== OMP_CLAUSE_DECL (c
) && omp_is_reference (var
))
6957 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6958 ref
= build_outer_var_ref (var
, ctx
, ccode
);
6959 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6961 /* reduction(-:var) sums up the partial results, so it acts
6962 identically to reduction(+:var). */
6963 if (code
== MINUS_EXPR
)
6968 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
6970 addr
= save_expr (addr
);
6971 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
6972 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (ref
), ref
, new_var
);
6973 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
6974 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
6975 gimplify_and_add (x
, stmt_seqp
);
6978 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6980 tree d
= OMP_CLAUSE_DECL (c
);
6981 tree type
= TREE_TYPE (d
);
6982 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
6983 tree i
= create_tmp_var (TREE_TYPE (v
));
6984 tree ptype
= build_pointer_type (TREE_TYPE (type
));
6985 tree bias
= TREE_OPERAND (d
, 1);
6986 d
= TREE_OPERAND (d
, 0);
6987 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
6989 tree b
= TREE_OPERAND (d
, 1);
6990 b
= maybe_lookup_decl (b
, ctx
);
6993 b
= TREE_OPERAND (d
, 1);
6994 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
6996 if (integer_zerop (bias
))
7000 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
7001 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
7002 TREE_TYPE (b
), b
, bias
);
7004 d
= TREE_OPERAND (d
, 0);
7006 /* For ref build_outer_var_ref already performs this, so
7007 only new_var needs a dereference. */
7008 if (TREE_CODE (d
) == INDIRECT_REF
)
7010 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7011 gcc_assert (omp_is_reference (var
) && var
== orig_var
);
7013 else if (TREE_CODE (d
) == ADDR_EXPR
)
7015 if (orig_var
== var
)
7017 new_var
= build_fold_addr_expr (new_var
);
7018 ref
= build_fold_addr_expr (ref
);
7023 gcc_assert (orig_var
== var
);
7024 if (omp_is_reference (var
))
7025 ref
= build_fold_addr_expr (ref
);
7029 tree t
= maybe_lookup_decl (v
, ctx
);
7033 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7034 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
7036 if (!integer_zerop (bias
))
7038 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
7039 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7040 TREE_TYPE (new_var
), new_var
,
7041 unshare_expr (bias
));
7042 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7043 TREE_TYPE (ref
), ref
, bias
);
7045 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
7046 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
7047 tree m
= create_tmp_var (ptype
);
7048 gimplify_assign (m
, new_var
, stmt_seqp
);
7050 m
= create_tmp_var (ptype
);
7051 gimplify_assign (m
, ref
, stmt_seqp
);
7053 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
7054 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7055 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7056 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
7057 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7058 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
7059 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7061 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7062 tree decl_placeholder
7063 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
7064 SET_DECL_VALUE_EXPR (placeholder
, out
);
7065 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7066 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
7067 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
7068 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7069 gimple_seq_add_seq (&sub_seq
,
7070 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7071 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7072 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7073 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
7077 x
= build2 (code
, TREE_TYPE (out
), out
, priv
);
7078 out
= unshare_expr (out
);
7079 gimplify_assign (out
, x
, &sub_seq
);
7081 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
7082 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7083 gimple_seq_add_stmt (&sub_seq
, g
);
7084 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
7085 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7086 gimple_seq_add_stmt (&sub_seq
, g
);
7087 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
7088 build_int_cst (TREE_TYPE (i
), 1));
7089 gimple_seq_add_stmt (&sub_seq
, g
);
7090 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
7091 gimple_seq_add_stmt (&sub_seq
, g
);
7092 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
7094 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7096 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7098 if (omp_is_reference (var
)
7099 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7101 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
7102 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7103 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7104 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7105 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7106 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7107 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7111 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
7112 ref
= build_outer_var_ref (var
, ctx
);
7113 gimplify_assign (ref
, x
, &sub_seq
);
7117 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
7119 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7121 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
7125 gimple_seq_add_seq (stmt_seqp
, *clist
);
7129 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
7131 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7135 /* Generate code to implement the COPYPRIVATE clauses. */
7138 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
7143 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7145 tree var
, new_var
, ref
, x
;
7147 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7149 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
7152 var
= OMP_CLAUSE_DECL (c
);
7153 by_ref
= use_pointer_for_field (var
, NULL
);
7155 ref
= build_sender_ref (var
, ctx
);
7156 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
7159 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
7160 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
7162 gimplify_assign (ref
, x
, slist
);
7164 ref
= build_receiver_ref (var
, false, ctx
);
7167 ref
= fold_convert_loc (clause_loc
,
7168 build_pointer_type (TREE_TYPE (new_var
)),
7170 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
7172 if (omp_is_reference (var
))
7174 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
7175 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
7176 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7178 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
7179 gimplify_and_add (x
, rlist
);
7184 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7185 and REDUCTION from the sender (aka parent) side. */
7188 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
7192 int ignored_looptemp
= 0;
7193 bool is_taskloop
= false;
7195 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7196 by GOMP_taskloop. */
7197 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
7199 ignored_looptemp
= 2;
7203 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7205 tree val
, ref
, x
, var
;
7206 bool by_ref
, do_in
= false, do_out
= false;
7207 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7209 switch (OMP_CLAUSE_CODE (c
))
7211 case OMP_CLAUSE_PRIVATE
:
7212 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7215 case OMP_CLAUSE_FIRSTPRIVATE
:
7216 case OMP_CLAUSE_COPYIN
:
7217 case OMP_CLAUSE_LASTPRIVATE
:
7218 case OMP_CLAUSE_IN_REDUCTION
:
7219 case OMP_CLAUSE__REDUCTEMP_
:
7221 case OMP_CLAUSE_REDUCTION
:
7222 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
7225 case OMP_CLAUSE_SHARED
:
7226 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7229 case OMP_CLAUSE__LOOPTEMP_
:
7230 if (ignored_looptemp
)
7240 val
= OMP_CLAUSE_DECL (c
);
7241 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7242 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
7243 && TREE_CODE (val
) == MEM_REF
)
7245 val
= TREE_OPERAND (val
, 0);
7246 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
7247 val
= TREE_OPERAND (val
, 0);
7248 if (TREE_CODE (val
) == INDIRECT_REF
7249 || TREE_CODE (val
) == ADDR_EXPR
)
7250 val
= TREE_OPERAND (val
, 0);
7251 if (is_variable_sized (val
))
7255 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7256 outer taskloop region. */
7257 omp_context
*ctx_for_o
= ctx
;
7259 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
7260 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7261 ctx_for_o
= ctx
->outer
;
7263 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
7265 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
7266 && is_global_var (var
)
7267 && (val
== OMP_CLAUSE_DECL (c
)
7268 || !is_task_ctx (ctx
)
7269 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
7270 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
7271 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
7272 != POINTER_TYPE
)))))
7275 t
= omp_member_access_dummy_var (var
);
7278 var
= DECL_VALUE_EXPR (var
);
7279 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
7281 var
= unshare_and_remap (var
, t
, o
);
7283 var
= unshare_expr (var
);
7286 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
7288 /* Handle taskloop firstprivate/lastprivate, where the
7289 lastprivate on GIMPLE_OMP_TASK is represented as
7290 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7291 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
7292 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
7293 if (use_pointer_for_field (val
, ctx
))
7294 var
= build_fold_addr_expr (var
);
7295 gimplify_assign (x
, var
, ilist
);
7296 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
7300 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7301 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
7302 || val
== OMP_CLAUSE_DECL (c
))
7303 && is_variable_sized (val
))
7305 by_ref
= use_pointer_for_field (val
, NULL
);
7307 switch (OMP_CLAUSE_CODE (c
))
7309 case OMP_CLAUSE_FIRSTPRIVATE
:
7310 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
7312 && is_task_ctx (ctx
))
7313 TREE_NO_WARNING (var
) = 1;
7317 case OMP_CLAUSE_PRIVATE
:
7318 case OMP_CLAUSE_COPYIN
:
7319 case OMP_CLAUSE__LOOPTEMP_
:
7320 case OMP_CLAUSE__REDUCTEMP_
:
7324 case OMP_CLAUSE_LASTPRIVATE
:
7325 if (by_ref
|| omp_is_reference (val
))
7327 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
7334 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
7339 case OMP_CLAUSE_REDUCTION
:
7340 case OMP_CLAUSE_IN_REDUCTION
:
7342 if (val
== OMP_CLAUSE_DECL (c
))
7344 if (is_task_ctx (ctx
))
7345 by_ref
= use_pointer_for_field (val
, ctx
);
7347 do_out
= !(by_ref
|| omp_is_reference (val
));
7350 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
7359 ref
= build_sender_ref (val
, ctx
);
7360 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
7361 gimplify_assign (ref
, x
, ilist
);
7362 if (is_task_ctx (ctx
))
7363 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
7368 ref
= build_sender_ref (val
, ctx
);
7369 gimplify_assign (var
, ref
, olist
);
7374 /* Generate code to implement SHARED from the sender (aka parent)
7375 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7376 list things that got automatically shared. */
7379 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
7381 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
7383 if (ctx
->record_type
== NULL
)
7386 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
7387 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
7389 ovar
= DECL_ABSTRACT_ORIGIN (f
);
7390 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
7393 nvar
= maybe_lookup_decl (ovar
, ctx
);
7394 if (!nvar
|| !DECL_HAS_VALUE_EXPR_P (nvar
))
7397 /* If CTX is a nested parallel directive. Find the immediately
7398 enclosing parallel or workshare construct that contains a
7399 mapping for OVAR. */
7400 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
7402 t
= omp_member_access_dummy_var (var
);
7405 var
= DECL_VALUE_EXPR (var
);
7406 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
7408 var
= unshare_and_remap (var
, t
, o
);
7410 var
= unshare_expr (var
);
7413 if (use_pointer_for_field (ovar
, ctx
))
7415 x
= build_sender_ref (ovar
, ctx
);
7416 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
7417 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
7419 gcc_assert (is_parallel_ctx (ctx
)
7420 && DECL_ARTIFICIAL (ovar
));
7421 /* _condtemp_ clause. */
7422 var
= build_constructor (TREE_TYPE (x
), NULL
);
7425 var
= build_fold_addr_expr (var
);
7426 gimplify_assign (x
, var
, ilist
);
7430 x
= build_sender_ref (ovar
, ctx
);
7431 gimplify_assign (x
, var
, ilist
);
7433 if (!TREE_READONLY (var
)
7434 /* We don't need to receive a new reference to a result
7435 or parm decl. In fact we may not store to it as we will
7436 invalidate any pending RSO and generate wrong gimple
7438 && !((TREE_CODE (var
) == RESULT_DECL
7439 || TREE_CODE (var
) == PARM_DECL
)
7440 && DECL_BY_REFERENCE (var
)))
7442 x
= build_sender_ref (ovar
, ctx
);
7443 gimplify_assign (var
, x
, olist
);
7449 /* Emit an OpenACC head marker call, encapulating the partitioning and
7450 other information that must be processed by the target compiler.
7451 Return the maximum number of dimensions the associated loop might
7452 be partitioned over. */
7455 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
7456 gimple_seq
*seq
, omp_context
*ctx
)
7458 unsigned levels
= 0;
7460 tree gang_static
= NULL_TREE
;
7461 auto_vec
<tree
, 5> args
;
7463 args
.quick_push (build_int_cst
7464 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
7465 args
.quick_push (ddvar
);
7466 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7468 switch (OMP_CLAUSE_CODE (c
))
7470 case OMP_CLAUSE_GANG
:
7471 tag
|= OLF_DIM_GANG
;
7472 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
7473 /* static:* is represented by -1, and we can ignore it, as
7474 scheduling is always static. */
7475 if (gang_static
&& integer_minus_onep (gang_static
))
7476 gang_static
= NULL_TREE
;
7480 case OMP_CLAUSE_WORKER
:
7481 tag
|= OLF_DIM_WORKER
;
7485 case OMP_CLAUSE_VECTOR
:
7486 tag
|= OLF_DIM_VECTOR
;
7490 case OMP_CLAUSE_SEQ
:
7494 case OMP_CLAUSE_AUTO
:
7498 case OMP_CLAUSE_INDEPENDENT
:
7499 tag
|= OLF_INDEPENDENT
;
7502 case OMP_CLAUSE_TILE
:
7513 if (DECL_P (gang_static
))
7514 gang_static
= build_outer_var_ref (gang_static
, ctx
);
7515 tag
|= OLF_GANG_STATIC
;
7518 /* In a parallel region, loops are implicitly INDEPENDENT. */
7519 omp_context
*tgt
= enclosing_target_ctx (ctx
);
7520 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
7521 tag
|= OLF_INDEPENDENT
;
7524 /* Tiling could use all 3 levels. */
7528 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7529 Ensure at least one level, or 2 for possible auto
7531 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
7532 << OLF_DIM_BASE
) | OLF_SEQ
));
7534 if (levels
< 1u + maybe_auto
)
7535 levels
= 1u + maybe_auto
;
7538 args
.quick_push (build_int_cst (integer_type_node
, levels
));
7539 args
.quick_push (build_int_cst (integer_type_node
, tag
));
7541 args
.quick_push (gang_static
);
7543 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
7544 gimple_set_location (call
, loc
);
7545 gimple_set_lhs (call
, ddvar
);
7546 gimple_seq_add_stmt (seq
, call
);
7551 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7552 partitioning level of the enclosed region. */
7555 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
7556 tree tofollow
, gimple_seq
*seq
)
7558 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
7559 : IFN_UNIQUE_OACC_TAIL_MARK
);
7560 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
7561 int nargs
= 2 + (tofollow
!= NULL_TREE
);
7562 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
7563 marker
, ddvar
, tofollow
);
7564 gimple_set_location (call
, loc
);
7565 gimple_set_lhs (call
, ddvar
);
7566 gimple_seq_add_stmt (seq
, call
);
7569 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7570 the loop clauses, from which we extract reductions. Initialize
7574 lower_oacc_head_tail (location_t loc
, tree clauses
,
7575 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
7578 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
7579 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
7581 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
7582 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
7583 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
7586 for (unsigned done
= 1; count
; count
--, done
++)
7588 gimple_seq fork_seq
= NULL
;
7589 gimple_seq join_seq
= NULL
;
7591 tree place
= build_int_cst (integer_type_node
, -1);
7592 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7593 fork_kind
, ddvar
, place
);
7594 gimple_set_location (fork
, loc
);
7595 gimple_set_lhs (fork
, ddvar
);
7597 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7598 join_kind
, ddvar
, place
);
7599 gimple_set_location (join
, loc
);
7600 gimple_set_lhs (join
, ddvar
);
7602 /* Mark the beginning of this level sequence. */
7604 lower_oacc_loop_marker (loc
, ddvar
, true,
7605 build_int_cst (integer_type_node
, count
),
7607 lower_oacc_loop_marker (loc
, ddvar
, false,
7608 build_int_cst (integer_type_node
, done
),
7611 lower_oacc_reductions (loc
, clauses
, place
, inner
,
7612 fork
, join
, &fork_seq
, &join_seq
, ctx
);
7614 /* Append this level to head. */
7615 gimple_seq_add_seq (head
, fork_seq
);
7616 /* Prepend it to tail. */
7617 gimple_seq_add_seq (&join_seq
, *tail
);
7623 /* Mark the end of the sequence. */
7624 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
7625 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
7628 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7629 catch handler and return it. This prevents programs from violating the
7630 structured block semantics with throws. */
7633 maybe_catch_exception (gimple_seq body
)
7638 if (!flag_exceptions
)
7641 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
7642 decl
= lang_hooks
.eh_protect_cleanup_actions ();
7644 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
7646 g
= gimple_build_eh_must_not_throw (decl
);
7647 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
7650 return gimple_seq_alloc_with_stmt (g
);
7654 /* Routines to lower OMP directives into OMP-GIMPLE. */
7656 /* If ctx is a worksharing context inside of a cancellable parallel
7657 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7658 and conditional branch to parallel's cancel_label to handle
7659 cancellation in the implicit barrier. */
7662 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
7665 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
7666 if (gimple_omp_return_nowait_p (omp_return
))
7668 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
7669 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
7670 && outer
->cancellable
)
7672 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
7673 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
7674 tree lhs
= create_tmp_var (c_bool_type
);
7675 gimple_omp_return_set_lhs (omp_return
, lhs
);
7676 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
7677 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
7678 fold_convert (c_bool_type
,
7679 boolean_false_node
),
7680 outer
->cancel_label
, fallthru_label
);
7681 gimple_seq_add_stmt (body
, g
);
7682 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
7684 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
7688 /* Find the first task_reduction or reduction clause or return NULL
7689 if there are none. */
7692 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
7693 enum omp_clause_code ccode
)
7697 clauses
= omp_find_clause (clauses
, ccode
);
7698 if (clauses
== NULL_TREE
)
7700 if (ccode
!= OMP_CLAUSE_REDUCTION
7701 || code
== OMP_TASKLOOP
7702 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
7704 clauses
= OMP_CLAUSE_CHAIN (clauses
);
7708 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
7709 gimple_seq
*, gimple_seq
*);
7711 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7712 CTX is the enclosing OMP context for the current statement. */
7715 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7717 tree block
, control
;
7718 gimple_stmt_iterator tgsi
;
7719 gomp_sections
*stmt
;
7721 gbind
*new_stmt
, *bind
;
7722 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
7724 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
7726 push_gimplify_context ();
7732 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
7733 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
7734 tree rtmp
= NULL_TREE
;
7737 tree type
= build_pointer_type (pointer_sized_int_node
);
7738 tree temp
= create_tmp_var (type
);
7739 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
7740 OMP_CLAUSE_DECL (c
) = temp
;
7741 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
7742 gimple_omp_sections_set_clauses (stmt
, c
);
7743 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
7744 gimple_omp_sections_clauses (stmt
),
7745 &ilist
, &tred_dlist
);
7747 rtmp
= make_ssa_name (type
);
7748 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
7751 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
7752 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
7754 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
7755 &ilist
, &dlist
, ctx
, NULL
);
7757 control
= create_tmp_var (unsigned_type_node
, ".section");
7758 gimple_omp_sections_set_control (stmt
, control
);
7760 new_body
= gimple_omp_body (stmt
);
7761 gimple_omp_set_body (stmt
, NULL
);
7762 tgsi
= gsi_start (new_body
);
7763 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
7768 sec_start
= gsi_stmt (tgsi
);
7769 sctx
= maybe_lookup_ctx (sec_start
);
7772 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
7773 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
7774 GSI_CONTINUE_LINKING
);
7775 gimple_omp_set_body (sec_start
, NULL
);
7777 if (gsi_one_before_end_p (tgsi
))
7779 gimple_seq l
= NULL
;
7780 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
7781 &ilist
, &l
, &clist
, ctx
);
7782 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
7783 gimple_omp_section_set_last (sec_start
);
7786 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
7787 GSI_CONTINUE_LINKING
);
7790 block
= make_node (BLOCK
);
7791 bind
= gimple_build_bind (NULL
, new_body
, block
);
7794 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
7798 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
7799 gcall
*g
= gimple_build_call (fndecl
, 0);
7800 gimple_seq_add_stmt (&olist
, g
);
7801 gimple_seq_add_seq (&olist
, clist
);
7802 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
7803 g
= gimple_build_call (fndecl
, 0);
7804 gimple_seq_add_stmt (&olist
, g
);
7807 block
= make_node (BLOCK
);
7808 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
7809 gsi_replace (gsi_p
, new_stmt
, true);
7811 pop_gimplify_context (new_stmt
);
7812 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
7813 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
7814 if (BLOCK_VARS (block
))
7815 TREE_USED (block
) = 1;
7818 gimple_seq_add_seq (&new_body
, ilist
);
7819 gimple_seq_add_stmt (&new_body
, stmt
);
7820 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
7821 gimple_seq_add_stmt (&new_body
, bind
);
7823 t
= gimple_build_omp_continue (control
, control
);
7824 gimple_seq_add_stmt (&new_body
, t
);
7826 gimple_seq_add_seq (&new_body
, olist
);
7827 if (ctx
->cancellable
)
7828 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
7829 gimple_seq_add_seq (&new_body
, dlist
);
7831 new_body
= maybe_catch_exception (new_body
);
7833 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
7834 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
7835 t
= gimple_build_omp_return (nowait
);
7836 gimple_seq_add_stmt (&new_body
, t
);
7837 gimple_seq_add_seq (&new_body
, tred_dlist
);
7838 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
7841 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
7843 gimple_bind_set_body (new_stmt
, new_body
);
7847 /* A subroutine of lower_omp_single. Expand the simple form of
7848 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7850 if (GOMP_single_start ())
7852 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7854 FIXME. It may be better to delay expanding the logic of this until
7855 pass_expand_omp. The expanded logic may make the job more difficult
7856 to a synchronization analysis pass. */
7859 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
7861 location_t loc
= gimple_location (single_stmt
);
7862 tree tlabel
= create_artificial_label (loc
);
7863 tree flabel
= create_artificial_label (loc
);
7864 gimple
*call
, *cond
;
7867 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
7868 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
7869 call
= gimple_build_call (decl
, 0);
7870 gimple_call_set_lhs (call
, lhs
);
7871 gimple_seq_add_stmt (pre_p
, call
);
7873 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
7874 fold_convert_loc (loc
, TREE_TYPE (lhs
),
7877 gimple_seq_add_stmt (pre_p
, cond
);
7878 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
7879 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7880 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
7884 /* A subroutine of lower_omp_single. Expand the simple form of
7885 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7887 #pragma omp single copyprivate (a, b, c)
7889 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7892 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7898 GOMP_single_copy_end (©out);
7909 FIXME. It may be better to delay expanding the logic of this until
7910 pass_expand_omp. The expanded logic may make the job more difficult
7911 to a synchronization analysis pass. */
7914 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
7917 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
7918 gimple_seq copyin_seq
;
7919 location_t loc
= gimple_location (single_stmt
);
7921 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
7923 ptr_type
= build_pointer_type (ctx
->record_type
);
7924 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
7926 l0
= create_artificial_label (loc
);
7927 l1
= create_artificial_label (loc
);
7928 l2
= create_artificial_label (loc
);
7930 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
7931 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
7932 t
= fold_convert_loc (loc
, ptr_type
, t
);
7933 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
7935 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
7936 build_int_cst (ptr_type
, 0));
7937 t
= build3 (COND_EXPR
, void_type_node
, t
,
7938 build_and_jump (&l0
), build_and_jump (&l1
));
7939 gimplify_and_add (t
, pre_p
);
7941 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
7943 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7946 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
7949 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
7950 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
7951 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
7952 gimplify_and_add (t
, pre_p
);
7954 t
= build_and_jump (&l2
);
7955 gimplify_and_add (t
, pre_p
);
7957 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
7959 gimple_seq_add_seq (pre_p
, copyin_seq
);
7961 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
7965 /* Expand code for an OpenMP single directive. */
7968 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7971 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
7973 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
7975 push_gimplify_context ();
7977 block
= make_node (BLOCK
);
7978 bind
= gimple_build_bind (NULL
, NULL
, block
);
7979 gsi_replace (gsi_p
, bind
, true);
7982 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
7983 &bind_body
, &dlist
, ctx
, NULL
);
7984 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
7986 gimple_seq_add_stmt (&bind_body
, single_stmt
);
7988 if (ctx
->record_type
)
7989 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
7991 lower_omp_single_simple (single_stmt
, &bind_body
);
7993 gimple_omp_set_body (single_stmt
, NULL
);
7995 gimple_seq_add_seq (&bind_body
, dlist
);
7997 bind_body
= maybe_catch_exception (bind_body
);
7999 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
8000 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8001 gimple
*g
= gimple_build_omp_return (nowait
);
8002 gimple_seq_add_stmt (&bind_body_tail
, g
);
8003 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
8004 if (ctx
->record_type
)
8006 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8007 tree clobber
= build_clobber (ctx
->record_type
);
8008 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8009 clobber
), GSI_SAME_STMT
);
8011 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8012 gimple_bind_set_body (bind
, bind_body
);
8014 pop_gimplify_context (bind
);
8016 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8017 BLOCK_VARS (block
) = ctx
->block_vars
;
8018 if (BLOCK_VARS (block
))
8019 TREE_USED (block
) = 1;
8023 /* Expand code for an OpenMP master directive. */
8026 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8028 tree block
, lab
= NULL
, x
, bfn_decl
;
8029 gimple
*stmt
= gsi_stmt (*gsi_p
);
8031 location_t loc
= gimple_location (stmt
);
8034 push_gimplify_context ();
8036 block
= make_node (BLOCK
);
8037 bind
= gimple_build_bind (NULL
, NULL
, block
);
8038 gsi_replace (gsi_p
, bind
, true);
8039 gimple_bind_add_stmt (bind
, stmt
);
8041 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8042 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
8043 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
8044 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
8046 gimplify_and_add (x
, &tseq
);
8047 gimple_bind_add_seq (bind
, tseq
);
8049 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8050 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8051 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8052 gimple_omp_set_body (stmt
, NULL
);
8054 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
8056 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8058 pop_gimplify_context (bind
);
8060 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8061 BLOCK_VARS (block
) = ctx
->block_vars
;
8064 /* Helper function for lower_omp_task_reductions. For a specific PASS
8065 find out the current clause it should be processed, or return false
8066 if all have been processed already. */
8069 omp_task_reduction_iterate (int pass
, enum tree_code code
,
8070 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
8071 tree
*type
, tree
*next
)
8073 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
8075 if (ccode
== OMP_CLAUSE_REDUCTION
8076 && code
!= OMP_TASKLOOP
8077 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
8079 *decl
= OMP_CLAUSE_DECL (*c
);
8080 *type
= TREE_TYPE (*decl
);
8081 if (TREE_CODE (*decl
) == MEM_REF
)
8088 if (omp_is_reference (*decl
))
8089 *type
= TREE_TYPE (*type
);
8090 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
8093 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
8102 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8103 OMP_TASKGROUP only with task modifier). Register mapping of those in
8104 START sequence and reducing them and unregister them in the END sequence. */
8107 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
8108 gimple_seq
*start
, gimple_seq
*end
)
8110 enum omp_clause_code ccode
8111 = (code
== OMP_TASKGROUP
8112 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
8113 tree cancellable
= NULL_TREE
;
8114 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
8115 if (clauses
== NULL_TREE
)
8117 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8119 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8120 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8121 && outer
->cancellable
)
8123 cancellable
= error_mark_node
;
8126 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
8129 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
8130 tree
*last
= &TYPE_FIELDS (record_type
);
8134 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8136 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8139 DECL_CHAIN (field
) = ifield
;
8140 last
= &DECL_CHAIN (ifield
);
8141 DECL_CONTEXT (field
) = record_type
;
8142 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8143 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8144 DECL_CONTEXT (ifield
) = record_type
;
8145 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
8146 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
8148 for (int pass
= 0; pass
< 2; pass
++)
8150 tree decl
, type
, next
;
8151 for (tree c
= clauses
;
8152 omp_task_reduction_iterate (pass
, code
, ccode
,
8153 &c
, &decl
, &type
, &next
); c
= next
)
8156 tree new_type
= type
;
8158 new_type
= remap_type (type
, &ctx
->outer
->cb
);
8160 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
8161 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
8163 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
8165 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
8166 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
8167 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
8170 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
8171 DECL_CONTEXT (field
) = record_type
;
8172 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8173 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8175 last
= &DECL_CHAIN (field
);
8177 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
8179 DECL_CONTEXT (bfield
) = record_type
;
8180 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
8181 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
8183 last
= &DECL_CHAIN (bfield
);
8187 layout_type (record_type
);
8189 /* Build up an array which registers with the runtime all the reductions
8190 and deregisters them at the end. Format documented in libgomp/task.c. */
8191 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
8192 tree avar
= create_tmp_var_raw (atype
);
8193 gimple_add_tmp_var (avar
);
8194 TREE_ADDRESSABLE (avar
) = 1;
8195 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
8196 NULL_TREE
, NULL_TREE
);
8197 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
8198 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8199 gimple_seq seq
= NULL
;
8200 tree sz
= fold_convert (pointer_sized_int_node
,
8201 TYPE_SIZE_UNIT (record_type
));
8203 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
8204 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
8205 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
8206 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
8207 ctx
->task_reductions
.create (1 + cnt
);
8208 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
8209 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
8211 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
8212 gimple_seq_add_seq (start
, seq
);
8213 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
8214 NULL_TREE
, NULL_TREE
);
8215 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
8216 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8217 NULL_TREE
, NULL_TREE
);
8218 t
= build_int_cst (pointer_sized_int_node
,
8219 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
8220 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8221 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
8222 NULL_TREE
, NULL_TREE
);
8223 t
= build_int_cst (pointer_sized_int_node
, -1);
8224 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8225 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
8226 NULL_TREE
, NULL_TREE
);
8227 t
= build_int_cst (pointer_sized_int_node
, 0);
8228 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8230 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8231 and for each task reduction checks a bool right after the private variable
8232 within that thread's chunk; if the bool is clear, it hasn't been
8233 initialized and thus isn't going to be reduced nor destructed, otherwise
8234 reduce and destruct it. */
8235 tree idx
= create_tmp_var (size_type_node
);
8236 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
8237 tree num_thr_sz
= create_tmp_var (size_type_node
);
8238 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
8239 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
8240 tree lab3
= NULL_TREE
;
8242 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8244 /* For worksharing constructs, only perform it in the master thread,
8245 with the exception of cancelled implicit barriers - then only handle
8246 the current thread. */
8247 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8248 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8249 tree thr_num
= create_tmp_var (integer_type_node
);
8250 g
= gimple_build_call (t
, 0);
8251 gimple_call_set_lhs (g
, thr_num
);
8252 gimple_seq_add_stmt (end
, g
);
8256 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8257 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8258 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8259 if (code
== OMP_FOR
)
8260 c
= gimple_omp_for_clauses (ctx
->stmt
);
8261 else /* if (code == OMP_SECTIONS) */
8262 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8263 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
8265 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
8267 gimple_seq_add_stmt (end
, g
);
8268 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8269 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
8270 gimple_seq_add_stmt (end
, g
);
8271 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
8272 build_one_cst (TREE_TYPE (idx
)));
8273 gimple_seq_add_stmt (end
, g
);
8274 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
8275 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8277 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
8278 gimple_seq_add_stmt (end
, g
);
8279 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8281 if (code
!= OMP_PARALLEL
)
8283 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
8284 tree num_thr
= create_tmp_var (integer_type_node
);
8285 g
= gimple_build_call (t
, 0);
8286 gimple_call_set_lhs (g
, num_thr
);
8287 gimple_seq_add_stmt (end
, g
);
8288 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
8289 gimple_seq_add_stmt (end
, g
);
8291 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
8295 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
8296 OMP_CLAUSE__REDUCTEMP_
);
8297 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
8298 t
= fold_convert (size_type_node
, t
);
8299 gimplify_assign (num_thr_sz
, t
, end
);
8301 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8302 NULL_TREE
, NULL_TREE
);
8303 tree data
= create_tmp_var (pointer_sized_int_node
);
8304 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
8305 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
8307 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
8308 ptr
= create_tmp_var (build_pointer_type (record_type
));
8310 ptr
= create_tmp_var (ptr_type_node
);
8311 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
8313 tree field
= TYPE_FIELDS (record_type
);
8316 field
= DECL_CHAIN (DECL_CHAIN (field
));
8317 for (int pass
= 0; pass
< 2; pass
++)
8319 tree decl
, type
, next
;
8320 for (tree c
= clauses
;
8321 omp_task_reduction_iterate (pass
, code
, ccode
,
8322 &c
, &decl
, &type
, &next
); c
= next
)
8324 tree var
= decl
, ref
;
8325 if (TREE_CODE (decl
) == MEM_REF
)
8327 var
= TREE_OPERAND (var
, 0);
8328 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
8329 var
= TREE_OPERAND (var
, 0);
8331 if (TREE_CODE (var
) == ADDR_EXPR
)
8332 var
= TREE_OPERAND (var
, 0);
8333 else if (TREE_CODE (var
) == INDIRECT_REF
)
8334 var
= TREE_OPERAND (var
, 0);
8335 tree orig_var
= var
;
8336 if (is_variable_sized (var
))
8338 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
8339 var
= DECL_VALUE_EXPR (var
);
8340 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
8341 var
= TREE_OPERAND (var
, 0);
8342 gcc_assert (DECL_P (var
));
8344 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8345 if (orig_var
!= var
)
8346 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
8347 else if (TREE_CODE (v
) == ADDR_EXPR
)
8348 t
= build_fold_addr_expr (t
);
8349 else if (TREE_CODE (v
) == INDIRECT_REF
)
8350 t
= build_fold_indirect_ref (t
);
8351 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
8353 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
8354 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
8355 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
8357 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
8358 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
8359 fold_convert (size_type_node
,
8360 TREE_OPERAND (decl
, 1)));
8364 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8365 if (!omp_is_reference (decl
))
8366 t
= build_fold_addr_expr (t
);
8368 t
= fold_convert (pointer_sized_int_node
, t
);
8370 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8371 gimple_seq_add_seq (start
, seq
);
8372 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8373 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8374 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8375 t
= unshare_expr (byte_position (field
));
8376 t
= fold_convert (pointer_sized_int_node
, t
);
8377 ctx
->task_reduction_map
->put (c
, cnt
);
8378 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
8381 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8382 gimple_seq_add_seq (start
, seq
);
8383 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8384 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
8385 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8387 tree bfield
= DECL_CHAIN (field
);
8389 if (code
== OMP_PARALLEL
|| code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8390 /* In parallel or worksharing all threads unconditionally
8391 initialize all their task reduction private variables. */
8392 cond
= boolean_true_node
;
8393 else if (TREE_TYPE (ptr
) == ptr_type_node
)
8395 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8396 unshare_expr (byte_position (bfield
)));
8398 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
8399 gimple_seq_add_seq (end
, seq
);
8400 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
8401 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
8402 build_int_cst (pbool
, 0));
8405 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
8406 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
8407 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8408 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8409 tree condv
= create_tmp_var (boolean_type_node
);
8410 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
8411 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
8413 gimple_seq_add_stmt (end
, g
);
8414 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
8415 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
8417 /* If this reduction doesn't need destruction and parallel
8418 has been cancelled, there is nothing to do for this
8419 reduction, so jump around the merge operation. */
8420 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8421 g
= gimple_build_cond (NE_EXPR
, cancellable
,
8422 build_zero_cst (TREE_TYPE (cancellable
)),
8424 gimple_seq_add_stmt (end
, g
);
8425 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8429 if (TREE_TYPE (ptr
) == ptr_type_node
)
8431 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8432 unshare_expr (byte_position (field
)));
8434 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
8435 gimple_seq_add_seq (end
, seq
);
8436 tree pbool
= build_pointer_type (TREE_TYPE (field
));
8437 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
8438 build_int_cst (pbool
, 0));
8441 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
8442 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
8444 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
8445 if (TREE_CODE (decl
) != MEM_REF
&& omp_is_reference (decl
))
8446 ref
= build_simple_mem_ref (ref
);
8447 /* reduction(-:var) sums up the partial results, so it acts
8448 identically to reduction(+:var). */
8449 if (rcode
== MINUS_EXPR
)
8451 if (TREE_CODE (decl
) == MEM_REF
)
8453 tree type
= TREE_TYPE (new_var
);
8454 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8455 tree i
= create_tmp_var (TREE_TYPE (v
));
8456 tree ptype
= build_pointer_type (TREE_TYPE (type
));
8459 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
8460 tree vv
= create_tmp_var (TREE_TYPE (v
));
8461 gimplify_assign (vv
, v
, start
);
8464 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8465 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8466 new_var
= build_fold_addr_expr (new_var
);
8467 new_var
= fold_convert (ptype
, new_var
);
8468 ref
= fold_convert (ptype
, ref
);
8469 tree m
= create_tmp_var (ptype
);
8470 gimplify_assign (m
, new_var
, end
);
8472 m
= create_tmp_var (ptype
);
8473 gimplify_assign (m
, ref
, end
);
8475 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
8476 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
8477 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
8478 gimple_seq_add_stmt (end
, gimple_build_label (body
));
8479 tree priv
= build_simple_mem_ref (new_var
);
8480 tree out
= build_simple_mem_ref (ref
);
8481 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8483 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8484 tree decl_placeholder
8485 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
8486 tree lab6
= NULL_TREE
;
8489 /* If this reduction needs destruction and parallel
8490 has been cancelled, jump around the merge operation
8491 to the destruction. */
8492 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8493 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8494 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8495 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8497 gimple_seq_add_stmt (end
, g
);
8498 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8500 SET_DECL_VALUE_EXPR (placeholder
, out
);
8501 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8502 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
8503 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
8504 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8505 gimple_seq_add_seq (end
,
8506 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8507 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8508 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8510 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8511 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
8514 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8515 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
8518 gimple_seq tseq
= NULL
;
8519 gimplify_stmt (&x
, &tseq
);
8520 gimple_seq_add_seq (end
, tseq
);
8525 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
8526 out
= unshare_expr (out
);
8527 gimplify_assign (out
, x
, end
);
8530 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
8531 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8532 gimple_seq_add_stmt (end
, g
);
8533 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
8534 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8535 gimple_seq_add_stmt (end
, g
);
8536 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
8537 build_int_cst (TREE_TYPE (i
), 1));
8538 gimple_seq_add_stmt (end
, g
);
8539 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
8540 gimple_seq_add_stmt (end
, g
);
8541 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
8543 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8545 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8546 tree oldv
= NULL_TREE
;
8547 tree lab6
= NULL_TREE
;
8550 /* If this reduction needs destruction and parallel
8551 has been cancelled, jump around the merge operation
8552 to the destruction. */
8553 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8554 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8555 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8556 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8558 gimple_seq_add_stmt (end
, g
);
8559 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8561 if (omp_is_reference (decl
)
8562 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
8564 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8565 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8566 tree refv
= create_tmp_var (TREE_TYPE (ref
));
8567 gimplify_assign (refv
, ref
, end
);
8568 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
8569 SET_DECL_VALUE_EXPR (placeholder
, ref
);
8570 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8571 tree d
= maybe_lookup_decl (decl
, ctx
);
8573 if (DECL_HAS_VALUE_EXPR_P (d
))
8574 oldv
= DECL_VALUE_EXPR (d
);
8575 if (omp_is_reference (var
))
8577 tree v
= fold_convert (TREE_TYPE (d
),
8578 build_fold_addr_expr (new_var
));
8579 SET_DECL_VALUE_EXPR (d
, v
);
8582 SET_DECL_VALUE_EXPR (d
, new_var
);
8583 DECL_HAS_VALUE_EXPR_P (d
) = 1;
8584 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8586 SET_DECL_VALUE_EXPR (d
, oldv
);
8589 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
8590 DECL_HAS_VALUE_EXPR_P (d
) = 0;
8592 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8593 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8594 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8595 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8597 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8598 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
8601 gimple_seq tseq
= NULL
;
8602 gimplify_stmt (&x
, &tseq
);
8603 gimple_seq_add_seq (end
, tseq
);
8608 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
8609 ref
= unshare_expr (ref
);
8610 gimplify_assign (ref
, x
, end
);
8612 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8614 field
= DECL_CHAIN (bfield
);
8618 if (code
== OMP_TASKGROUP
)
8620 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
8621 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8622 gimple_seq_add_stmt (start
, g
);
8627 if (code
== OMP_FOR
)
8628 c
= gimple_omp_for_clauses (ctx
->stmt
);
8629 else if (code
== OMP_SECTIONS
)
8630 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8632 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
8633 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
8634 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
8635 build_fold_addr_expr (avar
));
8636 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
8639 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
8640 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
8642 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
8643 gimple_seq_add_stmt (end
, g
);
8644 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
8645 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8647 enum built_in_function bfn
8648 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
8649 t
= builtin_decl_explicit (bfn
);
8650 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
8654 arg
= create_tmp_var (c_bool_type
);
8655 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
8659 arg
= build_int_cst (c_bool_type
, 0);
8660 g
= gimple_build_call (t
, 1, arg
);
8664 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
8665 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8667 gimple_seq_add_stmt (end
, g
);
8668 t
= build_constructor (atype
, NULL
);
8669 TREE_THIS_VOLATILE (t
) = 1;
8670 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
8673 /* Expand code for an OpenMP taskgroup directive. */
8676 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8678 gimple
*stmt
= gsi_stmt (*gsi_p
);
8681 gimple_seq dseq
= NULL
;
8682 tree block
= make_node (BLOCK
);
8684 bind
= gimple_build_bind (NULL
, NULL
, block
);
8685 gsi_replace (gsi_p
, bind
, true);
8686 gimple_bind_add_stmt (bind
, stmt
);
8688 push_gimplify_context ();
8690 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
8692 gimple_bind_add_stmt (bind
, x
);
8694 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
8695 gimple_omp_taskgroup_clauses (stmt
),
8696 gimple_bind_body_ptr (bind
), &dseq
);
8698 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8699 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8700 gimple_omp_set_body (stmt
, NULL
);
8702 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8703 gimple_bind_add_seq (bind
, dseq
);
8705 pop_gimplify_context (bind
);
8707 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8708 BLOCK_VARS (block
) = ctx
->block_vars
;
8712 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8715 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
8718 struct omp_for_data fd
;
8719 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
8722 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
8723 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
8724 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
8728 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
8729 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
8730 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
8731 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
8733 /* Merge depend clauses from multiple adjacent
8734 #pragma omp ordered depend(sink:...) constructs
8735 into one #pragma omp ordered depend(sink:...), so that
8736 we can optimize them together. */
8737 gimple_stmt_iterator gsi
= *gsi_p
;
8739 while (!gsi_end_p (gsi
))
8741 gimple
*stmt
= gsi_stmt (gsi
);
8742 if (is_gimple_debug (stmt
)
8743 || gimple_code (stmt
) == GIMPLE_NOP
)
8748 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
8750 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
8751 c
= gimple_omp_ordered_clauses (ord_stmt2
);
8753 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
8754 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8757 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
8759 gsi_remove (&gsi
, true);
/* NOTE(review): this chunk is a lossy extraction of GCC's omp-low.c --
   lines are split mid-expression and several original lines (including this
   function's signature, presumably "lower_omp_ordered_clauses") are missing.
   Verify every detail against upstream GCC before relying on it.  */
8763 /* Canonicalize sink dependence clauses into one folded clause if
8766 The basic algorithm is to create a sink vector whose first
8767 element is the GCD of all the first elements, and whose remaining
8768 elements are the minimum of the subsequent columns.
8770 We ignore dependence vectors whose first element is zero because
8771 such dependencies are known to be executed by the same thread.
8773 We take into account the direction of the loop, so a minimum
8774 becomes a maximum if the loop is iterating forwards. We also
8775 ignore sink clauses where the loop direction is unknown, or where
8776 the offsets are clearly invalid because they are not a multiple
8777 of the loop increment.
8781 #pragma omp for ordered(2)
8782 for (i=0; i < N; ++i)
8783 for (j=0; j < M; ++j)
8785 #pragma omp ordered \
8786 depend(sink:i-8,j-2) \
8787 depend(sink:i,j-1) \ // Completely ignored because i+0.
8788 depend(sink:i-4,j-3) \
8789 depend(sink:i-6,j-4)
8790 #pragma omp ordered depend(source)
8795 depend(sink:-gcd(8,4,6),-min(2,3,4))
8800 /* FIXME: Computing GCD's where the first element is zero is
8801 non-trivial in the presence of collapsed loops. Do this later. */
8802 if (fd
.collapse
> 1)
/* Scratch storage on the alloca stack: slots [0 .. len-1] hold the folded
   sink vector; slots [len .. 2*len-2] stage the row currently being
   compared (see the folded_deps[len + i - 1] store below).  */
8805 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
8807 /* wide_int is not a POD so it must be default-constructed. */
8808 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
8809 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
8811 tree folded_dep
= NULL_TREE
;
8812 /* TRUE if the first dimension's offset is negative. */
8813 bool neg_offset_p
= false;
/* Walk all clauses of the ordered statement, folding depend(sink:...)
   clauses into FOLDED_DEP and dropping the ones that are subsumed,
   invalid, or known to be same-thread.  */
8815 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
8817 while ((c
= *list_p
) != NULL
)
8819 bool remove
= false;
8821 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
8822 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8823 goto next_ordered_clause
;
/* Iterate the TREE_LIST of per-dimension offsets of this sink clause;
   I counts dimensions and must end equal to LEN (asserted below).  */
8826 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
8827 vec
&& TREE_CODE (vec
) == TREE_LIST
;
8828 vec
= TREE_CHAIN (vec
), ++i
)
8830 gcc_assert (i
< len
);
8832 /* omp_extract_for_data has canonicalized the condition. */
8833 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
8834 || fd
.loops
[i
].cond_code
== GT_EXPR
);
8835 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
8836 bool maybe_lexically_later
= true;
8838 /* While the committee makes up its mind, bail if we have any
8839 non-constant steps. */
8840 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
8841 goto lower_omp_ordered_ret
;
8843 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
8844 if (POINTER_TYPE_P (itype
))
8846 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
8847 TYPE_PRECISION (itype
),
8850 /* Ignore invalid offsets that are not multiples of the step. */
8851 if (!wi::multiple_of_p (wi::abs (offset
),
8852 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
8855 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
8856 "ignoring sink clause with offset that is not "
8857 "a multiple of the loop step");
8859 goto next_ordered_clause
;
8862 /* Calculate the first dimension. The first dimension of
8863 the folded dependency vector is the GCD of the first
8864 elements, while ignoring any first elements whose offset
8868 /* Ignore dependence vectors whose first dimension is 0. */
8872 goto next_ordered_clause
;
/* A first offset pointing in the direction of iteration would be a
   forward dependence, which is an error for depend(sink:).  */
8876 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
8878 error_at (OMP_CLAUSE_LOCATION (c
),
8879 "first offset must be in opposite direction "
8880 "of loop iterations");
8881 goto lower_omp_ordered_ret
;
8885 neg_offset_p
= forward
;
8886 /* Initialize the first time around. */
8887 if (folded_dep
== NULL_TREE
)
8890 folded_deps
[0] = offset
;
8893 folded_deps
[0] = wi::gcd (folded_deps
[0],
8897 /* Calculate minimum for the remaining dimensions. */
8900 folded_deps
[len
+ i
- 1] = offset
;
8901 if (folded_dep
== c
)
8902 folded_deps
[i
] = offset
;
8903 else if (maybe_lexically_later
8904 && !wi::eq_p (folded_deps
[i
], offset
))
8906 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
/* The current clause is lexically later than the folded one so far:
   copy the staged row over the folded vector.  */
8910 for (j
= 1; j
<= i
; j
++)
8911 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
8914 maybe_lexically_later
= false;
8918 gcc_assert (i
== len
);
8922 next_ordered_clause
:
/* Unlink the clause when REMOVE was set, otherwise advance past it.
   NOTE(review): the branch guarding these two alternatives was lost in
   this extraction -- see upstream.  */
8924 *list_p
= OMP_CLAUSE_CHAIN (c
);
8926 list_p
= &OMP_CLAUSE_CHAIN (c
);
8932 folded_deps
[0] = -folded_deps
[0];
8934 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
8935 if (POINTER_TYPE_P (itype
))
/* Store the folded first-dimension offset back into the surviving
   clause and put that clause at the head of the ordered clause list.  */
8938 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
8939 = wide_int_to_tree (itype
, folded_deps
[0]);
8940 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
8941 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
8944 lower_omp_ordered_ret
:
8946 /* Ordered without clauses is #pragma omp threads, while we want
8947 a nop instead if we remove all clauses. */
8948 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
8949 gsi_replace (gsi_p
, gimple_build_nop (), true);
/* NOTE(review): lossy extraction of GCC's omp-low.c -- lines are split
   mid-expression and some original lines are missing.  Verify against
   upstream before relying on exact semantics.  */
8953 /* Expand code for an OpenMP ordered directive. */
8956 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8959 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
8960 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
/* SIMD determines whether we emit the IFN_GOMP_SIMD_ORDERED_* internal
   functions instead of the libgomp GOMP_ordered_* runtime calls.  */
8963 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8965 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8968 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
8969 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8970 OMP_CLAUSE_THREADS
);
8972 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8975 /* FIXME: This needs to be moved to the expansion to verify various
8976 conditions only testable on cfg with dominators computed, and also
8977 all the depend clauses to be merged still might need to be available
8978 for the runtime checks. */
8980 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
/* Wrap the ordered body in a GIMPLE_BIND and emit the entry call.  */
8984 push_gimplify_context ();
8986 block
= make_node (BLOCK
);
8987 bind
= gimple_build_bind (NULL
, NULL
, block
);
8988 gsi_replace (gsi_p
, bind
, true);
8989 gimple_bind_add_stmt (bind
, stmt
);
8993 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
8994 build_int_cst (NULL_TREE
, threads
));
8995 cfun
->has_simduid_loops
= true;
8998 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
9000 gimple_bind_add_stmt (bind
, x
);
/* SIMT path: build a software loop over lanes (COUNTER counts down from
   the lane id) so lanes execute the ordered body one at a time.
   NOTE(review): the enclosing condition for this path was lost in this
   extraction -- see upstream.  */
9002 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
9005 counter
= create_tmp_var (integer_type_node
);
9006 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
9007 gimple_call_set_lhs (g
, counter
);
9008 gimple_bind_add_stmt (bind
, g
);
9010 body
= create_artificial_label (UNKNOWN_LOCATION
);
9011 test
= create_artificial_label (UNKNOWN_LOCATION
);
9012 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
9014 tree simt_pred
= create_tmp_var (integer_type_node
);
9015 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
9016 gimple_call_set_lhs (g
, simt_pred
);
9017 gimple_bind_add_stmt (bind
, g
);
9019 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
/* Only the lane whose predicate is zero runs the body this iteration.  */
9020 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
9021 gimple_bind_add_stmt (bind
, g
);
9023 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
9025 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9026 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9027 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9028 gimple_omp_set_body (stmt
, NULL
);
/* Loop latch: decrement COUNTER and loop back while any lane still has
   a non-negative counter (IFN_GOMP_SIMT_VOTE_ANY across lanes).  */
9032 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
9033 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
9034 gimple_bind_add_stmt (bind
, g
);
9036 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
9037 tree nonneg
= create_tmp_var (integer_type_node
);
9038 gimple_seq tseq
= NULL
;
9039 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
9040 gimple_bind_add_seq (bind
, tseq
);
9042 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
9043 gimple_call_set_lhs (g
, nonneg
);
9044 gimple_bind_add_stmt (bind
, g
);
9046 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
9047 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
9048 gimple_bind_add_stmt (bind
, g
);
9050 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
/* Emit the matching exit call (SIMD internal fn or GOMP_ordered_end).  */
9053 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
9054 build_int_cst (NULL_TREE
, threads
));
9056 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
9058 gimple_bind_add_stmt (bind
, x
);
9060 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9062 pop_gimplify_context (bind
);
9064 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9065 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
/* NOTE(review): lossy extraction of GCC's omp-low.c -- lines are split
   mid-expression and some original lines are missing.  Verify against
   upstream before relying on exact semantics.  */
9069 /* Expand code for an OpenMP scan directive and the structured block
9070 before the scan directive. */
9073 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9075 gimple
*stmt
= gsi_stmt (*gsi_p
);
/* HAS_CLAUSES distinguishes the input-phase GIMPLE_OMP_SCAN from the
   scan-phase one.  NOTE(review): the declaration's start was lost in
   this extraction.  */
9077 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
9078 tree lane
= NULL_TREE
;
9079 gimple_seq before
= NULL
;
9080 omp_context
*octx
= ctx
->outer
;
9082 if (octx
->scan_exclusive
&& !has_clauses
)
9084 gimple_stmt_iterator gsi2
= *gsi_p
;
9086 gimple
*stmt2
= gsi_stmt (gsi2
);
9087 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9088 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9089 the one with exclusive clause(s), comes first. */
9091 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
9092 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
9094 gsi_remove (gsi_p
, false);
9095 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
9096 ctx
= maybe_lookup_ctx (stmt2
);
/* Recurse to process the swapped-in scan statement.  */
9098 lower_omp_scan (gsi_p
, ctx
);
/* Classify the enclosing worksharing construct: simd loop, plain
   (non-combined) for loop, or for-simd combination.  */
9103 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
9104 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
9105 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
9106 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
9107 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
9108 && !gimple_omp_for_combined_p (octx
->stmt
));
9109 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
9110 if (is_for_simd
&& octx
->for_simd_scan_phase
)
/* For simd loops carrying a _simduid_ clause, ask for the per-lane
   index via IFN_GOMP_SIMD_LANE; the last argument encodes the phase
   (input vs. inclusive/exclusive scan).  */
9113 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
9114 OMP_CLAUSE__SIMDUID_
))
9116 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
9117 lane
= create_tmp_var (unsigned_type_node
);
9118 tree t
= build_int_cst (integer_type_node
,
9120 : octx
->scan_inclusive
? 2 : 3);
9122 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
9123 gimple_call_set_lhs (g
, lane
);
9124 gimple_seq_add_stmt (&before
, g
);
/* Process each inscan reduction clause on the enclosing loop.  */
9127 if (is_simd
|| is_for
)
9129 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
9130 c
; c
= OMP_CLAUSE_CHAIN (c
))
9131 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9132 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
9134 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9135 tree var
= OMP_CLAUSE_DECL (c
);
9136 tree new_var
= lookup_decl (var
, octx
);
9138 tree var2
= NULL_TREE
;
9139 tree var3
= NULL_TREE
;
9140 tree var4
= NULL_TREE
;
9141 tree lane0
= NULL_TREE
;
9142 tree new_vard
= new_var
;
9143 if (omp_is_reference (var
))
9145 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
/* If the privatized decl has a value expr pointing into an
   "omp simd array", redirect its index to LANE and remember the
   original index in LANE0.  */
9148 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
9150 val
= DECL_VALUE_EXPR (new_vard
);
9151 if (new_vard
!= new_var
)
9153 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
9154 val
= TREE_OPERAND (val
, 0);
9156 if (TREE_CODE (val
) == ARRAY_REF
9157 && VAR_P (TREE_OPERAND (val
, 0)))
9159 tree v
= TREE_OPERAND (val
, 0);
9160 if (lookup_attribute ("omp simd array",
9161 DECL_ATTRIBUTES (v
)))
9163 val
= unshare_expr (val
);
9164 lane0
= TREE_OPERAND (val
, 1);
9165 TREE_OPERAND (val
, 1) = lane
;
9166 var2
= lookup_decl (v
, octx
);
9167 if (octx
->scan_exclusive
)
9168 var4
= lookup_decl (var2
, octx
);
9170 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9171 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
9174 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
9175 var2
, lane
, NULL_TREE
, NULL_TREE
);
9176 TREE_THIS_NOTRAP (var2
) = 1;
9177 if (octx
->scan_exclusive
)
9179 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
9180 var4
, lane
, NULL_TREE
,
9182 TREE_THIS_NOTRAP (var4
) = 1;
9193 var2
= build_outer_var_ref (var
, octx
);
9194 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9196 var3
= maybe_lookup_decl (new_vard
, octx
);
9197 if (var3
== new_vard
|| var3
== NULL_TREE
)
9199 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
9201 var4
= maybe_lookup_decl (var3
, octx
);
9202 if (var4
== var3
|| var4
== NULL_TREE
)
9204 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
9215 && octx
->scan_exclusive
9217 && var4
== NULL_TREE
)
9218 var4
= create_tmp_var (TREE_TYPE (val
));
/* User-defined reduction: run the clause's GIMPLE_INIT / merge
   sequences with placeholder and value exprs temporarily bound.  */
9220 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9222 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9227 /* If we've added a separate identity element
9228 variable, copy it over into val. */
9229 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9231 gimplify_and_add (x
, &before
);
9233 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
9235 /* Otherwise, assign to it the identity element. */
9236 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9238 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9239 tree ref
= build_outer_var_ref (var
, octx
);
9240 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9241 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9244 if (new_vard
!= new_var
)
9245 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9246 SET_DECL_VALUE_EXPR (new_vard
, val
);
9248 SET_DECL_VALUE_EXPR (placeholder
, ref
);
9249 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9250 lower_omp (&tseq
, octx
);
9252 SET_DECL_VALUE_EXPR (new_vard
, x
);
9253 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9254 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9255 gimple_seq_add_seq (&before
, tseq
);
9257 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
/* Scan phase for a UDR: for exclusive scan first save the running
   value, then merge, then publish per inclusivity.  */
9263 if (octx
->scan_exclusive
)
9265 tree v4
= unshare_expr (var4
);
9266 tree v2
= unshare_expr (var2
);
9267 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
9268 gimplify_and_add (x
, &before
);
9270 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9271 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9272 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9274 if (x
&& new_vard
!= new_var
)
9275 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
9277 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
9278 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9279 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9280 lower_omp (&tseq
, octx
);
9281 gimple_seq_add_seq (&before
, tseq
);
9282 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9284 SET_DECL_VALUE_EXPR (new_vard
, x
);
9285 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9286 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9287 if (octx
->scan_inclusive
)
9289 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9291 gimplify_and_add (x
, &before
);
9293 else if (lane0
== NULL_TREE
)
9295 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9297 gimplify_and_add (x
, &before
);
9305 /* input phase. Set val to initializer before
9307 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
9308 gimplify_assign (val
, x
, &before
);
/* Plain (non-UDR) reduction: build the combining expression directly
   from the clause's reduction code.  */
9313 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
9314 if (code
== MINUS_EXPR
)
9317 tree x
= build2 (code
, TREE_TYPE (var2
),
9318 unshare_expr (var2
), unshare_expr (val
));
9319 if (octx
->scan_inclusive
)
9321 gimplify_assign (unshare_expr (var2
), x
, &before
);
9322 gimplify_assign (val
, var2
, &before
);
9326 gimplify_assign (unshare_expr (var4
),
9327 unshare_expr (var2
), &before
);
9328 gimplify_assign (var2
, x
, &before
);
9329 if (lane0
== NULL_TREE
)
9330 gimplify_assign (val
, var4
, &before
);
9334 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
9336 tree vexpr
= unshare_expr (var4
);
9337 TREE_OPERAND (vexpr
, 1) = lane0
;
9338 if (new_vard
!= new_var
)
9339 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
9340 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
/* Splice the generated BEFORE sequence and the scan body back into the
   statement stream (pure-simd case inlines both; otherwise the body is
   lowered in place with BEFORE prepended).  */
9344 if (is_simd
&& !is_for_simd
)
9346 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
9347 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
9348 gsi_replace (gsi_p
, gimple_build_nop (), true);
9351 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
9354 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (stmt
));
9355 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
/* NOTE(review): lossy extraction of GCC's omp-low.c -- lines are split
   mid-expression and some original lines are missing.  Verify against
   upstream before relying on exact semantics.  */
9360 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9361 substitution of a couple of function calls. But in the NAMED case,
9362 requires that languages coordinate a symbol name. It is therefore
9363 best put here in common code. */
/* Map from critical-section name to the lazily created mutex decl;
   GTY(()) so the garbage collector keeps the cached trees alive.  */
9365 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
9368 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9371 tree name
, lock
, unlock
;
9372 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
9374 location_t loc
= gimple_location (stmt
);
9377 name
= gimple_omp_critical_name (stmt
);
/* NAMED case: look up (or create on first use) a public common symbol
   ".gomp_critical_user_<name>" shared across translation units, so all
   TUs using the same critical name contend on one mutex.  */
9382 if (!critical_name_mutexes
)
9383 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
9385 tree
*n
= critical_name_mutexes
->get (name
);
9390 decl
= create_tmp_var_raw (ptr_type_node
);
9392 new_str
= ACONCAT ((".gomp_critical_user_",
9393 IDENTIFIER_POINTER (name
), NULL
));
9394 DECL_NAME (decl
) = get_identifier (new_str
);
9395 TREE_PUBLIC (decl
) = 1;
9396 TREE_STATIC (decl
) = 1;
9397 DECL_COMMON (decl
) = 1;
9398 DECL_ARTIFICIAL (decl
) = 1;
9399 DECL_IGNORED_P (decl
) = 1;
9401 varpool_node::finalize_decl (decl
);
9403 critical_name_mutexes
->put (name
, decl
);
9408 /* If '#pragma omp critical' is inside offloaded region or
9409 inside function marked as offloadable, the symbol must be
9410 marked as offloadable too. */
9412 if (cgraph_node::get (current_function_decl
)->offloadable
)
9413 varpool_node::get_create (decl
)->offloadable
= 1;
9415 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
9416 if (is_gimple_omp_offloaded (octx
->stmt
))
9418 varpool_node::get_create (decl
)->offloadable
= 1;
/* Named criticals lock/unlock through the per-name mutex address.  */
9422 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
9423 lock
= build_call_expr_loc (loc
, lock
, 1,
9424 build_fold_addr_expr_loc (loc
, decl
));
9426 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
9427 unlock
= build_call_expr_loc (loc
, unlock
, 1,
9428 build_fold_addr_expr_loc (loc
, decl
));
/* Unnamed criticals use the global GOMP_critical_start/end pair.  */
9432 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
9433 lock
= build_call_expr_loc (loc
, lock
, 0);
9435 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
9436 unlock
= build_call_expr_loc (loc
, unlock
, 0);
/* Wrap the critical body in a GIMPLE_BIND: lock call, lowered body
   (with exceptions caught so the lock is not leaked), unlock call.  */
9439 push_gimplify_context ();
9441 block
= make_node (BLOCK
);
9442 bind
= gimple_build_bind (NULL
, NULL
, block
);
9443 gsi_replace (gsi_p
, bind
, true);
9444 gimple_bind_add_stmt (bind
, stmt
);
9446 tbody
= gimple_bind_body (bind
);
9447 gimplify_and_add (lock
, &tbody
);
9448 gimple_bind_set_body (bind
, tbody
);
9450 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9451 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9452 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9453 gimple_omp_set_body (stmt
, NULL
);
9455 tbody
= gimple_bind_body (bind
);
9456 gimplify_and_add (unlock
, &tbody
);
9457 gimple_bind_set_body (bind
, tbody
);
9459 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9461 pop_gimplify_context (bind
);
9462 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9463 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
/* NOTE(review): lossy extraction of GCC's omp-low.c -- lines are split
   mid-expression and some original lines are missing.  Verify against
   upstream before relying on exact semantics.  */
9466 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9467 for a lastprivate clause. Given a loop control predicate of (V
9468 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9469 is appended to *DLIST, iterator initialization is appended to
9470 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9471 to be emitted in a critical section. */
9474 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
9475 gimple_seq
*dlist
, gimple_seq
*clist
,
9476 struct omp_context
*ctx
)
9478 tree clauses
, cond
, vinit
;
9479 enum tree_code cond_code
;
/* Invert the canonicalized loop condition: the lastprivate copy-out
   runs when the loop predicate no longer holds.  */
9482 cond_code
= fd
->loop
.cond_code
;
9483 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
9485 /* When possible, use a strict equality expression. This can let VRP
9486 type optimizations deduce the value and remove a copy. */
9487 if (tree_fits_shwi_p (fd
->loop
.step
))
9489 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
9490 if (step
== 1 || step
== -1)
9491 cond_code
= EQ_EXPR
;
/* For a combined collapsed loop with non-constant bound, the real N2
   lives on an outer construct -- recover it from the outer for/taskreg
   context's _looptemp_ clauses.  */
9494 tree n2
= fd
->loop
.n2
;
9495 if (fd
->collapse
> 1
9496 && TREE_CODE (n2
) != INTEGER_CST
9497 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
9499 struct omp_context
*taskreg_ctx
= NULL
;
9500 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
9502 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
9503 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
9504 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
9506 if (gimple_omp_for_combined_into_p (gfor
))
9508 gcc_assert (ctx
->outer
->outer
9509 && is_parallel_ctx (ctx
->outer
->outer
));
9510 taskreg_ctx
= ctx
->outer
->outer
;
9514 struct omp_for_data outer_fd
;
9515 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
9516 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
9519 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
9520 taskreg_ctx
= ctx
->outer
->outer
;
9522 else if (is_taskreg_ctx (ctx
->outer
))
9523 taskreg_ctx
= ctx
->outer
;
/* Skip COLLAPSE pairs of _looptemp_ clauses, then the next one holds
   the innermost bound to compare against.  */
9527 tree taskreg_clauses
9528 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
9529 tree innerc
= omp_find_clause (taskreg_clauses
,
9530 OMP_CLAUSE__LOOPTEMP_
);
9531 gcc_assert (innerc
);
9532 for (i
= 0; i
< fd
->collapse
; i
++)
9534 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
9535 OMP_CLAUSE__LOOPTEMP_
);
9536 gcc_assert (innerc
);
9538 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
9539 OMP_CLAUSE__LOOPTEMP_
);
9541 n2
= fold_convert (TREE_TYPE (n2
),
9542 lookup_decl (OMP_CLAUSE_DECL (innerc
),
/* Gate: lastprivate copy-out happens only for the thread that executed
   the sequentially last iteration, i.e. when (V cond_code N2) holds.  */
9546 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
9548 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
9550 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
9551 if (!gimple_seq_empty_p (stmts
))
9553 gimple_seq_add_seq (&stmts
, *dlist
);
9556 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9557 vinit
= fd
->loop
.n1
;
9558 if (cond_code
== EQ_EXPR
9559 && tree_fits_shwi_p (fd
->loop
.n2
)
9560 && ! integer_zerop (fd
->loop
.n2
))
9561 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
9563 vinit
= unshare_expr (vinit
);
9565 /* Initialize the iterator variable, so that threads that don't execute
9566 any iterations don't execute the lastprivate clauses by accident. */
9567 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
/* NOTE(review): lossy extraction of GCC's omp-low.c -- lines are split
   mid-expression and some original lines (return type, braces, the
   function's tail) are missing.  Verify against upstream.  */
9571 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9574 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
9575 struct walk_stmt_info
*wi
)
9577 gimple
*stmt
= gsi_stmt (*gsi_p
);
/* By default do not descend into operands of handled statements.  */
9579 *handled_ops_p
= true;
9580 switch (gimple_code (stmt
))
/* Do walk into a combined simd loop's body looking for the scan.  */
9584 case GIMPLE_OMP_FOR
:
9585 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
9586 && gimple_omp_for_combined_into_p (stmt
))
9587 *handled_ops_p
= false;
/* Found it: record the iterator through WI->info and stop the walk by
   returning a non-NULL tree.  */
9590 case GIMPLE_OMP_SCAN
:
9591 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
9592 return integer_zero_node
;
9599 /* Helper function for lower_omp_for, add transformations for a worksharing
9600 loop with scan directives inside of it.
9601 For worksharing loop not combined with simd, transform:
9602 #pragma omp for reduction(inscan,+:r) private(i)
9603 for (i = 0; i < n; i = i + 1)
9608 #pragma omp scan inclusive(r)
9614 into two worksharing loops + code to merge results:
9616 num_threads = omp_get_num_threads ();
9617 thread_num = omp_get_thread_num ();
9618 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9623 // For UDRs this is UDR init, or if ctors are needed, copy from
9624 // var3 that has been constructed to contain the neutral element.
9628 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9629 // a shared array with num_threads elements and rprivb to a local array
9630 // number of elements equal to the number of (contiguous) iterations the
9631 // current thread will perform. controlb and controlp variables are
9632 // temporaries to handle deallocation of rprivb at the end of second
9634 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9635 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9636 for (i = 0; i < n; i = i + 1)
9639 // For UDRs this is UDR init or copy from var3.
9641 // This is the input phase from user code.
9645 // For UDRs this is UDR merge.
9647 // Rather than handing it over to the user, save to local thread's
9649 rprivb[ivar] = var2;
9650 // For exclusive scan, the above two statements are swapped.
9654 // And remember the final value from this thread's into the shared
9656 rpriva[(sizetype) thread_num] = var2;
9657 // If more than one thread, compute using Work-Efficient prefix sum
9658 // the inclusive parallel scan of the rpriva array.
9659 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9664 num_threadsu = (unsigned int) num_threads;
9665 thread_numup1 = (unsigned int) thread_num + 1;
9668 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9672 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9677 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9678 mul = REALPART_EXPR <cplx>;
9679 ovf = IMAGPART_EXPR <cplx>;
9680 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9683 andvm1 = andv + 4294967295;
9685 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9687 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9688 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9689 rpriva[l] = rpriva[l - k] + rpriva[l];
9691 if (down == 0) goto <D.2121>; else goto <D.2122>;
9699 if (k != 0) goto <D.2108>; else goto <D.2103>;
9701 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9703 // For UDRs this is UDR init or copy from var3.
9707 var2 = rpriva[thread_num - 1];
9710 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9711 reduction(inscan,+:r) private(i)
9712 for (i = 0; i < n; i = i + 1)
9715 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9716 r = var2 + rprivb[ivar];
9719 // This is the scan phase from user code.
9721 // Plus a bump of the iterator.
9727 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
9728 struct omp_for_data
*fd
, omp_context
*ctx
)
9730 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
9731 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
9733 gimple_seq body
= gimple_omp_body (stmt
);
9734 gimple_stmt_iterator input1_gsi
= gsi_none ();
9735 struct walk_stmt_info wi
;
9736 memset (&wi
, 0, sizeof (wi
));
9738 wi
.info
= (void *) &input1_gsi
;
9739 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
9740 gcc_assert (!gsi_end_p (input1_gsi
));
9742 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
9743 gimple_stmt_iterator gsi
= input1_gsi
;
9745 gimple_stmt_iterator scan1_gsi
= gsi
;
9746 gimple
*scan_stmt1
= gsi_stmt (gsi
);
9747 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
9749 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
9750 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
9751 gimple_omp_set_body (input_stmt1
, NULL
);
9752 gimple_omp_set_body (scan_stmt1
, NULL
);
9753 gimple_omp_set_body (stmt
, NULL
);
9755 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
9756 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
9757 gimple_omp_set_body (stmt
, body
);
9758 gimple_omp_set_body (input_stmt1
, input_body
);
9760 gimple_stmt_iterator input2_gsi
= gsi_none ();
9761 memset (&wi
, 0, sizeof (wi
));
9763 wi
.info
= (void *) &input2_gsi
;
9764 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
9765 gcc_assert (!gsi_end_p (input2_gsi
));
9767 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
9770 gimple_stmt_iterator scan2_gsi
= gsi
;
9771 gimple
*scan_stmt2
= gsi_stmt (gsi
);
9772 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
9773 gimple_omp_set_body (scan_stmt2
, scan_body
);
9775 gimple_stmt_iterator input3_gsi
= gsi_none ();
9776 gimple_stmt_iterator scan3_gsi
= gsi_none ();
9777 gimple_stmt_iterator input4_gsi
= gsi_none ();
9778 gimple_stmt_iterator scan4_gsi
= gsi_none ();
9779 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
9780 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
9781 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
9784 memset (&wi
, 0, sizeof (wi
));
9786 wi
.info
= (void *) &input3_gsi
;
9787 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
9788 gcc_assert (!gsi_end_p (input3_gsi
));
9790 input_stmt3
= gsi_stmt (input3_gsi
);
9794 scan_stmt3
= gsi_stmt (gsi
);
9795 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
9797 memset (&wi
, 0, sizeof (wi
));
9799 wi
.info
= (void *) &input4_gsi
;
9800 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
9801 gcc_assert (!gsi_end_p (input4_gsi
));
9803 input_stmt4
= gsi_stmt (input4_gsi
);
9807 scan_stmt4
= gsi_stmt (gsi
);
9808 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
9810 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
9811 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
9814 tree num_threads
= create_tmp_var (integer_type_node
);
9815 tree thread_num
= create_tmp_var (integer_type_node
);
9816 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
9817 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9818 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
9819 gimple_call_set_lhs (g
, num_threads
);
9820 gimple_seq_add_stmt (body_p
, g
);
9821 g
= gimple_build_call (threadnum_decl
, 0);
9822 gimple_call_set_lhs (g
, thread_num
);
9823 gimple_seq_add_stmt (body_p
, g
);
9825 tree ivar
= create_tmp_var (sizetype
);
9826 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
9827 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
9828 tree k
= create_tmp_var (unsigned_type_node
);
9829 tree l
= create_tmp_var (unsigned_type_node
);
9831 gimple_seq clist
= NULL
, mdlist
= NULL
;
9832 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
9833 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
9834 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
9835 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
9836 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
9837 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9838 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
9840 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9841 tree var
= OMP_CLAUSE_DECL (c
);
9842 tree new_var
= lookup_decl (var
, ctx
);
9843 tree var3
= NULL_TREE
;
9844 tree new_vard
= new_var
;
9845 if (omp_is_reference (var
))
9846 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
9847 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9849 var3
= maybe_lookup_decl (new_vard
, ctx
);
9850 if (var3
== new_vard
)
9854 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
9855 tree rpriva
= create_tmp_var (ptype
);
9856 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
9857 OMP_CLAUSE_DECL (nc
) = rpriva
;
9859 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9861 tree rprivb
= create_tmp_var (ptype
);
9862 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
9863 OMP_CLAUSE_DECL (nc
) = rprivb
;
9864 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
9866 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9868 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
9869 if (new_vard
!= new_var
)
9870 TREE_ADDRESSABLE (var2
) = 1;
9871 gimple_add_tmp_var (var2
);
9873 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
9874 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9875 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9876 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9877 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9879 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
9880 thread_num
, integer_minus_one_node
);
9881 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
9882 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9883 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9884 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9885 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9887 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
9888 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9889 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9890 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9891 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9893 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
9894 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
9895 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9896 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9897 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9898 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9900 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
9901 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9902 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
9903 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9905 tree var4
= is_for_simd
? new_var
: var2
;
9906 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
9909 var5
= lookup_decl (var
, input_simd_ctx
);
9910 var6
= lookup_decl (var
, scan_simd_ctx
);
9911 if (new_vard
!= new_var
)
9913 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
9914 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
9917 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9919 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9922 x
= lang_hooks
.decls
.omp_clause_default_ctor
9923 (c
, var2
, build_outer_var_ref (var
, ctx
));
9925 gimplify_and_add (x
, &clist
);
9927 x
= build_outer_var_ref (var
, ctx
);
9928 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
9930 gimplify_and_add (x
, &thr01_list
);
9932 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9933 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9936 x
= unshare_expr (var4
);
9937 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
9938 gimplify_and_add (x
, &thrn1_list
);
9939 x
= unshare_expr (var4
);
9940 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
9941 gimplify_and_add (x
, &thr02_list
);
9943 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
9945 /* Otherwise, assign to it the identity element. */
9946 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9947 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9950 if (new_vard
!= new_var
)
9951 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9952 SET_DECL_VALUE_EXPR (new_vard
, val
);
9953 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
9955 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
9956 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9957 lower_omp (&tseq
, ctx
);
9958 gimple_seq_add_seq (&thrn1_list
, tseq
);
9959 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9960 lower_omp (&tseq
, ctx
);
9961 gimple_seq_add_seq (&thr02_list
, tseq
);
9962 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9963 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9964 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9966 SET_DECL_VALUE_EXPR (new_vard
, y
);
9969 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
9970 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
9974 x
= unshare_expr (var4
);
9975 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
9976 gimplify_and_add (x
, &thrn2_list
);
9980 x
= unshare_expr (rprivb_ref
);
9981 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
9982 gimplify_and_add (x
, &scan1_list
);
9986 if (ctx
->scan_exclusive
)
9988 x
= unshare_expr (rprivb_ref
);
9989 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
9990 gimplify_and_add (x
, &scan1_list
);
9993 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9994 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9995 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9996 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9997 lower_omp (&tseq
, ctx
);
9998 gimple_seq_add_seq (&scan1_list
, tseq
);
10000 if (ctx
->scan_inclusive
)
10002 x
= unshare_expr (rprivb_ref
);
10003 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
10004 gimplify_and_add (x
, &scan1_list
);
10008 x
= unshare_expr (rpriva_ref
);
10009 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
10010 unshare_expr (var4
));
10011 gimplify_and_add (x
, &mdlist
);
10013 x
= unshare_expr (is_for_simd
? var6
: new_var
);
10014 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
10015 gimplify_and_add (x
, &input2_list
);
10018 if (new_vard
!= new_var
)
10019 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10021 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10022 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10023 SET_DECL_VALUE_EXPR (new_vard
, val
);
10024 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
10027 SET_DECL_VALUE_EXPR (placeholder
, var6
);
10028 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10031 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10032 lower_omp (&tseq
, ctx
);
10034 SET_DECL_VALUE_EXPR (new_vard
, y
);
10037 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
10038 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
10042 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
10043 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10044 lower_omp (&tseq
, ctx
);
10046 gimple_seq_add_seq (&input2_list
, tseq
);
10048 x
= build_outer_var_ref (var
, ctx
);
10049 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
10050 gimplify_and_add (x
, &last_list
);
10052 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
10053 gimplify_and_add (x
, &reduc_list
);
10054 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10055 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10057 if (new_vard
!= new_var
)
10058 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10059 SET_DECL_VALUE_EXPR (new_vard
, val
);
10060 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
10061 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10062 lower_omp (&tseq
, ctx
);
10063 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
10064 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10065 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10067 SET_DECL_VALUE_EXPR (new_vard
, y
);
10070 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
10071 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
10073 gimple_seq_add_seq (&reduc_list
, tseq
);
10074 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
10075 gimplify_and_add (x
, &reduc_list
);
10077 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
10079 gimplify_and_add (x
, dlist
);
10083 x
= build_outer_var_ref (var
, ctx
);
10084 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
10086 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
10087 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
10089 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
10091 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
10093 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
10094 if (code
== MINUS_EXPR
)
10098 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
10101 if (ctx
->scan_exclusive
)
10102 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
10104 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
10105 gimplify_assign (var2
, x
, &scan1_list
);
10106 if (ctx
->scan_inclusive
)
10107 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
10111 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
10114 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
10115 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
10117 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
10120 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
10121 unshare_expr (rprival_ref
));
10122 gimplify_assign (rprival_ref
, x
, &reduc_list
);
10126 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10127 gimple_seq_add_stmt (&scan1_list
, g
);
10128 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10129 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10130 ? scan_stmt4
: scan_stmt2
), g
);
10132 tree controlb
= create_tmp_var (boolean_type_node
);
10133 tree controlp
= create_tmp_var (ptr_type_node
);
10134 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10135 OMP_CLAUSE_DECL (nc
) = controlb
;
10136 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10138 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10139 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10140 OMP_CLAUSE_DECL (nc
) = controlp
;
10141 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10143 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10144 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10145 OMP_CLAUSE_DECL (nc
) = controlb
;
10146 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10148 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10149 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10150 OMP_CLAUSE_DECL (nc
) = controlp
;
10151 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10153 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10155 *cp1
= gimple_omp_for_clauses (stmt
);
10156 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
10157 *cp2
= gimple_omp_for_clauses (new_stmt
);
10158 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
10162 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
10163 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
10165 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
10167 gsi_remove (&input3_gsi
, true);
10168 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
10170 gsi_remove (&scan3_gsi
, true);
10171 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
10173 gsi_remove (&input4_gsi
, true);
10174 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
10176 gsi_remove (&scan4_gsi
, true);
10180 gimple_omp_set_body (scan_stmt1
, scan1_list
);
10181 gimple_omp_set_body (input_stmt2
, input2_list
);
10184 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
10186 gsi_remove (&input1_gsi
, true);
10187 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
10189 gsi_remove (&scan1_gsi
, true);
10190 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
10192 gsi_remove (&input2_gsi
, true);
10193 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
10195 gsi_remove (&scan2_gsi
, true);
10197 gimple_seq_add_seq (body_p
, clist
);
10199 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10200 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10201 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10202 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
10203 gimple_seq_add_stmt (body_p
, g
);
10204 g
= gimple_build_label (lab1
);
10205 gimple_seq_add_stmt (body_p
, g
);
10206 gimple_seq_add_seq (body_p
, thr01_list
);
10207 g
= gimple_build_goto (lab3
);
10208 gimple_seq_add_stmt (body_p
, g
);
10209 g
= gimple_build_label (lab2
);
10210 gimple_seq_add_stmt (body_p
, g
);
10211 gimple_seq_add_seq (body_p
, thrn1_list
);
10212 g
= gimple_build_label (lab3
);
10213 gimple_seq_add_stmt (body_p
, g
);
10215 g
= gimple_build_assign (ivar
, size_zero_node
);
10216 gimple_seq_add_stmt (body_p
, g
);
10218 gimple_seq_add_stmt (body_p
, stmt
);
10219 gimple_seq_add_seq (body_p
, body
);
10220 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
10223 g
= gimple_build_omp_return (true);
10224 gimple_seq_add_stmt (body_p
, g
);
10225 gimple_seq_add_seq (body_p
, mdlist
);
10227 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10228 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10229 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
10230 gimple_seq_add_stmt (body_p
, g
);
10231 g
= gimple_build_label (lab1
);
10232 gimple_seq_add_stmt (body_p
, g
);
10234 g
= omp_build_barrier (NULL
);
10235 gimple_seq_add_stmt (body_p
, g
);
10237 tree down
= create_tmp_var (unsigned_type_node
);
10238 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
10239 gimple_seq_add_stmt (body_p
, g
);
10241 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
10242 gimple_seq_add_stmt (body_p
, g
);
10244 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
10245 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
10246 gimple_seq_add_stmt (body_p
, g
);
10248 tree thread_numu
= create_tmp_var (unsigned_type_node
);
10249 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
10250 gimple_seq_add_stmt (body_p
, g
);
10252 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
10253 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
10254 build_int_cst (unsigned_type_node
, 1));
10255 gimple_seq_add_stmt (body_p
, g
);
10257 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10258 g
= gimple_build_label (lab3
);
10259 gimple_seq_add_stmt (body_p
, g
);
10261 tree twok
= create_tmp_var (unsigned_type_node
);
10262 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
10263 gimple_seq_add_stmt (body_p
, g
);
10265 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
10266 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
10267 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
10268 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
10269 gimple_seq_add_stmt (body_p
, g
);
10270 g
= gimple_build_label (lab4
);
10271 gimple_seq_add_stmt (body_p
, g
);
10272 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
10273 gimple_seq_add_stmt (body_p
, g
);
10274 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10275 gimple_seq_add_stmt (body_p
, g
);
10277 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
10278 gimple_seq_add_stmt (body_p
, g
);
10279 g
= gimple_build_label (lab6
);
10280 gimple_seq_add_stmt (body_p
, g
);
10282 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10283 gimple_seq_add_stmt (body_p
, g
);
10285 g
= gimple_build_label (lab5
);
10286 gimple_seq_add_stmt (body_p
, g
);
10288 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
10289 gimple_seq_add_stmt (body_p
, g
);
10291 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
10292 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
10293 gimple_call_set_lhs (g
, cplx
);
10294 gimple_seq_add_stmt (body_p
, g
);
10295 tree mul
= create_tmp_var (unsigned_type_node
);
10296 g
= gimple_build_assign (mul
, REALPART_EXPR
,
10297 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
10298 gimple_seq_add_stmt (body_p
, g
);
10299 tree ovf
= create_tmp_var (unsigned_type_node
);
10300 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
10301 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
10302 gimple_seq_add_stmt (body_p
, g
);
10304 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
10305 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
10306 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
10308 gimple_seq_add_stmt (body_p
, g
);
10309 g
= gimple_build_label (lab7
);
10310 gimple_seq_add_stmt (body_p
, g
);
10312 tree andv
= create_tmp_var (unsigned_type_node
);
10313 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
10314 gimple_seq_add_stmt (body_p
, g
);
10315 tree andvm1
= create_tmp_var (unsigned_type_node
);
10316 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
10317 build_minus_one_cst (unsigned_type_node
));
10318 gimple_seq_add_stmt (body_p
, g
);
10320 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
10321 gimple_seq_add_stmt (body_p
, g
);
10323 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
10324 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
10325 gimple_seq_add_stmt (body_p
, g
);
10326 g
= gimple_build_label (lab9
);
10327 gimple_seq_add_stmt (body_p
, g
);
10328 gimple_seq_add_seq (body_p
, reduc_list
);
10329 g
= gimple_build_label (lab8
);
10330 gimple_seq_add_stmt (body_p
, g
);
10332 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
10333 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
10334 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
10335 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
10337 gimple_seq_add_stmt (body_p
, g
);
10338 g
= gimple_build_label (lab10
);
10339 gimple_seq_add_stmt (body_p
, g
);
10340 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
10341 gimple_seq_add_stmt (body_p
, g
);
10342 g
= gimple_build_goto (lab12
);
10343 gimple_seq_add_stmt (body_p
, g
);
10344 g
= gimple_build_label (lab11
);
10345 gimple_seq_add_stmt (body_p
, g
);
10346 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10347 gimple_seq_add_stmt (body_p
, g
);
10348 g
= gimple_build_label (lab12
);
10349 gimple_seq_add_stmt (body_p
, g
);
10351 g
= omp_build_barrier (NULL
);
10352 gimple_seq_add_stmt (body_p
, g
);
10354 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
10356 gimple_seq_add_stmt (body_p
, g
);
10358 g
= gimple_build_label (lab2
);
10359 gimple_seq_add_stmt (body_p
, g
);
10361 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10362 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10363 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10364 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
10365 gimple_seq_add_stmt (body_p
, g
);
10366 g
= gimple_build_label (lab1
);
10367 gimple_seq_add_stmt (body_p
, g
);
10368 gimple_seq_add_seq (body_p
, thr02_list
);
10369 g
= gimple_build_goto (lab3
);
10370 gimple_seq_add_stmt (body_p
, g
);
10371 g
= gimple_build_label (lab2
);
10372 gimple_seq_add_stmt (body_p
, g
);
10373 gimple_seq_add_seq (body_p
, thrn2_list
);
10374 g
= gimple_build_label (lab3
);
10375 gimple_seq_add_stmt (body_p
, g
);
10377 g
= gimple_build_assign (ivar
, size_zero_node
);
10378 gimple_seq_add_stmt (body_p
, g
);
10379 gimple_seq_add_stmt (body_p
, new_stmt
);
10380 gimple_seq_add_seq (body_p
, new_body
);
10382 gimple_seq new_dlist
= NULL
;
10383 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10384 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10385 tree num_threadsm1
= create_tmp_var (integer_type_node
);
10386 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
10387 integer_minus_one_node
);
10388 gimple_seq_add_stmt (&new_dlist
, g
);
10389 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
10390 gimple_seq_add_stmt (&new_dlist
, g
);
10391 g
= gimple_build_label (lab1
);
10392 gimple_seq_add_stmt (&new_dlist
, g
);
10393 gimple_seq_add_seq (&new_dlist
, last_list
);
10394 g
= gimple_build_label (lab2
);
10395 gimple_seq_add_stmt (&new_dlist
, g
);
10396 gimple_seq_add_seq (&new_dlist
, *dlist
);
10397 *dlist
= new_dlist
;
10400 /* Lower code for an OMP loop directive. */
10403 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10405 tree
*rhs_p
, block
;
10406 struct omp_for_data fd
, *fdp
= NULL
;
10407 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
10409 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
10410 gimple_seq cnt_list
= NULL
, clist
= NULL
;
10411 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
10414 push_gimplify_context ();
10416 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
10418 block
= make_node (BLOCK
);
10419 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
10420 /* Replace at gsi right away, so that 'stmt' is no member
10421 of a sequence anymore as we're going to add to a different
10423 gsi_replace (gsi_p
, new_stmt
, true);
10425 /* Move declaration of temporaries in the loop body before we make
10427 omp_for_body
= gimple_omp_body (stmt
);
10428 if (!gimple_seq_empty_p (omp_for_body
)
10429 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
10432 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
10433 tree vars
= gimple_bind_vars (inner_bind
);
10434 gimple_bind_append_vars (new_stmt
, vars
);
10435 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10436 keep them on the inner_bind and it's block. */
10437 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
10438 if (gimple_bind_block (inner_bind
))
10439 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
10442 if (gimple_omp_for_combined_into_p (stmt
))
10444 omp_extract_for_data (stmt
, &fd
, NULL
);
10447 /* We need two temporaries with fd.loop.v type (istart/iend)
10448 and then (fd.collapse - 1) temporaries with the same
10449 type for count2 ... countN-1 vars if not constant. */
10451 tree type
= fd
.iter_type
;
10452 if (fd
.collapse
> 1
10453 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
10454 count
+= fd
.collapse
- 1;
10456 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
10457 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
10458 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
10460 tree clauses
= *pc
;
10463 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
10464 OMP_CLAUSE__LOOPTEMP_
);
10465 if (ctx
->simt_stmt
)
10466 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
10467 OMP_CLAUSE__LOOPTEMP_
);
10468 for (i
= 0; i
< count
; i
++)
10473 gcc_assert (outerc
);
10474 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
10475 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
10476 OMP_CLAUSE__LOOPTEMP_
);
10480 /* If there are 2 adjacent SIMD stmts, one with _simt_
10481 clause, another without, make sure they have the same
10482 decls in _looptemp_ clauses, because the outer stmt
10483 they are combined into will look up just one inner_stmt. */
10484 if (ctx
->simt_stmt
)
10485 temp
= OMP_CLAUSE_DECL (simtc
);
10487 temp
= create_tmp_var (type
);
10488 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
10490 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
10491 OMP_CLAUSE_DECL (*pc
) = temp
;
10492 pc
= &OMP_CLAUSE_CHAIN (*pc
);
10493 if (ctx
->simt_stmt
)
10494 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
10495 OMP_CLAUSE__LOOPTEMP_
);
10500 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10504 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
10505 OMP_CLAUSE_REDUCTION
);
10506 tree rtmp
= NULL_TREE
;
10509 tree type
= build_pointer_type (pointer_sized_int_node
);
10510 tree temp
= create_tmp_var (type
);
10511 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
10512 OMP_CLAUSE_DECL (c
) = temp
;
10513 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
10514 gimple_omp_for_set_clauses (stmt
, c
);
10515 lower_omp_task_reductions (ctx
, OMP_FOR
,
10516 gimple_omp_for_clauses (stmt
),
10517 &tred_ilist
, &tred_dlist
);
10519 rtmp
= make_ssa_name (type
);
10520 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
10523 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
10526 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
10528 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
10529 gimple_omp_for_pre_body (stmt
));
10531 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10533 /* Lower the header expressions. At this point, we can assume that
10534 the header is of the form:
10536 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10538 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10539 using the .omp_data_s mapping, if needed. */
10540 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
10542 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
10543 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
10545 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
10546 TREE_VEC_ELT (*rhs_p
, 1)
10547 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
10548 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
10549 TREE_VEC_ELT (*rhs_p
, 2)
10550 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
10552 else if (!is_gimple_min_invariant (*rhs_p
))
10553 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10554 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10555 recompute_tree_invariant_for_addr_expr (*rhs_p
);
10557 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
10558 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
10560 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
10561 TREE_VEC_ELT (*rhs_p
, 1)
10562 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
10563 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
10564 TREE_VEC_ELT (*rhs_p
, 2)
10565 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
10567 else if (!is_gimple_min_invariant (*rhs_p
))
10568 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10569 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10570 recompute_tree_invariant_for_addr_expr (*rhs_p
);
10572 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
10573 if (!is_gimple_min_invariant (*rhs_p
))
10574 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10577 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
10579 gimple_seq_add_seq (&body
, cnt_list
);
10581 /* Once lowered, extract the bounds and clauses. */
10582 omp_extract_for_data (stmt
, &fd
, NULL
);
10584 if (is_gimple_omp_oacc (ctx
->stmt
)
10585 && !ctx_in_oacc_kernels_region (ctx
))
10586 lower_oacc_head_tail (gimple_location (stmt
),
10587 gimple_omp_for_clauses (stmt
),
10588 &oacc_head
, &oacc_tail
, ctx
);
10590 /* Add OpenACC partitioning and reduction markers just before the loop. */
10592 gimple_seq_add_seq (&body
, oacc_head
);
10594 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
10596 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10597 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10598 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10599 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10601 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
10602 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
10603 OMP_CLAUSE_LINEAR_STEP (c
)
10604 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
10608 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
10609 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10610 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
10613 gimple_seq_add_stmt (&body
, stmt
);
10614 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
10617 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
10620 /* After the loop, add exit clauses. */
10621 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
10625 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
10626 gcall
*g
= gimple_build_call (fndecl
, 0);
10627 gimple_seq_add_stmt (&body
, g
);
10628 gimple_seq_add_seq (&body
, clist
);
10629 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
10630 g
= gimple_build_call (fndecl
, 0);
10631 gimple_seq_add_stmt (&body
, g
);
10634 if (ctx
->cancellable
)
10635 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
10637 gimple_seq_add_seq (&body
, dlist
);
10641 gimple_seq_add_seq (&tred_ilist
, body
);
10645 body
= maybe_catch_exception (body
);
10647 /* Region exit marker goes at the end of the loop body. */
10648 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
10649 gimple_seq_add_stmt (&body
, g
);
10651 gimple_seq_add_seq (&body
, tred_dlist
);
10653 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
10656 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
10658 /* Add OpenACC joining and reduction markers just after the loop. */
10660 gimple_seq_add_seq (&body
, oacc_tail
);
10662 pop_gimplify_context (new_stmt
);
10664 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
10665 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
10666 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
10667 if (BLOCK_VARS (block
))
10668 TREE_USED (block
) = 1;
10670 gimple_bind_set_body (new_stmt
, body
);
10671 gimple_omp_set_body (stmt
, NULL
);
10672 gimple_omp_for_set_pre_body (stmt
, NULL
);
10675 /* Callback for walk_stmts. Check if the current statement only contains
10676 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10679 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
10680 bool *handled_ops_p
,
10681 struct walk_stmt_info
*wi
)
10683 int *info
= (int *) wi
->info
;
10684 gimple
*stmt
= gsi_stmt (*gsi_p
);
10686 *handled_ops_p
= true;
10687 switch (gimple_code (stmt
))
10693 case GIMPLE_OMP_FOR
:
10694 case GIMPLE_OMP_SECTIONS
:
10695 *info
= *info
== 0 ? 1 : -1;
10704 struct omp_taskcopy_context
10706 /* This field must be at the beginning, as we do "inheritance": Some
10707 callback functions for tree-inline.c (e.g., omp_copy_decl)
10708 receive a copy_body_data pointer that is up-casted to an
10709 omp_context pointer. */
10715 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
10717 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
10719 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
10720 return create_tmp_var (TREE_TYPE (var
));
10726 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
10728 tree name
, new_fields
= NULL
, type
, f
;
10730 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
10731 name
= DECL_NAME (TYPE_NAME (orig_type
));
10732 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
10733 TYPE_DECL
, name
, type
);
10734 TYPE_NAME (type
) = name
;
10736 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
10738 tree new_f
= copy_node (f
);
10739 DECL_CONTEXT (new_f
) = type
;
10740 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
10741 TREE_CHAIN (new_f
) = new_fields
;
10742 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10743 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10744 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
10746 new_fields
= new_f
;
10747 tcctx
->cb
.decl_map
->put (f
, new_f
);
10749 TYPE_FIELDS (type
) = nreverse (new_fields
);
10750 layout_type (type
);
10754 /* Create task copyfn. */
10757 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
10759 struct function
*child_cfun
;
10760 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
10761 tree record_type
, srecord_type
, bind
, list
;
10762 bool record_needs_remap
= false, srecord_needs_remap
= false;
10764 struct omp_taskcopy_context tcctx
;
10765 location_t loc
= gimple_location (task_stmt
);
10766 size_t looptempno
= 0;
10768 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
10769 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
10770 gcc_assert (child_cfun
->cfg
== NULL
);
10771 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
10773 /* Reset DECL_CONTEXT on function arguments. */
10774 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
10775 DECL_CONTEXT (t
) = child_fn
;
10777 /* Populate the function. */
10778 push_gimplify_context ();
10779 push_cfun (child_cfun
);
10781 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
10782 TREE_SIDE_EFFECTS (bind
) = 1;
10784 DECL_SAVED_TREE (child_fn
) = bind
;
10785 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
10787 /* Remap src and dst argument types if needed. */
10788 record_type
= ctx
->record_type
;
10789 srecord_type
= ctx
->srecord_type
;
10790 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
10791 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10793 record_needs_remap
= true;
10796 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
10797 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10799 srecord_needs_remap
= true;
10803 if (record_needs_remap
|| srecord_needs_remap
)
10805 memset (&tcctx
, '\0', sizeof (tcctx
));
10806 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
10807 tcctx
.cb
.dst_fn
= child_fn
;
10808 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
10809 gcc_checking_assert (tcctx
.cb
.src_node
);
10810 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
10811 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
10812 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
10813 tcctx
.cb
.eh_lp_nr
= 0;
10814 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
10815 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
10818 if (record_needs_remap
)
10819 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
10820 if (srecord_needs_remap
)
10821 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
10824 tcctx
.cb
.decl_map
= NULL
;
10826 arg
= DECL_ARGUMENTS (child_fn
);
10827 TREE_TYPE (arg
) = build_pointer_type (record_type
);
10828 sarg
= DECL_CHAIN (arg
);
10829 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
10831 /* First pass: initialize temporaries used in record_type and srecord_type
10832 sizes and field offsets. */
10833 if (tcctx
.cb
.decl_map
)
10834 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10835 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10839 decl
= OMP_CLAUSE_DECL (c
);
10840 p
= tcctx
.cb
.decl_map
->get (decl
);
10843 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10844 sf
= (tree
) n
->value
;
10845 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10846 src
= build_simple_mem_ref_loc (loc
, sarg
);
10847 src
= omp_build_component_ref (src
, sf
);
10848 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
10849 append_to_statement_list (t
, &list
);
10852 /* Second pass: copy shared var pointers and copy construct non-VLA
10853 firstprivate vars. */
10854 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10855 switch (OMP_CLAUSE_CODE (c
))
10857 splay_tree_key key
;
10858 case OMP_CLAUSE_SHARED
:
10859 decl
= OMP_CLAUSE_DECL (c
);
10860 key
= (splay_tree_key
) decl
;
10861 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
10862 key
= (splay_tree_key
) &DECL_UID (decl
);
10863 n
= splay_tree_lookup (ctx
->field_map
, key
);
10866 f
= (tree
) n
->value
;
10867 if (tcctx
.cb
.decl_map
)
10868 f
= *tcctx
.cb
.decl_map
->get (f
);
10869 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
10870 sf
= (tree
) n
->value
;
10871 if (tcctx
.cb
.decl_map
)
10872 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10873 src
= build_simple_mem_ref_loc (loc
, sarg
);
10874 src
= omp_build_component_ref (src
, sf
);
10875 dst
= build_simple_mem_ref_loc (loc
, arg
);
10876 dst
= omp_build_component_ref (dst
, f
);
10877 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10878 append_to_statement_list (t
, &list
);
10880 case OMP_CLAUSE_REDUCTION
:
10881 case OMP_CLAUSE_IN_REDUCTION
:
10882 decl
= OMP_CLAUSE_DECL (c
);
10883 if (TREE_CODE (decl
) == MEM_REF
)
10885 decl
= TREE_OPERAND (decl
, 0);
10886 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
10887 decl
= TREE_OPERAND (decl
, 0);
10888 if (TREE_CODE (decl
) == INDIRECT_REF
10889 || TREE_CODE (decl
) == ADDR_EXPR
)
10890 decl
= TREE_OPERAND (decl
, 0);
10892 key
= (splay_tree_key
) decl
;
10893 n
= splay_tree_lookup (ctx
->field_map
, key
);
10896 f
= (tree
) n
->value
;
10897 if (tcctx
.cb
.decl_map
)
10898 f
= *tcctx
.cb
.decl_map
->get (f
);
10899 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
10900 sf
= (tree
) n
->value
;
10901 if (tcctx
.cb
.decl_map
)
10902 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10903 src
= build_simple_mem_ref_loc (loc
, sarg
);
10904 src
= omp_build_component_ref (src
, sf
);
10905 if (decl
!= OMP_CLAUSE_DECL (c
)
10906 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
10907 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
10908 src
= build_simple_mem_ref_loc (loc
, src
);
10909 dst
= build_simple_mem_ref_loc (loc
, arg
);
10910 dst
= omp_build_component_ref (dst
, f
);
10911 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10912 append_to_statement_list (t
, &list
);
10914 case OMP_CLAUSE__LOOPTEMP_
:
10915 /* Fields for first two _looptemp_ clauses are initialized by
10916 GOMP_taskloop*, the rest are handled like firstprivate. */
10917 if (looptempno
< 2)
10923 case OMP_CLAUSE__REDUCTEMP_
:
10924 case OMP_CLAUSE_FIRSTPRIVATE
:
10925 decl
= OMP_CLAUSE_DECL (c
);
10926 if (is_variable_sized (decl
))
10928 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10931 f
= (tree
) n
->value
;
10932 if (tcctx
.cb
.decl_map
)
10933 f
= *tcctx
.cb
.decl_map
->get (f
);
10934 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10937 sf
= (tree
) n
->value
;
10938 if (tcctx
.cb
.decl_map
)
10939 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10940 src
= build_simple_mem_ref_loc (loc
, sarg
);
10941 src
= omp_build_component_ref (src
, sf
);
10942 if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
10943 src
= build_simple_mem_ref_loc (loc
, src
);
10947 dst
= build_simple_mem_ref_loc (loc
, arg
);
10948 dst
= omp_build_component_ref (dst
, f
);
10949 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
10950 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10952 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
10953 append_to_statement_list (t
, &list
);
10955 case OMP_CLAUSE_PRIVATE
:
10956 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
10958 decl
= OMP_CLAUSE_DECL (c
);
10959 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10960 f
= (tree
) n
->value
;
10961 if (tcctx
.cb
.decl_map
)
10962 f
= *tcctx
.cb
.decl_map
->get (f
);
10963 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10966 sf
= (tree
) n
->value
;
10967 if (tcctx
.cb
.decl_map
)
10968 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10969 src
= build_simple_mem_ref_loc (loc
, sarg
);
10970 src
= omp_build_component_ref (src
, sf
);
10971 if (use_pointer_for_field (decl
, NULL
))
10972 src
= build_simple_mem_ref_loc (loc
, src
);
10976 dst
= build_simple_mem_ref_loc (loc
, arg
);
10977 dst
= omp_build_component_ref (dst
, f
);
10978 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10979 append_to_statement_list (t
, &list
);
10985 /* Last pass: handle VLA firstprivates. */
10986 if (tcctx
.cb
.decl_map
)
10987 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10988 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10992 decl
= OMP_CLAUSE_DECL (c
);
10993 if (!is_variable_sized (decl
))
10995 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10998 f
= (tree
) n
->value
;
10999 f
= *tcctx
.cb
.decl_map
->get (f
);
11000 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
11001 ind
= DECL_VALUE_EXPR (decl
);
11002 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
11003 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
11004 n
= splay_tree_lookup (ctx
->sfield_map
,
11005 (splay_tree_key
) TREE_OPERAND (ind
, 0));
11006 sf
= (tree
) n
->value
;
11007 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11008 src
= build_simple_mem_ref_loc (loc
, sarg
);
11009 src
= omp_build_component_ref (src
, sf
);
11010 src
= build_simple_mem_ref_loc (loc
, src
);
11011 dst
= build_simple_mem_ref_loc (loc
, arg
);
11012 dst
= omp_build_component_ref (dst
, f
);
11013 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
11014 append_to_statement_list (t
, &list
);
11015 n
= splay_tree_lookup (ctx
->field_map
,
11016 (splay_tree_key
) TREE_OPERAND (ind
, 0));
11017 df
= (tree
) n
->value
;
11018 df
= *tcctx
.cb
.decl_map
->get (df
);
11019 ptr
= build_simple_mem_ref_loc (loc
, arg
);
11020 ptr
= omp_build_component_ref (ptr
, df
);
11021 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
11022 build_fold_addr_expr_loc (loc
, dst
));
11023 append_to_statement_list (t
, &list
);
11026 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
11027 append_to_statement_list (t
, &list
);
11029 if (tcctx
.cb
.decl_map
)
11030 delete tcctx
.cb
.decl_map
;
11031 pop_gimplify_context (NULL
);
11032 BIND_EXPR_BODY (bind
) = list
;
/* Lower OMP_CLAUSE_DEPEND clauses found on *PCLAUSES into the runtime's
   depend-info array form: a temporary pointer array is built whose leading
   slots hold counts and whose remaining slots hold the addresses of the
   dependence objects; initialization statements go to *ISEQ and the
   end-of-construct clobber goes to *OSEQ.  A new OMP_CLAUSE_DEPEND clause
   of kind OMP_CLAUSE_DEPEND_LAST pointing at the array is chained onto
   *PCLAUSES.

   NOTE(review): this chunk is a lossy extraction — brace lines, `break;`
   statements and the per-kind `cnt[...]++` increments visible in upstream
   omp-low.c are missing here.  Verify against the upstream file before
   relying on the exact control flow.  Comments below describe only what
   the surviving tokens establish.  */
11037 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
/* cnt[] tallies depend clauses per category; idx = 2 is the default
   header size of the depend array (grows to 5 when extended kinds are
   present — see the cnt[1]/cnt[3] test below).  */
11041 size_t cnt
[4] = { 0, 0, 0, 0 }, idx
= 2, i
;
11043 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
/* Caller guarantees at least one depend clause is present.  */
11044 gcc_assert (clauses
);
/* First pass: classify every depend clause by its kind.  */
11045 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11046 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
11047 switch (OMP_CLAUSE_DEPEND_KIND (c
))
11049 case OMP_CLAUSE_DEPEND_LAST
:
11050 /* Lowering already done at gimplification.  */
11052 case OMP_CLAUSE_DEPEND_IN
:
11055 case OMP_CLAUSE_DEPEND_OUT
:
11056 case OMP_CLAUSE_DEPEND_INOUT
:
11059 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
11062 case OMP_CLAUSE_DEPEND_DEPOBJ
:
/* SOURCE/SINK belong to ordered(depend) handling, not task depend;
   reaching here would be a front-end bug.  */
11065 case OMP_CLAUSE_DEPEND_SOURCE
:
11066 case OMP_CLAUSE_DEPEND_SINK
:
11069 gcc_unreachable ();
/* Extended kinds (mutexinoutset / depobj counts) force the larger
   5-slot header format.  NOTE(review): the statement that sets idx = 5
   is one of the lines lost in extraction — confirm upstream.  */
11071 if (cnt
[1] || cnt
[3])
11073 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3];
/* The depend array: header slots plus one pointer per dependence.  */
11074 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
);
11075 tree array
= create_tmp_var (type
);
11076 TREE_ADDRESSABLE (array
) = 1;
/* array[0] = 0 — semantics of slot 0 per the libgomp depend ABI;
   presumably "number of depend objects in the old format" — verify.  */
11077 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
11081 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
11082 gimple_seq_add_stmt (iseq
, g
);
/* array[1] = total number of dependences.  */
11083 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
11086 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
11087 gimple_seq_add_stmt (iseq
, g
);
/* Fill the remaining header slots with per-category counts: three
   count slots in the extended (idx == 5) layout, one otherwise.  */
11088 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
11090 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
11091 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
11092 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
11093 gimple_seq_add_stmt (iseq
, g
);
/* Second pass: emit the dependence addresses grouped by category so
   that same-kind entries are contiguous, as the counts above assume.  */
11095 for (i
= 0; i
< 4; i
++)
11099 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11100 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
/* Only clauses whose kind matches pass i are emitted on this pass;
   NOTE(review): the per-case filtering statements were lost in
   extraction.  */
11104 switch (OMP_CLAUSE_DEPEND_KIND (c
))
11106 case OMP_CLAUSE_DEPEND_IN
:
11110 case OMP_CLAUSE_DEPEND_OUT
:
11111 case OMP_CLAUSE_DEPEND_INOUT
:
11115 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
11119 case OMP_CLAUSE_DEPEND_DEPOBJ
:
11124 gcc_unreachable ();
/* Store the (gimplified) address of the dependence into the next
   free array slot.  */
11126 tree t
= OMP_CLAUSE_DECL (c
);
11127 t
= fold_convert (ptr_type_node
, t
);
11128 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
11129 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
11130 NULL_TREE
, NULL_TREE
);
11131 g
= gimple_build_assign (r
, t
);
11132 gimple_seq_add_stmt (iseq
, g
);
/* Publish the lowered form: a DEPEND_LAST clause carrying &array,
   prepended to the construct's clause chain.  */
11135 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
11136 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
11137 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
11138 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
/* After the construct the array is dead: clobber it so later passes
   can reuse its stack slot.  */
11140 tree clobber
= build_clobber (type
);
11141 g
= gimple_build_assign (array
, clobber
);
11142 gimple_seq_add_stmt (oseq
, g
);
11145 /* Lower the OpenMP parallel or task directive in the current statement
11146 in GSI_P. CTX holds context information for the directive. */
/* NOTE(review): lossy extraction — brace lines, several `else`/`return`
   arms and some assignment left-hand sides (e.g. the target of the
   `as_a <gbind *>` cast below) are missing.  Comments record only what
   the surviving tokens show; verify structure against upstream
   omp-low.c before editing.  */
11149 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11153 gimple
*stmt
= gsi_stmt (*gsi_p
);
/* par_bind: the GIMPLE_BIND wrapping the construct body; dep_bind is
   only created when depend clauses / task reductions need an outer
   bind to hold their setup and teardown sequences.  */
11154 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
11155 gimple_seq par_body
;
11156 location_t loc
= gimple_location (stmt
);
11158 clauses
= gimple_omp_taskreg_clauses (stmt
);
/* Special case: `#pragma omp taskwait depend(...)` — a task statement
   flagged taskwait_p; handled separately further below.  */
11159 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11160 && gimple_omp_task_taskwait_p (stmt
))
/* NOTE(review): the assignment target (presumably par_bind) was lost
   in extraction.  */
11168 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
11169 par_body
= gimple_bind_body (par_bind
);
11171 child_fn
= ctx
->cb
.dst_fn
;
/* For a non-combined parallel, scan the body to detect the combined
   parallel-workshare pattern and mark the statement accordingly.  */
11172 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
11173 && !gimple_omp_parallel_combined_p (stmt
))
11175 struct walk_stmt_info wi
;
11178 memset (&wi
, 0, sizeof (wi
));
11180 wi
.val_only
= true;
11181 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
/* NOTE(review): upstream guards this on the walk's result; the guard
   line is missing here.  */
11183 gimple_omp_parallel_set_combined_p (stmt
, true);
/* Lower depend clauses on a task into the runtime depend array; the
   setup goes into dep_ilist, the clobber into dep_olist, both later
   wrapped in dep_bind around the construct.  */
11185 gimple_seq dep_ilist
= NULL
;
11186 gimple_seq dep_olist
= NULL
;
11187 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11188 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11190 push_gimplify_context ();
11191 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11192 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
11193 &dep_ilist
, &dep_olist
);
/* taskwait-with-depend: no body to lower — just wrap the statement in
   dep_bind together with the depend setup/teardown and return.
   NOTE(review): the early `return` after this block was lost in
   extraction.  */
11196 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11197 && gimple_omp_task_taskwait_p (stmt
))
11201 gsi_replace (gsi_p
, dep_bind
, true);
11202 gimple_bind_add_seq (dep_bind
, dep_ilist
);
11203 gimple_bind_add_stmt (dep_bind
, stmt
);
11204 gimple_bind_add_seq (dep_bind
, dep_olist
);
11205 pop_gimplify_context (dep_bind
);
/* srecord_type is only set for tasks whose firstprivate data must be
   copied by a generated copy function.  */
11210 if (ctx
->srecord_type
)
11211 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
/* Task reductions (taskloop reduction / parallel _reductemp_) need
   their own init/fini sequences, hung off dep_bind as well.  */
11213 gimple_seq tskred_ilist
= NULL
;
11214 gimple_seq tskred_olist
= NULL
;
11215 if ((is_task_ctx (ctx
)
11216 && gimple_omp_task_taskloop_p (ctx
->stmt
)
11217 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
11218 OMP_CLAUSE_REDUCTION
))
11219 || (is_parallel_ctx (ctx
)
11220 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
11221 OMP_CLAUSE__REDUCTEMP_
)))
/* Reuse dep_bind when the depend lowering already made one.  */
11223 if (dep_bind
== NULL
)
11225 push_gimplify_context ();
11226 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
/* NOTE(review): the OMP_PARALLEL alternative of this ?: operand was
   lost in extraction.  */
11228 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
11230 gimple_omp_taskreg_clauses (ctx
->stmt
),
11231 &tskred_ilist
, &tskred_olist
);
11234 push_gimplify_context ();
/* Lower data-sharing clauses and the body itself.  par_ilist/par_olist
   receive per-clause entry/exit code; par_rlist the reduction merge
   (parallel only — tasks handle reductions elsewhere).  */
11236 gimple_seq par_olist
= NULL
;
11237 gimple_seq par_ilist
= NULL
;
11238 gimple_seq par_rlist
= NULL
;
11239 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
11240 lower_omp (&par_body
, ctx
);
11241 if (gimple_code (stmt
) != GIMPLE_OMP_TASK
)
11242 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
11244 /* Declare all the variables created by mapping and the variables
11245 declared in the scope of the parallel body. */
11246 record_vars_into (ctx
->block_vars
, child_fn
)
;
11247 maybe_remove_omp_member_access_dummy_vars (par_bind
);
11248 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
/* Build the sender side of the data-sharing record: a stack object
   (.omp_data_o) whose address is passed to the outlined child.
   NOTE(review): the assignment target (ctx->sender_decl) line was lost
   in extraction.  */
11250 if (ctx
->record_type
)
11253 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
11254 : ctx
->record_type
, ".omp_data_o");
11255 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11256 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11257 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
/* ilist/olist: code run in the parent before/after the construct to
   marshal clause data in and out of the record.  */
11260 gimple_seq olist
= NULL
;
11261 gimple_seq ilist
= NULL
;
11262 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
11263 lower_send_shared_vars (&ilist
, &olist
, ctx
);
/* Clobber the sender record after the construct — it is dead then.  */
11265 if (ctx
->record_type
)
11267 tree clobber
= build_clobber (TREE_TYPE (ctx
->sender_decl
));
11268 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
11272 /* Once all the expansions are done, sequence all the different
11273 fragments inside gimple_omp_body. */
11275 gimple_seq new_body
= NULL
;
/* Child prologue: receiver_decl = (cast) &sender_decl, so the outlined
   body can reach the shared record.  */
11277 if (ctx
->record_type
)
11279 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
11280 /* fixup_child_record_type might have changed receiver_decl's type. */
11281 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
11282 gimple_seq_add_stmt (&new_body
,
11283 gimple_build_assign (ctx
->receiver_decl
, t
));
/* Body order: clause entry code, the lowered body, reduction merge,
   optional cancellation label, clause exit code.  */
11286 gimple_seq_add_seq (&new_body
, par_ilist
);
11287 gimple_seq_add_seq (&new_body
, par_body
);
11288 gimple_seq_add_seq (&new_body
, par_rlist
);
11289 if (ctx
->cancellable
)
11290 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
11291 gimple_seq_add_seq (&new_body
, par_olist
);
11292 new_body
= maybe_catch_exception (new_body
);
/* Tasks additionally get an OMP_CONTINUE marker before the return.  */
11293 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
11294 gimple_seq_add_stmt (&new_body
,
11295 gimple_build_omp_continue (integer_zero_node
,
11296 integer_zero_node
));
11297 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
11298 gimple_omp_set_body (stmt
, new_body
);
/* Wrap the construct in a bind; preserve par_bind's BLOCK unless a
   dep_bind exists and par_bind has none.  */
11300 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
11301 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11303 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
/* Replace the original statement with the outermost bind (dep_bind if
   depend/reduction sequences exist, otherwise bind).  */
11304 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
11305 gimple_bind_add_seq (bind
, ilist
);
11306 gimple_bind_add_stmt (bind
, stmt
);
11307 gimple_bind_add_seq (bind
, olist
);
11309 pop_gimplify_context (NULL
);
/* When dep_bind exists, nest: depend setup, task-reduction setup, the
   construct's bind, then the teardown sequences, mirroring entry
   order.  NOTE(review): upstream guards this tail on dep_bind != NULL;
   the guard line is missing here.  */
11313 gimple_bind_add_seq (dep_bind
, dep_ilist
);
11314 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
11315 gimple_bind_add_stmt (dep_bind
, bind
);
11316 gimple_bind_add_seq (dep_bind
, tskred_olist
);
11317 gimple_bind_add_seq (dep_bind
, dep_olist
);
11318 pop_gimplify_context (dep_bind
);
11322 /* Lower the GIMPLE_OMP_TARGET in the current statement
11323 in GSI_P. CTX holds context information for the directive. */
11326 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11329 tree child_fn
, t
, c
;
11330 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
11331 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
11332 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
11333 location_t loc
= gimple_location (stmt
);
11334 bool offloaded
, data_region
;
11335 unsigned int map_cnt
= 0;
11337 offloaded
= is_gimple_omp_offloaded (stmt
);
11338 switch (gimple_omp_target_kind (stmt
))
11340 case GF_OMP_TARGET_KIND_REGION
:
11341 case GF_OMP_TARGET_KIND_UPDATE
:
11342 case GF_OMP_TARGET_KIND_ENTER_DATA
:
11343 case GF_OMP_TARGET_KIND_EXIT_DATA
:
11344 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
11345 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
11346 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
11347 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
11348 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
11349 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
11350 data_region
= false;
11352 case GF_OMP_TARGET_KIND_DATA
:
11353 case GF_OMP_TARGET_KIND_OACC_DATA
:
11354 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
11355 data_region
= true;
11358 gcc_unreachable ();
11361 clauses
= gimple_omp_target_clauses (stmt
);
11363 gimple_seq dep_ilist
= NULL
;
11364 gimple_seq dep_olist
= NULL
;
11365 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11367 push_gimplify_context ();
11368 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11369 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
11370 &dep_ilist
, &dep_olist
);
11377 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
11378 tgt_body
= gimple_bind_body (tgt_bind
);
11380 else if (data_region
)
11381 tgt_body
= gimple_omp_body (stmt
);
11382 child_fn
= ctx
->cb
.dst_fn
;
11384 push_gimplify_context ();
11387 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11388 switch (OMP_CLAUSE_CODE (c
))
11394 case OMP_CLAUSE_MAP
:
11396 /* First check what we're prepared to handle in the following. */
11397 switch (OMP_CLAUSE_MAP_KIND (c
))
11399 case GOMP_MAP_ALLOC
:
11401 case GOMP_MAP_FROM
:
11402 case GOMP_MAP_TOFROM
:
11403 case GOMP_MAP_POINTER
:
11404 case GOMP_MAP_TO_PSET
:
11405 case GOMP_MAP_DELETE
:
11406 case GOMP_MAP_RELEASE
:
11407 case GOMP_MAP_ALWAYS_TO
:
11408 case GOMP_MAP_ALWAYS_FROM
:
11409 case GOMP_MAP_ALWAYS_TOFROM
:
11410 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
11411 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
11412 case GOMP_MAP_STRUCT
:
11413 case GOMP_MAP_ALWAYS_POINTER
:
11415 case GOMP_MAP_IF_PRESENT
:
11416 case GOMP_MAP_FORCE_ALLOC
:
11417 case GOMP_MAP_FORCE_TO
:
11418 case GOMP_MAP_FORCE_FROM
:
11419 case GOMP_MAP_FORCE_TOFROM
:
11420 case GOMP_MAP_FORCE_PRESENT
:
11421 case GOMP_MAP_FORCE_DEVICEPTR
:
11422 case GOMP_MAP_DEVICE_RESIDENT
:
11423 case GOMP_MAP_LINK
:
11424 case GOMP_MAP_ATTACH
:
11425 case GOMP_MAP_DETACH
:
11426 case GOMP_MAP_FORCE_DETACH
:
11427 gcc_assert (is_gimple_omp_oacc (stmt
));
11430 gcc_unreachable ();
11434 case OMP_CLAUSE_TO
:
11435 case OMP_CLAUSE_FROM
:
11437 var
= OMP_CLAUSE_DECL (c
);
11440 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
11441 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11442 && (OMP_CLAUSE_MAP_KIND (c
)
11443 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
11448 if (DECL_SIZE (var
)
11449 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
11451 tree var2
= DECL_VALUE_EXPR (var
);
11452 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
11453 var2
= TREE_OPERAND (var2
, 0);
11454 gcc_assert (DECL_P (var2
));
11459 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11460 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11461 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
11463 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11465 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
11466 && varpool_node::get_create (var
)->offloadable
)
11469 tree type
= build_pointer_type (TREE_TYPE (var
));
11470 tree new_var
= lookup_decl (var
, ctx
);
11471 x
= create_tmp_var_raw (type
, get_name (new_var
));
11472 gimple_add_tmp_var (x
);
11473 x
= build_simple_mem_ref (x
);
11474 SET_DECL_VALUE_EXPR (new_var
, x
);
11475 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11480 if (!maybe_lookup_field (var
, ctx
))
11483 /* Don't remap compute constructs' reduction variables, because the
11484 intermediate result must be local to each gang. */
11485 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11486 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
11488 x
= build_receiver_ref (var
, true, ctx
);
11489 tree new_var
= lookup_decl (var
, ctx
);
11491 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11492 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11493 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11494 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11495 x
= build_simple_mem_ref (x
);
11496 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11498 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11499 if (omp_is_reference (new_var
)
11500 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
11501 || DECL_BY_REFERENCE (var
)))
11503 /* Create a local object to hold the instance
11505 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
11506 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
11507 tree inst
= create_tmp_var (type
, id
);
11508 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
11509 x
= build_fold_addr_expr (inst
);
11511 gimplify_assign (new_var
, x
, &fplist
);
11513 else if (DECL_P (new_var
))
11515 SET_DECL_VALUE_EXPR (new_var
, x
);
11516 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11519 gcc_unreachable ();
11524 case OMP_CLAUSE_FIRSTPRIVATE
:
11525 if (is_oacc_parallel_or_serial (ctx
))
11526 goto oacc_firstprivate
;
11528 var
= OMP_CLAUSE_DECL (c
);
11529 if (!omp_is_reference (var
)
11530 && !is_gimple_reg_type (TREE_TYPE (var
)))
11532 tree new_var
= lookup_decl (var
, ctx
);
11533 if (is_variable_sized (var
))
11535 tree pvar
= DECL_VALUE_EXPR (var
);
11536 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11537 pvar
= TREE_OPERAND (pvar
, 0);
11538 gcc_assert (DECL_P (pvar
));
11539 tree new_pvar
= lookup_decl (pvar
, ctx
);
11540 x
= build_fold_indirect_ref (new_pvar
);
11541 TREE_THIS_NOTRAP (x
) = 1;
11544 x
= build_receiver_ref (var
, true, ctx
);
11545 SET_DECL_VALUE_EXPR (new_var
, x
);
11546 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11550 case OMP_CLAUSE_PRIVATE
:
11551 if (is_gimple_omp_oacc (ctx
->stmt
))
11553 var
= OMP_CLAUSE_DECL (c
);
11554 if (is_variable_sized (var
))
11556 tree new_var
= lookup_decl (var
, ctx
);
11557 tree pvar
= DECL_VALUE_EXPR (var
);
11558 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11559 pvar
= TREE_OPERAND (pvar
, 0);
11560 gcc_assert (DECL_P (pvar
));
11561 tree new_pvar
= lookup_decl (pvar
, ctx
);
11562 x
= build_fold_indirect_ref (new_pvar
);
11563 TREE_THIS_NOTRAP (x
) = 1;
11564 SET_DECL_VALUE_EXPR (new_var
, x
);
11565 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11569 case OMP_CLAUSE_USE_DEVICE_PTR
:
11570 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11571 case OMP_CLAUSE_IS_DEVICE_PTR
:
11572 var
= OMP_CLAUSE_DECL (c
);
11574 if (is_variable_sized (var
))
11576 tree new_var
= lookup_decl (var
, ctx
);
11577 tree pvar
= DECL_VALUE_EXPR (var
);
11578 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11579 pvar
= TREE_OPERAND (pvar
, 0);
11580 gcc_assert (DECL_P (pvar
));
11581 tree new_pvar
= lookup_decl (pvar
, ctx
);
11582 x
= build_fold_indirect_ref (new_pvar
);
11583 TREE_THIS_NOTRAP (x
) = 1;
11584 SET_DECL_VALUE_EXPR (new_var
, x
);
11585 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11587 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
11588 && !omp_is_reference (var
)
11589 && !omp_is_allocatable_or_ptr (var
)
11590 && !lang_hooks
.decls
.omp_array_data (var
, true))
11591 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11593 tree new_var
= lookup_decl (var
, ctx
);
11594 tree type
= build_pointer_type (TREE_TYPE (var
));
11595 x
= create_tmp_var_raw (type
, get_name (new_var
));
11596 gimple_add_tmp_var (x
);
11597 x
= build_simple_mem_ref (x
);
11598 SET_DECL_VALUE_EXPR (new_var
, x
);
11599 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11603 tree new_var
= lookup_decl (var
, ctx
);
11604 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
11605 gimple_add_tmp_var (x
);
11606 SET_DECL_VALUE_EXPR (new_var
, x
);
11607 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11614 target_nesting_level
++;
11615 lower_omp (&tgt_body
, ctx
);
11616 target_nesting_level
--;
11618 else if (data_region
)
11619 lower_omp (&tgt_body
, ctx
);
11623 /* Declare all the variables created by mapping and the variables
11624 declared in the scope of the target body. */
11625 record_vars_into (ctx
->block_vars
, child_fn
);
11626 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
11627 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
11632 if (ctx
->record_type
)
11635 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
11636 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11637 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11638 t
= make_tree_vec (3);
11639 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
11640 TREE_VEC_ELT (t
, 1)
11641 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
11642 ".omp_data_sizes");
11643 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
11644 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
11645 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
11646 tree tkind_type
= short_unsigned_type_node
;
11647 int talign_shift
= 8;
11648 TREE_VEC_ELT (t
, 2)
11649 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
11650 ".omp_data_kinds");
11651 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
11652 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
11653 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
11654 gimple_omp_target_set_data_arg (stmt
, t
);
11656 vec
<constructor_elt
, va_gc
> *vsize
;
11657 vec
<constructor_elt
, va_gc
> *vkind
;
11658 vec_alloc (vsize
, map_cnt
);
11659 vec_alloc (vkind
, map_cnt
);
11660 unsigned int map_idx
= 0;
11662 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11663 switch (OMP_CLAUSE_CODE (c
))
11665 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
11666 unsigned int talign
;
11671 case OMP_CLAUSE_MAP
:
11672 case OMP_CLAUSE_TO
:
11673 case OMP_CLAUSE_FROM
:
11674 oacc_firstprivate_map
:
11676 ovar
= OMP_CLAUSE_DECL (c
);
11677 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11678 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11679 || (OMP_CLAUSE_MAP_KIND (c
)
11680 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
11682 if (!DECL_P (ovar
))
11684 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11685 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
11687 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
11688 == get_base_address (ovar
));
11689 nc
= OMP_CLAUSE_CHAIN (c
);
11690 ovar
= OMP_CLAUSE_DECL (nc
);
11694 tree x
= build_sender_ref (ovar
, ctx
);
11696 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
11697 gimplify_assign (x
, v
, &ilist
);
11703 if (DECL_SIZE (ovar
)
11704 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
11706 tree ovar2
= DECL_VALUE_EXPR (ovar
);
11707 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
11708 ovar2
= TREE_OPERAND (ovar2
, 0);
11709 gcc_assert (DECL_P (ovar2
));
11712 if (!maybe_lookup_field (ovar
, ctx
))
11716 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
11717 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
11718 talign
= DECL_ALIGN_UNIT (ovar
);
11721 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11722 x
= build_sender_ref (ovar
, ctx
);
11724 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11725 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11726 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11727 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
11729 gcc_assert (offloaded
);
11731 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
11732 mark_addressable (avar
);
11733 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
11734 talign
= DECL_ALIGN_UNIT (avar
);
11735 avar
= build_fold_addr_expr (avar
);
11736 gimplify_assign (x
, avar
, &ilist
);
11738 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11740 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11741 if (!omp_is_reference (var
))
11743 if (is_gimple_reg (var
)
11744 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11745 TREE_NO_WARNING (var
) = 1;
11746 var
= build_fold_addr_expr (var
);
11749 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11750 gimplify_assign (x
, var
, &ilist
);
11752 else if (is_gimple_reg (var
))
11754 gcc_assert (offloaded
);
11755 tree avar
= create_tmp_var (TREE_TYPE (var
));
11756 mark_addressable (avar
);
11757 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
11758 if (GOMP_MAP_COPY_TO_P (map_kind
)
11759 || map_kind
== GOMP_MAP_POINTER
11760 || map_kind
== GOMP_MAP_TO_PSET
11761 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11763 /* If we need to initialize a temporary
11764 with VAR because it is not addressable, and
11765 the variable hasn't been initialized yet, then
11766 we'll get a warning for the store to avar.
11767 Don't warn in that case, the mapping might
11769 TREE_NO_WARNING (var
) = 1;
11770 gimplify_assign (avar
, var
, &ilist
);
11772 avar
= build_fold_addr_expr (avar
);
11773 gimplify_assign (x
, avar
, &ilist
);
11774 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
11775 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11776 && !TYPE_READONLY (TREE_TYPE (var
)))
11778 x
= unshare_expr (x
);
11779 x
= build_simple_mem_ref (x
);
11780 gimplify_assign (var
, x
, &olist
);
11785 /* While MAP is handled explicitly by the FE,
11786 for 'target update', only the identified is passed. */
11787 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
11788 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
11789 && (omp_is_allocatable_or_ptr (var
)
11790 && omp_check_optional_argument (var
, false)))
11791 var
= build_fold_indirect_ref (var
);
11792 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
11793 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
11794 || (!omp_is_allocatable_or_ptr (var
)
11795 && !omp_check_optional_argument (var
, false)))
11796 var
= build_fold_addr_expr (var
);
11797 gimplify_assign (x
, var
, &ilist
);
11801 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11803 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11804 s
= TREE_TYPE (ovar
);
11805 if (TREE_CODE (s
) == REFERENCE_TYPE
11806 || omp_check_optional_argument (ovar
, false))
11808 s
= TYPE_SIZE_UNIT (s
);
11811 s
= OMP_CLAUSE_SIZE (c
);
11812 if (s
== NULL_TREE
)
11813 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
11814 s
= fold_convert (size_type_node
, s
);
11815 purpose
= size_int (map_idx
++);
11816 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11817 if (TREE_CODE (s
) != INTEGER_CST
)
11818 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
11820 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
11821 switch (OMP_CLAUSE_CODE (c
))
11823 case OMP_CLAUSE_MAP
:
11824 tkind
= OMP_CLAUSE_MAP_KIND (c
);
11825 tkind_zero
= tkind
;
11826 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
11829 case GOMP_MAP_ALLOC
:
11830 case GOMP_MAP_IF_PRESENT
:
11832 case GOMP_MAP_FROM
:
11833 case GOMP_MAP_TOFROM
:
11834 case GOMP_MAP_ALWAYS_TO
:
11835 case GOMP_MAP_ALWAYS_FROM
:
11836 case GOMP_MAP_ALWAYS_TOFROM
:
11837 case GOMP_MAP_RELEASE
:
11838 case GOMP_MAP_FORCE_TO
:
11839 case GOMP_MAP_FORCE_FROM
:
11840 case GOMP_MAP_FORCE_TOFROM
:
11841 case GOMP_MAP_FORCE_PRESENT
:
11842 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
11844 case GOMP_MAP_DELETE
:
11845 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
11849 if (tkind_zero
!= tkind
)
11851 if (integer_zerop (s
))
11852 tkind
= tkind_zero
;
11853 else if (integer_nonzerop (s
))
11854 tkind_zero
= tkind
;
11857 case OMP_CLAUSE_FIRSTPRIVATE
:
11858 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11859 tkind
= GOMP_MAP_TO
;
11860 tkind_zero
= tkind
;
11862 case OMP_CLAUSE_TO
:
11863 tkind
= GOMP_MAP_TO
;
11864 tkind_zero
= tkind
;
11866 case OMP_CLAUSE_FROM
:
11867 tkind
= GOMP_MAP_FROM
;
11868 tkind_zero
= tkind
;
11871 gcc_unreachable ();
11873 gcc_checking_assert (tkind
11874 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11875 gcc_checking_assert (tkind_zero
11876 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11877 talign
= ceil_log2 (talign
);
11878 tkind
|= talign
<< talign_shift
;
11879 tkind_zero
|= talign
<< talign_shift
;
11880 gcc_checking_assert (tkind
11881 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11882 gcc_checking_assert (tkind_zero
11883 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11884 if (tkind
== tkind_zero
)
11885 x
= build_int_cstu (tkind_type
, tkind
);
11888 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
11889 x
= build3 (COND_EXPR
, tkind_type
,
11890 fold_build2 (EQ_EXPR
, boolean_type_node
,
11891 unshare_expr (s
), size_zero_node
),
11892 build_int_cstu (tkind_type
, tkind_zero
),
11893 build_int_cstu (tkind_type
, tkind
));
11895 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
11900 case OMP_CLAUSE_FIRSTPRIVATE
:
11901 if (is_oacc_parallel_or_serial (ctx
))
11902 goto oacc_firstprivate_map
;
11903 ovar
= OMP_CLAUSE_DECL (c
);
11904 if (omp_is_reference (ovar
))
11905 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11907 talign
= DECL_ALIGN_UNIT (ovar
);
11908 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11909 x
= build_sender_ref (ovar
, ctx
);
11910 tkind
= GOMP_MAP_FIRSTPRIVATE
;
11911 type
= TREE_TYPE (ovar
);
11912 if (omp_is_reference (ovar
))
11913 type
= TREE_TYPE (type
);
11914 if ((INTEGRAL_TYPE_P (type
)
11915 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
11916 || TREE_CODE (type
) == POINTER_TYPE
)
11918 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
11920 if (omp_is_reference (var
))
11921 t
= build_simple_mem_ref (var
);
11922 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11923 TREE_NO_WARNING (var
) = 1;
11924 if (TREE_CODE (type
) != POINTER_TYPE
)
11925 t
= fold_convert (pointer_sized_int_node
, t
);
11926 t
= fold_convert (TREE_TYPE (x
), t
);
11927 gimplify_assign (x
, t
, &ilist
);
11929 else if (omp_is_reference (var
))
11930 gimplify_assign (x
, var
, &ilist
);
11931 else if (is_gimple_reg (var
))
11933 tree avar
= create_tmp_var (TREE_TYPE (var
));
11934 mark_addressable (avar
);
11935 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11936 TREE_NO_WARNING (var
) = 1;
11937 gimplify_assign (avar
, var
, &ilist
);
11938 avar
= build_fold_addr_expr (avar
);
11939 gimplify_assign (x
, avar
, &ilist
);
11943 var
= build_fold_addr_expr (var
);
11944 gimplify_assign (x
, var
, &ilist
);
11946 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
11948 else if (omp_is_reference (ovar
))
11949 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11951 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
11952 s
= fold_convert (size_type_node
, s
);
11953 purpose
= size_int (map_idx
++);
11954 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11955 if (TREE_CODE (s
) != INTEGER_CST
)
11956 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
11958 gcc_checking_assert (tkind
11959 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11960 talign
= ceil_log2 (talign
);
11961 tkind
|= talign
<< talign_shift
;
11962 gcc_checking_assert (tkind
11963 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11964 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
11965 build_int_cstu (tkind_type
, tkind
));
11968 case OMP_CLAUSE_USE_DEVICE_PTR
:
11969 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11970 case OMP_CLAUSE_IS_DEVICE_PTR
:
11971 ovar
= OMP_CLAUSE_DECL (c
);
11972 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11974 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
11976 tkind
= (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
11977 ? GOMP_MAP_USE_DEVICE_PTR
: GOMP_MAP_FIRSTPRIVATE_INT
);
11978 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
11980 else if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
11982 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
11983 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
11987 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
11988 x
= build_sender_ref (ovar
, ctx
);
11991 if (is_gimple_omp_oacc (ctx
->stmt
))
11993 gcc_assert (tkind
== GOMP_MAP_USE_DEVICE_PTR
);
11995 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
))
11996 tkind
= GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT
;
11999 type
= TREE_TYPE (ovar
);
12000 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
12001 var
= lang_hooks
.decls
.omp_array_data (ovar
, false);
12002 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12003 && !omp_is_reference (ovar
)
12004 && !omp_is_allocatable_or_ptr (ovar
))
12005 || TREE_CODE (type
) == ARRAY_TYPE
)
12006 var
= build_fold_addr_expr (var
);
12009 if (omp_is_reference (ovar
)
12010 || omp_check_optional_argument (ovar
, false)
12011 || omp_is_allocatable_or_ptr (ovar
))
12013 type
= TREE_TYPE (type
);
12014 if (TREE_CODE (type
) != ARRAY_TYPE
12015 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
12016 && !omp_is_allocatable_or_ptr (ovar
))
12017 || (omp_is_reference (ovar
)
12018 && omp_is_allocatable_or_ptr (ovar
))))
12019 var
= build_simple_mem_ref (var
);
12020 var
= fold_convert (TREE_TYPE (x
), var
);
12024 present
= omp_check_optional_argument (ovar
, true);
12027 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
12028 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
12029 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
12030 tree new_x
= unshare_expr (x
);
12031 gimplify_expr (&present
, &ilist
, NULL
, is_gimple_val
,
12033 gcond
*cond
= gimple_build_cond_from_tree (present
,
12036 gimple_seq_add_stmt (&ilist
, cond
);
12037 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_label
));
12038 gimplify_assign (new_x
, null_pointer_node
, &ilist
);
12039 gimple_seq_add_stmt (&ilist
, gimple_build_goto (opt_arg_label
));
12040 gimple_seq_add_stmt (&ilist
,
12041 gimple_build_label (notnull_label
));
12042 gimplify_assign (x
, var
, &ilist
);
12043 gimple_seq_add_stmt (&ilist
,
12044 gimple_build_label (opt_arg_label
));
12047 gimplify_assign (x
, var
, &ilist
);
12049 purpose
= size_int (map_idx
++);
12050 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
12051 gcc_checking_assert (tkind
12052 < (HOST_WIDE_INT_C (1U) << talign_shift
));
12053 gcc_checking_assert (tkind
12054 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
12055 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
12056 build_int_cstu (tkind_type
, tkind
));
12060 gcc_assert (map_idx
== map_cnt
);
12062 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
12063 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
12064 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
12065 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
12066 for (int i
= 1; i
<= 2; i
++)
12067 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
12069 gimple_seq initlist
= NULL
;
12070 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
12071 TREE_VEC_ELT (t
, i
)),
12072 &initlist
, true, NULL_TREE
);
12073 gimple_seq_add_seq (&ilist
, initlist
);
12075 tree clobber
= build_clobber (TREE_TYPE (TREE_VEC_ELT (t
, i
)));
12076 gimple_seq_add_stmt (&olist
,
12077 gimple_build_assign (TREE_VEC_ELT (t
, i
),
12081 tree clobber
= build_clobber (ctx
->record_type
);
12082 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
12086 /* Once all the expansions are done, sequence all the different
12087 fragments inside gimple_omp_body. */
12092 && ctx
->record_type
)
12094 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
12095 /* fixup_child_record_type might have changed receiver_decl's type. */
12096 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
12097 gimple_seq_add_stmt (&new_body
,
12098 gimple_build_assign (ctx
->receiver_decl
, t
));
12100 gimple_seq_add_seq (&new_body
, fplist
);
12102 if (offloaded
|| data_region
)
12104 tree prev
= NULL_TREE
;
12105 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12106 switch (OMP_CLAUSE_CODE (c
))
12111 case OMP_CLAUSE_FIRSTPRIVATE
:
12112 if (is_gimple_omp_oacc (ctx
->stmt
))
12114 var
= OMP_CLAUSE_DECL (c
);
12115 if (omp_is_reference (var
)
12116 || is_gimple_reg_type (TREE_TYPE (var
)))
12118 tree new_var
= lookup_decl (var
, ctx
);
12120 type
= TREE_TYPE (var
);
12121 if (omp_is_reference (var
))
12122 type
= TREE_TYPE (type
);
12123 if ((INTEGRAL_TYPE_P (type
)
12124 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
12125 || TREE_CODE (type
) == POINTER_TYPE
)
12127 x
= build_receiver_ref (var
, false, ctx
);
12128 if (TREE_CODE (type
) != POINTER_TYPE
)
12129 x
= fold_convert (pointer_sized_int_node
, x
);
12130 x
= fold_convert (type
, x
);
12131 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12133 if (omp_is_reference (var
))
12135 tree v
= create_tmp_var_raw (type
, get_name (var
));
12136 gimple_add_tmp_var (v
);
12137 TREE_ADDRESSABLE (v
) = 1;
12138 gimple_seq_add_stmt (&new_body
,
12139 gimple_build_assign (v
, x
));
12140 x
= build_fold_addr_expr (v
);
12142 gimple_seq_add_stmt (&new_body
,
12143 gimple_build_assign (new_var
, x
));
12147 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
12148 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12150 gimple_seq_add_stmt (&new_body
,
12151 gimple_build_assign (new_var
, x
));
12154 else if (is_variable_sized (var
))
12156 tree pvar
= DECL_VALUE_EXPR (var
);
12157 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12158 pvar
= TREE_OPERAND (pvar
, 0);
12159 gcc_assert (DECL_P (pvar
));
12160 tree new_var
= lookup_decl (pvar
, ctx
);
12161 x
= build_receiver_ref (var
, false, ctx
);
12162 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12163 gimple_seq_add_stmt (&new_body
,
12164 gimple_build_assign (new_var
, x
));
12167 case OMP_CLAUSE_PRIVATE
:
12168 if (is_gimple_omp_oacc (ctx
->stmt
))
12170 var
= OMP_CLAUSE_DECL (c
);
12171 if (omp_is_reference (var
))
12173 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12174 tree new_var
= lookup_decl (var
, ctx
);
12175 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
12176 if (TREE_CONSTANT (x
))
12178 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
12180 gimple_add_tmp_var (x
);
12181 TREE_ADDRESSABLE (x
) = 1;
12182 x
= build_fold_addr_expr_loc (clause_loc
, x
);
12187 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12188 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12189 gimple_seq_add_stmt (&new_body
,
12190 gimple_build_assign (new_var
, x
));
12193 case OMP_CLAUSE_USE_DEVICE_PTR
:
12194 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12195 case OMP_CLAUSE_IS_DEVICE_PTR
:
12197 gimple_seq assign_body
;
12198 bool is_array_data
;
12199 bool do_optional_check
;
12200 assign_body
= NULL
;
12201 do_optional_check
= false;
12202 var
= OMP_CLAUSE_DECL (c
);
12203 is_array_data
= lang_hooks
.decls
.omp_array_data (var
, true) != NULL
;
12205 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
12206 x
= build_sender_ref (is_array_data
12207 ? (splay_tree_key
) &DECL_NAME (var
)
12208 : (splay_tree_key
) &DECL_UID (var
), ctx
);
12210 x
= build_receiver_ref (var
, false, ctx
);
12214 bool is_ref
= omp_is_reference (var
);
12215 do_optional_check
= true;
12216 /* First, we copy the descriptor data from the host; then
12217 we update its data to point to the target address. */
12218 new_var
= lookup_decl (var
, ctx
);
12219 new_var
= DECL_VALUE_EXPR (new_var
);
12224 var
= build_fold_indirect_ref (var
);
12225 gimplify_expr (&var
, &assign_body
, NULL
, is_gimple_val
,
12227 v
= create_tmp_var_raw (TREE_TYPE (var
), get_name (var
));
12228 gimple_add_tmp_var (v
);
12229 TREE_ADDRESSABLE (v
) = 1;
12230 gimple_seq_add_stmt (&assign_body
,
12231 gimple_build_assign (v
, var
));
12232 tree rhs
= build_fold_addr_expr (v
);
12233 gimple_seq_add_stmt (&assign_body
,
12234 gimple_build_assign (new_var
, rhs
));
12237 gimple_seq_add_stmt (&assign_body
,
12238 gimple_build_assign (new_var
, var
));
12240 tree v2
= lang_hooks
.decls
.omp_array_data (unshare_expr (v
), false);
12242 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12243 gimple_seq_add_stmt (&assign_body
,
12244 gimple_build_assign (v2
, x
));
12246 else if (is_variable_sized (var
))
12248 tree pvar
= DECL_VALUE_EXPR (var
);
12249 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12250 pvar
= TREE_OPERAND (pvar
, 0);
12251 gcc_assert (DECL_P (pvar
));
12252 new_var
= lookup_decl (pvar
, ctx
);
12253 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12254 gimple_seq_add_stmt (&assign_body
,
12255 gimple_build_assign (new_var
, x
));
12257 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12258 && !omp_is_reference (var
)
12259 && !omp_is_allocatable_or_ptr (var
))
12260 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12262 new_var
= lookup_decl (var
, ctx
);
12263 new_var
= DECL_VALUE_EXPR (new_var
);
12264 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
12265 new_var
= TREE_OPERAND (new_var
, 0);
12266 gcc_assert (DECL_P (new_var
));
12267 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12268 gimple_seq_add_stmt (&assign_body
,
12269 gimple_build_assign (new_var
, x
));
12273 tree type
= TREE_TYPE (var
);
12274 new_var
= lookup_decl (var
, ctx
);
12275 if (omp_is_reference (var
))
12277 type
= TREE_TYPE (type
);
12278 if (TREE_CODE (type
) != ARRAY_TYPE
12279 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
12280 || (omp_is_reference (var
)
12281 && omp_is_allocatable_or_ptr (var
))))
12283 tree v
= create_tmp_var_raw (type
, get_name (var
));
12284 gimple_add_tmp_var (v
);
12285 TREE_ADDRESSABLE (v
) = 1;
12286 x
= fold_convert (type
, x
);
12287 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
,
12289 gimple_seq_add_stmt (&assign_body
,
12290 gimple_build_assign (v
, x
));
12291 x
= build_fold_addr_expr (v
);
12292 do_optional_check
= true;
12295 new_var
= DECL_VALUE_EXPR (new_var
);
12296 x
= fold_convert (TREE_TYPE (new_var
), x
);
12297 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12298 gimple_seq_add_stmt (&assign_body
,
12299 gimple_build_assign (new_var
, x
));
12302 present
= (do_optional_check
12303 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c
), true)
12307 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
12308 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
12309 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
12310 glabel
*null_glabel
= gimple_build_label (null_label
);
12311 glabel
*notnull_glabel
= gimple_build_label (notnull_label
);
12312 ggoto
*opt_arg_ggoto
= gimple_build_goto (opt_arg_label
);
12313 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12315 gimplify_expr (&present
, &new_body
, NULL
, is_gimple_val
,
12317 gcond
*cond
= gimple_build_cond_from_tree (present
,
12320 gimple_seq_add_stmt (&new_body
, cond
);
12321 gimple_seq_add_stmt (&new_body
, null_glabel
);
12322 gimplify_assign (new_var
, null_pointer_node
, &new_body
);
12323 gimple_seq_add_stmt (&new_body
, opt_arg_ggoto
);
12324 gimple_seq_add_stmt (&new_body
, notnull_glabel
);
12325 gimple_seq_add_seq (&new_body
, assign_body
);
12326 gimple_seq_add_stmt (&new_body
,
12327 gimple_build_label (opt_arg_label
));
12330 gimple_seq_add_seq (&new_body
, assign_body
);
12333 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12334 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12335 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12336 or references to VLAs. */
12337 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12338 switch (OMP_CLAUSE_CODE (c
))
12343 case OMP_CLAUSE_MAP
:
12344 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12345 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
12347 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12348 poly_int64 offset
= 0;
12350 var
= OMP_CLAUSE_DECL (c
);
12352 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
12353 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
12355 && varpool_node::get_create (var
)->offloadable
)
12357 if (TREE_CODE (var
) == INDIRECT_REF
12358 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
12359 var
= TREE_OPERAND (var
, 0);
12360 if (TREE_CODE (var
) == COMPONENT_REF
)
12362 var
= get_addr_base_and_unit_offset (var
, &offset
);
12363 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
12365 else if (DECL_SIZE (var
)
12366 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12368 tree var2
= DECL_VALUE_EXPR (var
);
12369 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12370 var2
= TREE_OPERAND (var2
, 0);
12371 gcc_assert (DECL_P (var2
));
12374 tree new_var
= lookup_decl (var
, ctx
), x
;
12375 tree type
= TREE_TYPE (new_var
);
12377 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
12378 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
12381 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
12383 new_var
= build2 (MEM_REF
, type
,
12384 build_fold_addr_expr (new_var
),
12385 build_int_cst (build_pointer_type (type
),
12388 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
12390 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
12391 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
12392 new_var
= build2 (MEM_REF
, type
,
12393 build_fold_addr_expr (new_var
),
12394 build_int_cst (build_pointer_type (type
),
12398 is_ref
= omp_is_reference (var
);
12399 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
12401 bool ref_to_array
= false;
12404 type
= TREE_TYPE (type
);
12405 if (TREE_CODE (type
) == ARRAY_TYPE
)
12407 type
= build_pointer_type (type
);
12408 ref_to_array
= true;
12411 else if (TREE_CODE (type
) == ARRAY_TYPE
)
12413 tree decl2
= DECL_VALUE_EXPR (new_var
);
12414 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
12415 decl2
= TREE_OPERAND (decl2
, 0);
12416 gcc_assert (DECL_P (decl2
));
12418 type
= TREE_TYPE (new_var
);
12420 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
12421 x
= fold_convert_loc (clause_loc
, type
, x
);
12422 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
12424 tree bias
= OMP_CLAUSE_SIZE (c
);
12426 bias
= lookup_decl (bias
, ctx
);
12427 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
12428 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
12430 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
12431 TREE_TYPE (x
), x
, bias
);
12434 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12435 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12436 if (is_ref
&& !ref_to_array
)
12438 tree t
= create_tmp_var_raw (type
, get_name (var
));
12439 gimple_add_tmp_var (t
);
12440 TREE_ADDRESSABLE (t
) = 1;
12441 gimple_seq_add_stmt (&new_body
,
12442 gimple_build_assign (t
, x
));
12443 x
= build_fold_addr_expr_loc (clause_loc
, t
);
12445 gimple_seq_add_stmt (&new_body
,
12446 gimple_build_assign (new_var
, x
));
12449 else if (OMP_CLAUSE_CHAIN (c
)
12450 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
12452 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
12453 == GOMP_MAP_FIRSTPRIVATE_POINTER
12454 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
12455 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
12458 case OMP_CLAUSE_PRIVATE
:
12459 var
= OMP_CLAUSE_DECL (c
);
12460 if (is_variable_sized (var
))
12462 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12463 tree new_var
= lookup_decl (var
, ctx
);
12464 tree pvar
= DECL_VALUE_EXPR (var
);
12465 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12466 pvar
= TREE_OPERAND (pvar
, 0);
12467 gcc_assert (DECL_P (pvar
));
12468 tree new_pvar
= lookup_decl (pvar
, ctx
);
12469 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
12470 tree al
= size_int (DECL_ALIGN (var
));
12471 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
12472 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
12473 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
12474 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12475 gimple_seq_add_stmt (&new_body
,
12476 gimple_build_assign (new_pvar
, x
));
12478 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
12480 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12481 tree new_var
= lookup_decl (var
, ctx
);
12482 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
12483 if (TREE_CONSTANT (x
))
12488 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
12489 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
12490 tree al
= size_int (TYPE_ALIGN (rtype
));
12491 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
12494 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12495 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12496 gimple_seq_add_stmt (&new_body
,
12497 gimple_build_assign (new_var
, x
));
12502 gimple_seq fork_seq
= NULL
;
12503 gimple_seq join_seq
= NULL
;
12505 if (is_oacc_parallel_or_serial (ctx
))
12507 /* If there are reductions on the offloaded region itself, treat
12508 them as a dummy GANG loop. */
12509 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
12511 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
12512 false, NULL
, NULL
, &fork_seq
, &join_seq
, ctx
);
12515 gimple_seq_add_seq (&new_body
, fork_seq
);
12516 gimple_seq_add_seq (&new_body
, tgt_body
);
12517 gimple_seq_add_seq (&new_body
, join_seq
);
12520 new_body
= maybe_catch_exception (new_body
);
12522 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12523 gimple_omp_set_body (stmt
, new_body
);
12526 bind
= gimple_build_bind (NULL
, NULL
,
12527 tgt_bind
? gimple_bind_block (tgt_bind
)
12529 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12530 gimple_bind_add_seq (bind
, ilist
);
12531 gimple_bind_add_stmt (bind
, stmt
);
12532 gimple_bind_add_seq (bind
, olist
);
12534 pop_gimplify_context (NULL
);
12538 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12539 gimple_bind_add_stmt (dep_bind
, bind
);
12540 gimple_bind_add_seq (dep_bind
, dep_olist
);
12541 pop_gimplify_context (dep_bind
);
12545 /* Expand code for an OpenMP teams directive. */
12548 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12550 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
12551 push_gimplify_context ();
12553 tree block
= make_node (BLOCK
);
12554 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
12555 gsi_replace (gsi_p
, bind
, true);
12556 gimple_seq bind_body
= NULL
;
12557 gimple_seq dlist
= NULL
;
12558 gimple_seq olist
= NULL
;
12560 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
12561 OMP_CLAUSE_NUM_TEAMS
);
12562 if (num_teams
== NULL_TREE
)
12563 num_teams
= build_int_cst (unsigned_type_node
, 0);
12566 num_teams
= OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams
);
12567 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
12568 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
12570 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
12571 OMP_CLAUSE_THREAD_LIMIT
);
12572 if (thread_limit
== NULL_TREE
)
12573 thread_limit
= build_int_cst (unsigned_type_node
, 0);
12576 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
12577 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
12578 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
12582 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
12583 &bind_body
, &dlist
, ctx
, NULL
);
12584 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
12585 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
,
12587 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
12589 location_t loc
= gimple_location (teams_stmt
);
12590 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS
);
12591 gimple
*call
= gimple_build_call (decl
, 2, num_teams
, thread_limit
);
12592 gimple_set_location (call
, loc
);
12593 gimple_seq_add_stmt (&bind_body
, call
);
12595 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
12596 gimple_omp_set_body (teams_stmt
, NULL
);
12597 gimple_seq_add_seq (&bind_body
, olist
);
12598 gimple_seq_add_seq (&bind_body
, dlist
);
12599 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
12600 gimple_bind_set_body (bind
, bind_body
);
12602 pop_gimplify_context (bind
);
12604 gimple_bind_append_vars (bind
, ctx
->block_vars
);
12605 BLOCK_VARS (block
) = ctx
->block_vars
;
12606 if (BLOCK_VARS (block
))
12607 TREE_USED (block
) = 1;
12610 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12611 regimplified. If DATA is non-NULL, lower_omp_1 is outside
12612 of OMP context, but with task_shared_vars set. */
12615 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
12620 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12621 if (VAR_P (t
) && data
== NULL
&& DECL_HAS_VALUE_EXPR_P (t
))
12624 if (task_shared_vars
12626 && bitmap_bit_p (task_shared_vars
, DECL_UID (t
)))
12629 /* If a global variable has been privatized, TREE_CONSTANT on
12630 ADDR_EXPR might be wrong. */
12631 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
12632 recompute_tree_invariant_for_addr_expr (t
);
12634 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
12638 /* Data to be communicated between lower_omp_regimplify_operands and
12639 lower_omp_regimplify_operands_p. */
12641 struct lower_omp_regimplify_operands_data
12647 /* Helper function for lower_omp_regimplify_operands. Find
12648 omp_member_access_dummy_var vars and adjust temporarily their
12649 DECL_VALUE_EXPRs if needed. */
12652 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
12655 tree t
= omp_member_access_dummy_var (*tp
);
12658 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
12659 lower_omp_regimplify_operands_data
*ldata
12660 = (lower_omp_regimplify_operands_data
*) wi
->info
;
12661 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
12664 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
12665 ldata
->decls
->safe_push (*tp
);
12666 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
12667 SET_DECL_VALUE_EXPR (*tp
, v
);
12670 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
12674 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12675 of omp_member_access_dummy_var vars during regimplification. */
12678 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
12679 gimple_stmt_iterator
*gsi_p
)
12681 auto_vec
<tree
, 10> decls
;
12684 struct walk_stmt_info wi
;
12685 memset (&wi
, '\0', sizeof (wi
));
12686 struct lower_omp_regimplify_operands_data data
;
12688 data
.decls
= &decls
;
12690 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
12692 gimple_regimplify_operands (stmt
, gsi_p
);
12693 while (!decls
.is_empty ())
12695 tree t
= decls
.pop ();
12696 tree v
= decls
.pop ();
12697 SET_DECL_VALUE_EXPR (t
, v
);
12702 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12704 gimple
*stmt
= gsi_stmt (*gsi_p
);
12705 struct walk_stmt_info wi
;
12708 if (gimple_has_location (stmt
))
12709 input_location
= gimple_location (stmt
);
12711 if (task_shared_vars
)
12712 memset (&wi
, '\0', sizeof (wi
));
12714 /* If we have issued syntax errors, avoid doing any heavy lifting.
12715 Just replace the OMP directives with a NOP to avoid
12716 confusing RTL expansion. */
12717 if (seen_error () && is_gimple_omp (stmt
))
12719 gsi_replace (gsi_p
, gimple_build_nop (), true);
12723 switch (gimple_code (stmt
))
12727 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
12728 if ((ctx
|| task_shared_vars
)
12729 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
12730 lower_omp_regimplify_p
,
12731 ctx
? NULL
: &wi
, NULL
)
12732 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
12733 lower_omp_regimplify_p
,
12734 ctx
? NULL
: &wi
, NULL
)))
12735 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
12739 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
12741 case GIMPLE_EH_FILTER
:
12742 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
12745 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
12746 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
12748 case GIMPLE_TRANSACTION
:
12749 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
12753 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
12754 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
12756 case GIMPLE_OMP_PARALLEL
:
12757 case GIMPLE_OMP_TASK
:
12758 ctx
= maybe_lookup_ctx (stmt
);
12760 if (ctx
->cancellable
)
12761 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12762 lower_omp_taskreg (gsi_p
, ctx
);
12764 case GIMPLE_OMP_FOR
:
12765 ctx
= maybe_lookup_ctx (stmt
);
12767 if (ctx
->cancellable
)
12768 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12769 lower_omp_for (gsi_p
, ctx
);
12771 case GIMPLE_OMP_SECTIONS
:
12772 ctx
= maybe_lookup_ctx (stmt
);
12774 if (ctx
->cancellable
)
12775 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12776 lower_omp_sections (gsi_p
, ctx
);
12778 case GIMPLE_OMP_SINGLE
:
12779 ctx
= maybe_lookup_ctx (stmt
);
12781 lower_omp_single (gsi_p
, ctx
);
12783 case GIMPLE_OMP_MASTER
:
12784 ctx
= maybe_lookup_ctx (stmt
);
12786 lower_omp_master (gsi_p
, ctx
);
12788 case GIMPLE_OMP_TASKGROUP
:
12789 ctx
= maybe_lookup_ctx (stmt
);
12791 lower_omp_taskgroup (gsi_p
, ctx
);
12793 case GIMPLE_OMP_ORDERED
:
12794 ctx
= maybe_lookup_ctx (stmt
);
12796 lower_omp_ordered (gsi_p
, ctx
);
12798 case GIMPLE_OMP_SCAN
:
12799 ctx
= maybe_lookup_ctx (stmt
);
12801 lower_omp_scan (gsi_p
, ctx
);
12803 case GIMPLE_OMP_CRITICAL
:
12804 ctx
= maybe_lookup_ctx (stmt
);
12806 lower_omp_critical (gsi_p
, ctx
);
12808 case GIMPLE_OMP_ATOMIC_LOAD
:
12809 if ((ctx
|| task_shared_vars
)
12810 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12811 as_a
<gomp_atomic_load
*> (stmt
)),
12812 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
12813 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
12815 case GIMPLE_OMP_TARGET
:
12816 ctx
= maybe_lookup_ctx (stmt
);
12818 lower_omp_target (gsi_p
, ctx
);
12820 case GIMPLE_OMP_TEAMS
:
12821 ctx
= maybe_lookup_ctx (stmt
);
12823 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
12824 lower_omp_taskreg (gsi_p
, ctx
);
12826 lower_omp_teams (gsi_p
, ctx
);
12830 call_stmt
= as_a
<gcall
*> (stmt
);
12831 fndecl
= gimple_call_fndecl (call_stmt
);
12833 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
12834 switch (DECL_FUNCTION_CODE (fndecl
))
12836 case BUILT_IN_GOMP_BARRIER
:
12840 case BUILT_IN_GOMP_CANCEL
:
12841 case BUILT_IN_GOMP_CANCELLATION_POINT
:
12844 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
12845 cctx
= cctx
->outer
;
12846 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
12847 if (!cctx
->cancellable
)
12849 if (DECL_FUNCTION_CODE (fndecl
)
12850 == BUILT_IN_GOMP_CANCELLATION_POINT
)
12852 stmt
= gimple_build_nop ();
12853 gsi_replace (gsi_p
, stmt
, false);
12857 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
12859 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
12860 gimple_call_set_fndecl (call_stmt
, fndecl
);
12861 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
12864 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
12865 gimple_call_set_lhs (call_stmt
, lhs
);
12866 tree fallthru_label
;
12867 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
12869 g
= gimple_build_label (fallthru_label
);
12870 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
12871 g
= gimple_build_cond (NE_EXPR
, lhs
,
12872 fold_convert (TREE_TYPE (lhs
),
12873 boolean_false_node
),
12874 cctx
->cancel_label
, fallthru_label
);
12875 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
12882 case GIMPLE_ASSIGN
:
12883 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
12885 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
12886 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
12887 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
12888 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
12889 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
12890 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
12891 && (gimple_omp_target_kind (up
->stmt
)
12892 == GF_OMP_TARGET_KIND_DATA
)))
12894 else if (!up
->lastprivate_conditional_map
)
12896 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
12897 if (TREE_CODE (lhs
) == MEM_REF
12898 && DECL_P (TREE_OPERAND (lhs
, 0))
12899 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
12900 0))) == REFERENCE_TYPE
)
12901 lhs
= TREE_OPERAND (lhs
, 0);
12903 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
12906 if (up
->combined_into_simd_safelen1
)
12909 if (gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
)
12912 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
12913 clauses
= gimple_omp_for_clauses (up
->stmt
);
12915 clauses
= gimple_omp_sections_clauses (up
->stmt
);
12916 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
);
12917 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
12918 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
12919 OMP_CLAUSE__CONDTEMP_
);
12920 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
12921 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
12922 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
12929 if ((ctx
|| task_shared_vars
)
12930 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
12933 /* Just remove clobbers, this should happen only if we have
12934 "privatized" local addressable variables in SIMD regions,
12935 the clobber isn't needed in that case and gimplifying address
12936 of the ARRAY_REF into a pointer and creating MEM_REF based
12937 clobber would create worse code than we get with the clobber
12939 if (gimple_clobber_p (stmt
))
12941 gsi_replace (gsi_p
, gimple_build_nop (), true);
12944 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
12951 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
12953 location_t saved_location
= input_location
;
12954 gimple_stmt_iterator gsi
;
12955 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
12956 lower_omp_1 (&gsi
, ctx
);
12957 /* During gimplification, we haven't folded statments inside offloading
12958 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12959 if (target_nesting_level
|| taskreg_nesting_level
)
12960 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
12962 input_location
= saved_location
;
12965 /* Main entry point. */
12967 static unsigned int
12968 execute_lower_omp (void)
12974 /* This pass always runs, to provide PROP_gimple_lomp.
12975 But often, there is nothing to do. */
12976 if (flag_openacc
== 0 && flag_openmp
== 0
12977 && flag_openmp_simd
== 0)
12980 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
12981 delete_omp_context
);
12983 body
= gimple_body (current_function_decl
);
12985 scan_omp (&body
, NULL
);
12986 gcc_assert (taskreg_nesting_level
== 0);
12987 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
12988 finish_taskreg_scan (ctx
);
12989 taskreg_contexts
.release ();
12991 if (all_contexts
->root
)
12993 if (task_shared_vars
)
12994 push_gimplify_context ();
12995 lower_omp (&body
, NULL
);
12996 if (task_shared_vars
)
12997 pop_gimplify_context (NULL
);
13002 splay_tree_delete (all_contexts
);
13003 all_contexts
= NULL
;
13005 BITMAP_FREE (task_shared_vars
);
13006 BITMAP_FREE (global_nonaddressable_vars
);
13008 /* If current function is a method, remove artificial dummy VAR_DECL created
13009 for non-static data member privatization, they aren't needed for
13010 debuginfo nor anything else, have been already replaced everywhere in the
13011 IL and cause problems with LTO. */
13012 if (DECL_ARGUMENTS (current_function_decl
)
13013 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
13014 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
13016 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
13022 const pass_data pass_data_lower_omp
=
13024 GIMPLE_PASS
, /* type */
13025 "omplower", /* name */
13026 OPTGROUP_OMP
, /* optinfo_flags */
13027 TV_NONE
, /* tv_id */
13028 PROP_gimple_any
, /* properties_required */
13029 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
13030 0, /* properties_destroyed */
13031 0, /* todo_flags_start */
13032 0, /* todo_flags_finish */
13035 class pass_lower_omp
: public gimple_opt_pass
13038 pass_lower_omp (gcc::context
*ctxt
)
13039 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
13042 /* opt_pass methods: */
13043 virtual unsigned int execute (function
*) { return execute_lower_omp (); }
13045 }; // class pass_lower_omp
13047 } // anon namespace
13050 make_pass_lower_omp (gcc::context
*ctxt
)
13052 return new pass_lower_omp (ctxt
);
13055 /* The following is a utility to diagnose structured block violations.
13056 It is not part of the "omplower" pass, as that's invoked too late. It
13057 should be invoked by the respective front ends after gimplification. */
13059 static splay_tree all_labels
;
13061 /* Check for mismatched contexts and generate an error if needed. Return
13062 true if an error is detected. */
13065 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
13066 gimple
*branch_ctx
, gimple
*label_ctx
)
13068 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
13069 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
13071 if (label_ctx
== branch_ctx
)
13074 const char* kind
= NULL
;
13078 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
13079 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
13081 gcc_checking_assert (kind
== NULL
);
13087 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
13091 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
13092 so we could traverse it and issue a correct "exit" or "enter" error
13093 message upon a structured block violation.
13095 We built the context by building a list with tree_cons'ing, but there is
13096 no easy counterpart in gimple tuples. It seems like far too much work
13097 for issuing exit/enter error messages. If someone really misses the
13098 distinct error message... patches welcome. */
13101 /* Try to avoid confusing the user by producing and error message
13102 with correct "exit" or "enter" verbiage. We prefer "exit"
13103 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
13104 if (branch_ctx
== NULL
)
13110 if (TREE_VALUE (label_ctx
) == branch_ctx
)
13115 label_ctx
= TREE_CHAIN (label_ctx
);
13120 error ("invalid exit from %s structured block", kind
);
13122 error ("invalid entry to %s structured block", kind
);
13125 /* If it's obvious we have an invalid entry, be specific about the error. */
13126 if (branch_ctx
== NULL
)
13127 error ("invalid entry to %s structured block", kind
);
13130 /* Otherwise, be vague and lazy, but efficient. */
13131 error ("invalid branch to/from %s structured block", kind
);
13134 gsi_replace (gsi_p
, gimple_build_nop (), false);
13138 /* Pass 1: Create a minimal tree of structured blocks, and record
13139 where each label is found. */
13142 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
13143 struct walk_stmt_info
*wi
)
13145 gimple
*context
= (gimple
*) wi
->info
;
13146 gimple
*inner_context
;
13147 gimple
*stmt
= gsi_stmt (*gsi_p
);
13149 *handled_ops_p
= true;
13151 switch (gimple_code (stmt
))
13155 case GIMPLE_OMP_PARALLEL
:
13156 case GIMPLE_OMP_TASK
:
13157 case GIMPLE_OMP_SECTIONS
:
13158 case GIMPLE_OMP_SINGLE
:
13159 case GIMPLE_OMP_SECTION
:
13160 case GIMPLE_OMP_MASTER
:
13161 case GIMPLE_OMP_ORDERED
:
13162 case GIMPLE_OMP_SCAN
:
13163 case GIMPLE_OMP_CRITICAL
:
13164 case GIMPLE_OMP_TARGET
:
13165 case GIMPLE_OMP_TEAMS
:
13166 case GIMPLE_OMP_TASKGROUP
:
13167 /* The minimal context here is just the current OMP construct. */
13168 inner_context
= stmt
;
13169 wi
->info
= inner_context
;
13170 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
13171 wi
->info
= context
;
13174 case GIMPLE_OMP_FOR
:
13175 inner_context
= stmt
;
13176 wi
->info
= inner_context
;
13177 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13179 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
13180 diagnose_sb_1
, NULL
, wi
);
13181 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
13182 wi
->info
= context
;
13186 splay_tree_insert (all_labels
,
13187 (splay_tree_key
) gimple_label_label (
13188 as_a
<glabel
*> (stmt
)),
13189 (splay_tree_value
) context
);
13199 /* Pass 2: Check each branch and see if its context differs from that of
13200 the destination label's context. */
13203 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
13204 struct walk_stmt_info
*wi
)
13206 gimple
*context
= (gimple
*) wi
->info
;
13208 gimple
*stmt
= gsi_stmt (*gsi_p
);
13210 *handled_ops_p
= true;
13212 switch (gimple_code (stmt
))
13216 case GIMPLE_OMP_PARALLEL
:
13217 case GIMPLE_OMP_TASK
:
13218 case GIMPLE_OMP_SECTIONS
:
13219 case GIMPLE_OMP_SINGLE
:
13220 case GIMPLE_OMP_SECTION
:
13221 case GIMPLE_OMP_MASTER
:
13222 case GIMPLE_OMP_ORDERED
:
13223 case GIMPLE_OMP_SCAN
:
13224 case GIMPLE_OMP_CRITICAL
:
13225 case GIMPLE_OMP_TARGET
:
13226 case GIMPLE_OMP_TEAMS
:
13227 case GIMPLE_OMP_TASKGROUP
:
13229 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
13230 wi
->info
= context
;
13233 case GIMPLE_OMP_FOR
:
13235 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13237 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
13238 diagnose_sb_2
, NULL
, wi
);
13239 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
13240 wi
->info
= context
;
13245 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
13246 tree lab
= gimple_cond_true_label (cond_stmt
);
13249 n
= splay_tree_lookup (all_labels
,
13250 (splay_tree_key
) lab
);
13251 diagnose_sb_0 (gsi_p
, context
,
13252 n
? (gimple
*) n
->value
: NULL
);
13254 lab
= gimple_cond_false_label (cond_stmt
);
13257 n
= splay_tree_lookup (all_labels
,
13258 (splay_tree_key
) lab
);
13259 diagnose_sb_0 (gsi_p
, context
,
13260 n
? (gimple
*) n
->value
: NULL
);
13267 tree lab
= gimple_goto_dest (stmt
);
13268 if (TREE_CODE (lab
) != LABEL_DECL
)
13271 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
13272 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
13276 case GIMPLE_SWITCH
:
13278 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
13280 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
13282 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
13283 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
13284 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
13290 case GIMPLE_RETURN
:
13291 diagnose_sb_0 (gsi_p
, context
, NULL
);
13301 static unsigned int
13302 diagnose_omp_structured_block_errors (void)
13304 struct walk_stmt_info wi
;
13305 gimple_seq body
= gimple_body (current_function_decl
);
13307 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
13309 memset (&wi
, 0, sizeof (wi
));
13310 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
13312 memset (&wi
, 0, sizeof (wi
));
13313 wi
.want_locations
= true;
13314 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
13316 gimple_set_body (current_function_decl
, body
);
13318 splay_tree_delete (all_labels
);
13326 const pass_data pass_data_diagnose_omp_blocks
=
13328 GIMPLE_PASS
, /* type */
13329 "*diagnose_omp_blocks", /* name */
13330 OPTGROUP_OMP
, /* optinfo_flags */
13331 TV_NONE
, /* tv_id */
13332 PROP_gimple_any
, /* properties_required */
13333 0, /* properties_provided */
13334 0, /* properties_destroyed */
13335 0, /* todo_flags_start */
13336 0, /* todo_flags_finish */
13339 class pass_diagnose_omp_blocks
: public gimple_opt_pass
13342 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
13343 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
13346 /* opt_pass methods: */
13347 virtual bool gate (function
*)
13349 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
13351 virtual unsigned int execute (function
*)
13353 return diagnose_omp_structured_block_errors ();
13356 }; // class pass_diagnose_omp_blocks
13358 } // anon namespace
13361 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
13363 return new pass_diagnose_omp_blocks (ctxt
);
13367 #include "gt-omp-low.h"