/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
74 /* Context structure. Used to store information about each parallel
75 directive in the code. */
79 /* This field must be at the beginning, as we do "inheritance": Some
80 callback functions for tree-inline.c (e.g., omp_copy_decl)
81 receive a copy_body_data pointer that is up-casted to an
82 omp_context pointer. */
85 /* The tree of contexts corresponding to the encountered constructs. */
86 struct omp_context
*outer
;
89 /* Map variables to fields in a structure that allows communication
90 between sending and receiving threads. */
96 /* These are used just by task contexts, if task firstprivate fn is
97 needed. srecord_type is used to communicate from the thread
98 that encountered the task construct to task firstprivate fn,
99 record_type is allocated by GOMP_task, initialized by task firstprivate
100 fn and passed to the task body fn. */
101 splay_tree sfield_map
;
104 /* A chain of variables to add to the top-level block surrounding the
105 construct. In the case of a parallel, this is in the child function. */
108 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
109 barriers should jump to during omplower pass. */
112 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
116 /* For task reductions registered in this context, a vector containing
117 the length of the private copies block (if constant, otherwise NULL)
118 and then offsets (if constant, otherwise NULL) for each entry. */
119 vec
<tree
> task_reductions
;
121 /* A hash map from the reduction clauses to the registered array
123 hash_map
<tree
, unsigned> *task_reduction_map
;
125 /* And a hash map from the lastprivate(conditional:) variables to their
126 corresponding tracking loop iteration variables. */
127 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
129 /* A tree_list of the reduction clauses in this context. This is
130 only used for checking the consistency of OpenACC reduction
131 clauses in scan_omp_for and is not guaranteed to contain a valid
132 value outside of this function. */
133 tree local_reduction_clauses
;
135 /* A tree_list of the reduction clauses in outer contexts. This is
136 only used for checking the consistency of OpenACC reduction
137 clauses in scan_omp_for and is not guaranteed to contain a valid
138 value outside of this function. */
139 tree outer_reduction_clauses
;
141 /* Nesting depth of this context. Used to beautify error messages re
142 invalid gotos. The outermost ctx is depth 1, with depth 0 being
143 reserved for the main body of the function. */
146 /* True if this parallel directive is nested within another. */
149 /* True if this construct can be cancelled. */
152 /* True if lower_omp_1 should look up lastprivate conditional in parent
154 bool combined_into_simd_safelen1
;
156 /* True if there is nested scan context with inclusive clause. */
159 /* True if there is nested scan context with exclusive clause. */
162 /* True in the second simd loop of for simd with inscan reductions. */
163 bool for_simd_scan_phase
;
165 /* True if there is order(concurrent) clause on the construct. */
166 bool order_concurrent
;
168 /* True if there is bind clause on the construct (i.e. a loop construct). */
172 static splay_tree all_contexts
;
173 static int taskreg_nesting_level
;
174 static int target_nesting_level
;
175 static bitmap task_shared_vars
;
176 static bitmap global_nonaddressable_vars
;
177 static vec
<omp_context
*> taskreg_contexts
;
179 static void scan_omp (gimple_seq
*, omp_context
*);
180 static tree
scan_omp_1_op (tree
*, int *, void *);
182 #define WALK_SUBSTMTS \
186 case GIMPLE_EH_FILTER: \
187 case GIMPLE_TRANSACTION: \
188 /* The sub-statements for these should be walked. */ \
189 *handled_ops_p = false; \
192 /* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
196 is_oacc_parallel_or_serial (omp_context
*ctx
)
198 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
199 return ((outer_type
== GIMPLE_OMP_TARGET
)
200 && ((gimple_omp_target_kind (ctx
->stmt
)
201 == GF_OMP_TARGET_KIND_OACC_PARALLEL
)
202 || (gimple_omp_target_kind (ctx
->stmt
)
203 == GF_OMP_TARGET_KIND_OACC_SERIAL
)));
206 /* Return true if CTX corresponds to an oacc kernels region. */
209 is_oacc_kernels (omp_context
*ctx
)
211 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
212 return ((outer_type
== GIMPLE_OMP_TARGET
)
213 && (gimple_omp_target_kind (ctx
->stmt
)
214 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
217 /* Return true if STMT corresponds to an OpenMP target region. */
219 is_omp_target (gimple
*stmt
)
221 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
)
223 int kind
= gimple_omp_target_kind (stmt
);
224 return (kind
== GF_OMP_TARGET_KIND_REGION
225 || kind
== GF_OMP_TARGET_KIND_DATA
226 || kind
== GF_OMP_TARGET_KIND_ENTER_DATA
227 || kind
== GF_OMP_TARGET_KIND_EXIT_DATA
);
232 /* If DECL is the artificial dummy VAR_DECL created for non-static
233 data member privatization, return the underlying "this" parameter,
234 otherwise return NULL. */
237 omp_member_access_dummy_var (tree decl
)
240 || !DECL_ARTIFICIAL (decl
)
241 || !DECL_IGNORED_P (decl
)
242 || !DECL_HAS_VALUE_EXPR_P (decl
)
243 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
246 tree v
= DECL_VALUE_EXPR (decl
);
247 if (TREE_CODE (v
) != COMPONENT_REF
)
251 switch (TREE_CODE (v
))
257 case POINTER_PLUS_EXPR
:
258 v
= TREE_OPERAND (v
, 0);
261 if (DECL_CONTEXT (v
) == current_function_decl
262 && DECL_ARTIFICIAL (v
)
263 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
271 /* Helper for unshare_and_remap, called through walk_tree. */
274 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
276 tree
*pair
= (tree
*) data
;
279 *tp
= unshare_expr (pair
[1]);
282 else if (IS_TYPE_OR_DECL_P (*tp
))
287 /* Return unshare_expr (X) with all occurrences of FROM
291 unshare_and_remap (tree x
, tree from
, tree to
)
293 tree pair
[2] = { from
, to
};
294 x
= unshare_expr (x
);
295 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
299 /* Convenience function for calling scan_omp_1_op on tree operands. */
302 scan_omp_op (tree
*tp
, omp_context
*ctx
)
304 struct walk_stmt_info wi
;
306 memset (&wi
, 0, sizeof (wi
));
308 wi
.want_locations
= true;
310 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
313 static void lower_omp (gimple_seq
*, omp_context
*);
314 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
315 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
317 /* Return true if CTX is for an omp parallel. */
320 is_parallel_ctx (omp_context
*ctx
)
322 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
326 /* Return true if CTX is for an omp task. */
329 is_task_ctx (omp_context
*ctx
)
331 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
335 /* Return true if CTX is for an omp taskloop. */
338 is_taskloop_ctx (omp_context
*ctx
)
340 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
341 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
345 /* Return true if CTX is for a host omp teams. */
348 is_host_teams_ctx (omp_context
*ctx
)
350 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
351 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
354 /* Return true if CTX is for an omp parallel or omp task or host omp teams
355 (the last one is strictly not a task region in OpenMP speak, but we
356 need to treat it similarly). */
359 is_taskreg_ctx (omp_context
*ctx
)
361 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
364 /* Return true if EXPR is variable sized. */
367 is_variable_sized (const_tree expr
)
369 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
372 /* Lookup variables. The "maybe" form
373 allows for the variable form to not have been entered, otherwise we
374 assert that the variable must have been entered. */
377 lookup_decl (tree var
, omp_context
*ctx
)
379 tree
*n
= ctx
->cb
.decl_map
->get (var
);
384 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
386 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
387 return n
? *n
: NULL_TREE
;
391 lookup_field (tree var
, omp_context
*ctx
)
394 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
395 return (tree
) n
->value
;
399 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
402 n
= splay_tree_lookup (ctx
->sfield_map
403 ? ctx
->sfield_map
: ctx
->field_map
, key
);
404 return (tree
) n
->value
;
408 lookup_sfield (tree var
, omp_context
*ctx
)
410 return lookup_sfield ((splay_tree_key
) var
, ctx
);
414 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
417 n
= splay_tree_lookup (ctx
->field_map
, key
);
418 return n
? (tree
) n
->value
: NULL_TREE
;
422 maybe_lookup_field (tree var
, omp_context
*ctx
)
424 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
427 /* Return true if DECL should be copied by pointer. SHARED_CTX is
428 the parallel context if DECL is to be shared. */
431 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
433 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
434 || TYPE_ATOMIC (TREE_TYPE (decl
)))
437 /* We can only use copy-in/copy-out semantics for shared variables
438 when we know the value is not accessible from an outer scope. */
441 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
443 /* ??? Trivially accessible from anywhere. But why would we even
444 be passing an address in this case? Should we simply assert
445 this to be false, or should we have a cleanup pass that removes
446 these from the list of mappings? */
447 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
450 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
451 without analyzing the expression whether or not its location
452 is accessible to anyone else. In the case of nested parallel
453 regions it certainly may be. */
454 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
457 /* Do not use copy-in/copy-out for variables that have their
459 if (is_global_var (decl
))
461 /* For file scope vars, track whether we've seen them as
462 non-addressable initially and in that case, keep the same
463 answer for the duration of the pass, even when they are made
464 addressable later on e.g. through reduction expansion. Global
465 variables which weren't addressable before the pass will not
466 have their privatized copies address taken. See PR91216. */
467 if (!TREE_ADDRESSABLE (decl
))
469 if (!global_nonaddressable_vars
)
470 global_nonaddressable_vars
= BITMAP_ALLOC (NULL
);
471 bitmap_set_bit (global_nonaddressable_vars
, DECL_UID (decl
));
473 else if (!global_nonaddressable_vars
474 || !bitmap_bit_p (global_nonaddressable_vars
,
478 else if (TREE_ADDRESSABLE (decl
))
481 /* lower_send_shared_vars only uses copy-in, but not copy-out
483 if (TREE_READONLY (decl
)
484 || ((TREE_CODE (decl
) == RESULT_DECL
485 || TREE_CODE (decl
) == PARM_DECL
)
486 && DECL_BY_REFERENCE (decl
)))
489 /* Disallow copy-in/out in nested parallel if
490 decl is shared in outer parallel, otherwise
491 each thread could store the shared variable
492 in its own copy-in location, making the
493 variable no longer really shared. */
494 if (shared_ctx
->is_nested
)
498 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
499 if ((is_taskreg_ctx (up
)
500 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
501 && is_gimple_omp_offloaded (up
->stmt
)))
502 && maybe_lookup_decl (decl
, up
))
509 if (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
)
511 for (c
= gimple_omp_target_clauses (up
->stmt
);
512 c
; c
= OMP_CLAUSE_CHAIN (c
))
513 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
514 && OMP_CLAUSE_DECL (c
) == decl
)
518 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
519 c
; c
= OMP_CLAUSE_CHAIN (c
))
520 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
521 && OMP_CLAUSE_DECL (c
) == decl
)
525 goto maybe_mark_addressable_and_ret
;
529 /* For tasks avoid using copy-in/out. As tasks can be
530 deferred or executed in different thread, when GOMP_task
531 returns, the task hasn't necessarily terminated. */
532 if (is_task_ctx (shared_ctx
))
535 maybe_mark_addressable_and_ret
:
536 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
537 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
539 /* Taking address of OUTER in lower_send_shared_vars
540 might need regimplification of everything that uses the
542 if (!task_shared_vars
)
543 task_shared_vars
= BITMAP_ALLOC (NULL
);
544 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
545 TREE_ADDRESSABLE (outer
) = 1;
554 /* Construct a new automatic decl similar to VAR. */
557 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
559 tree copy
= copy_var_decl (var
, name
, type
);
561 DECL_CONTEXT (copy
) = current_function_decl
;
562 DECL_CHAIN (copy
) = ctx
->block_vars
;
563 /* If VAR is listed in task_shared_vars, it means it wasn't
564 originally addressable and is just because task needs to take
565 it's address. But we don't need to take address of privatizations
567 if (TREE_ADDRESSABLE (var
)
568 && ((task_shared_vars
569 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
570 || (global_nonaddressable_vars
571 && bitmap_bit_p (global_nonaddressable_vars
, DECL_UID (var
)))))
572 TREE_ADDRESSABLE (copy
) = 0;
573 ctx
->block_vars
= copy
;
579 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
581 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
584 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
587 omp_build_component_ref (tree obj
, tree field
)
589 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
590 if (TREE_THIS_VOLATILE (field
))
591 TREE_THIS_VOLATILE (ret
) |= 1;
592 if (TREE_READONLY (field
))
593 TREE_READONLY (ret
) |= 1;
597 /* Build tree nodes to access the field for VAR on the receiver side. */
600 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
602 tree x
, field
= lookup_field (var
, ctx
);
604 /* If the receiver record type was remapped in the child function,
605 remap the field into the new record type. */
606 x
= maybe_lookup_field (field
, ctx
);
610 x
= build_simple_mem_ref (ctx
->receiver_decl
);
611 TREE_THIS_NOTRAP (x
) = 1;
612 x
= omp_build_component_ref (x
, field
);
615 x
= build_simple_mem_ref (x
);
616 TREE_THIS_NOTRAP (x
) = 1;
622 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
623 of a parallel, this is a component reference; for workshare constructs
624 this is some variable. */
627 build_outer_var_ref (tree var
, omp_context
*ctx
,
628 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
631 omp_context
*outer
= ctx
->outer
;
632 while (outer
&& gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
633 outer
= outer
->outer
;
635 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
637 else if (is_variable_sized (var
))
639 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
640 x
= build_outer_var_ref (x
, ctx
, code
);
641 x
= build_simple_mem_ref (x
);
643 else if (is_taskreg_ctx (ctx
))
645 bool by_ref
= use_pointer_for_field (var
, NULL
);
646 x
= build_receiver_ref (var
, by_ref
, ctx
);
648 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
649 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
651 || (code
== OMP_CLAUSE_PRIVATE
652 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
653 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
654 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
656 /* #pragma omp simd isn't a worksharing construct, and can reference
657 even private vars in its linear etc. clauses.
658 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
659 to private vars in all worksharing constructs. */
661 if (outer
&& is_taskreg_ctx (outer
))
662 x
= lookup_decl (var
, outer
);
664 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
668 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
672 = splay_tree_lookup (outer
->field_map
,
673 (splay_tree_key
) &DECL_UID (var
));
676 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
679 x
= lookup_decl (var
, outer
);
683 tree field
= (tree
) n
->value
;
684 /* If the receiver record type was remapped in the child function,
685 remap the field into the new record type. */
686 x
= maybe_lookup_field (field
, outer
);
690 x
= build_simple_mem_ref (outer
->receiver_decl
);
691 x
= omp_build_component_ref (x
, field
);
692 if (use_pointer_for_field (var
, outer
))
693 x
= build_simple_mem_ref (x
);
697 x
= lookup_decl (var
, outer
);
698 else if (omp_is_reference (var
))
699 /* This can happen with orphaned constructs. If var is reference, it is
700 possible it is shared and as such valid. */
702 else if (omp_member_access_dummy_var (var
))
709 tree t
= omp_member_access_dummy_var (var
);
712 x
= DECL_VALUE_EXPR (var
);
713 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
715 x
= unshare_and_remap (x
, t
, o
);
717 x
= unshare_expr (x
);
721 if (omp_is_reference (var
))
722 x
= build_simple_mem_ref (x
);
727 /* Build tree nodes to access the field for VAR on the sender side. */
730 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
732 tree field
= lookup_sfield (key
, ctx
);
733 return omp_build_component_ref (ctx
->sender_decl
, field
);
737 build_sender_ref (tree var
, omp_context
*ctx
)
739 return build_sender_ref ((splay_tree_key
) var
, ctx
);
742 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
743 BASE_POINTERS_RESTRICT, declare the field with restrict. */
746 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
748 tree field
, type
, sfield
= NULL_TREE
;
749 splay_tree_key key
= (splay_tree_key
) var
;
751 if ((mask
& 16) != 0)
753 key
= (splay_tree_key
) &DECL_NAME (var
);
754 gcc_checking_assert (key
!= (splay_tree_key
) var
);
758 key
= (splay_tree_key
) &DECL_UID (var
);
759 gcc_checking_assert (key
!= (splay_tree_key
) var
);
761 gcc_assert ((mask
& 1) == 0
762 || !splay_tree_lookup (ctx
->field_map
, key
));
763 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
764 || !splay_tree_lookup (ctx
->sfield_map
, key
));
765 gcc_assert ((mask
& 3) == 3
766 || !is_gimple_omp_oacc (ctx
->stmt
));
768 type
= TREE_TYPE (var
);
769 if ((mask
& 16) != 0)
770 type
= lang_hooks
.decls
.omp_array_data (var
, true);
772 /* Prevent redeclaring the var in the split-off function with a restrict
773 pointer type. Note that we only clear type itself, restrict qualifiers in
774 the pointed-to type will be ignored by points-to analysis. */
775 if (POINTER_TYPE_P (type
)
776 && TYPE_RESTRICT (type
))
777 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
781 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
782 type
= build_pointer_type (build_pointer_type (type
));
785 type
= build_pointer_type (type
);
786 else if ((mask
& 3) == 1 && omp_is_reference (var
))
787 type
= TREE_TYPE (type
);
789 field
= build_decl (DECL_SOURCE_LOCATION (var
),
790 FIELD_DECL
, DECL_NAME (var
), type
);
792 /* Remember what variable this field was created for. This does have a
793 side effect of making dwarf2out ignore this member, so for helpful
794 debugging we clear it later in delete_omp_context. */
795 DECL_ABSTRACT_ORIGIN (field
) = var
;
796 if ((mask
& 16) == 0 && type
== TREE_TYPE (var
))
798 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
799 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
800 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
803 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
807 insert_field_into_struct (ctx
->record_type
, field
);
808 if (ctx
->srecord_type
)
810 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
811 FIELD_DECL
, DECL_NAME (var
), type
);
812 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
813 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
814 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
815 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
816 insert_field_into_struct (ctx
->srecord_type
, sfield
);
821 if (ctx
->srecord_type
== NULL_TREE
)
825 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
826 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
827 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
829 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
830 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
831 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
832 insert_field_into_struct (ctx
->srecord_type
, sfield
);
833 splay_tree_insert (ctx
->sfield_map
,
834 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
835 (splay_tree_value
) sfield
);
839 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
840 : ctx
->srecord_type
, field
);
844 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
845 if ((mask
& 2) && ctx
->sfield_map
)
846 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
850 install_var_local (tree var
, omp_context
*ctx
)
852 tree new_var
= omp_copy_decl_1 (var
, ctx
);
853 insert_decl_map (&ctx
->cb
, var
, new_var
);
857 /* Adjust the replacement for DECL in CTX for the new context. This means
858 copying the DECL_VALUE_EXPR, and fixing up the type. */
861 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
865 new_decl
= lookup_decl (decl
, ctx
);
867 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
869 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
870 && DECL_HAS_VALUE_EXPR_P (decl
))
872 tree ve
= DECL_VALUE_EXPR (decl
);
873 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
874 SET_DECL_VALUE_EXPR (new_decl
, ve
);
875 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
878 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
880 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
881 if (size
== error_mark_node
)
882 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
883 DECL_SIZE (new_decl
) = size
;
885 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
886 if (size
== error_mark_node
)
887 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
888 DECL_SIZE_UNIT (new_decl
) = size
;
892 /* The callback for remap_decl. Search all containing contexts for a
893 mapping of the variable; this avoids having to duplicate the splay
894 tree ahead of time. We know a mapping doesn't already exist in the
895 given context. Create new mappings to implement default semantics. */
898 omp_copy_decl (tree var
, copy_body_data
*cb
)
900 omp_context
*ctx
= (omp_context
*) cb
;
903 if (TREE_CODE (var
) == LABEL_DECL
)
905 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
907 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
908 DECL_CONTEXT (new_var
) = current_function_decl
;
909 insert_decl_map (&ctx
->cb
, var
, new_var
);
913 while (!is_taskreg_ctx (ctx
))
918 new_var
= maybe_lookup_decl (var
, ctx
);
923 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
926 return error_mark_node
;
929 /* Create a new context, with OUTER_CTX being the surrounding context. */
932 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
934 omp_context
*ctx
= XCNEW (omp_context
);
936 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
937 (splay_tree_value
) ctx
);
942 ctx
->outer
= outer_ctx
;
943 ctx
->cb
= outer_ctx
->cb
;
944 ctx
->cb
.block
= NULL
;
945 ctx
->depth
= outer_ctx
->depth
+ 1;
949 ctx
->cb
.src_fn
= current_function_decl
;
950 ctx
->cb
.dst_fn
= current_function_decl
;
951 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
952 gcc_checking_assert (ctx
->cb
.src_node
);
953 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
954 ctx
->cb
.src_cfun
= cfun
;
955 ctx
->cb
.copy_decl
= omp_copy_decl
;
956 ctx
->cb
.eh_lp_nr
= 0;
957 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
958 ctx
->cb
.adjust_array_error_bounds
= true;
959 ctx
->cb
.dont_remap_vla_if_no_change
= true;
963 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
968 static gimple_seq
maybe_catch_exception (gimple_seq
);
970 /* Finalize task copyfn. */
973 finalize_task_copyfn (gomp_task
*task_stmt
)
975 struct function
*child_cfun
;
977 gimple_seq seq
= NULL
, new_seq
;
980 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
981 if (child_fn
== NULL_TREE
)
984 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
985 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
987 push_cfun (child_cfun
);
988 bind
= gimplify_body (child_fn
, false);
989 gimple_seq_add_stmt (&seq
, bind
);
990 new_seq
= maybe_catch_exception (seq
);
993 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
995 gimple_seq_add_stmt (&seq
, bind
);
997 gimple_set_body (child_fn
, seq
);
1000 /* Inform the callgraph about the new function. */
1001 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
1002 node
->parallelized_function
= 1;
1003 cgraph_node::add_new_function (child_fn
, false);
1006 /* Destroy a omp_context data structures. Called through the splay tree
1007 value delete callback. */
1010 delete_omp_context (splay_tree_value value
)
1012 omp_context
*ctx
= (omp_context
*) value
;
1014 delete ctx
->cb
.decl_map
;
1017 splay_tree_delete (ctx
->field_map
);
1018 if (ctx
->sfield_map
)
1019 splay_tree_delete (ctx
->sfield_map
);
1021 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1022 it produces corrupt debug information. */
1023 if (ctx
->record_type
)
1026 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
1027 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1029 if (ctx
->srecord_type
)
1032 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
1033 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1036 if (is_task_ctx (ctx
))
1037 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
1039 if (ctx
->task_reduction_map
)
1041 ctx
->task_reductions
.release ();
1042 delete ctx
->task_reduction_map
;
1045 delete ctx
->lastprivate_conditional_map
;
1050 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1054 fixup_child_record_type (omp_context
*ctx
)
1056 tree f
, type
= ctx
->record_type
;
1058 if (!ctx
->receiver_decl
)
1060 /* ??? It isn't sufficient to just call remap_type here, because
1061 variably_modified_type_p doesn't work the way we expect for
1062 record types. Testing each field for whether it needs remapping
1063 and creating a new record by hand works, however. */
1064 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
1065 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
1069 tree name
, new_fields
= NULL
;
1071 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1072 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1073 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1074 TYPE_DECL
, name
, type
);
1075 TYPE_NAME (type
) = name
;
1077 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1079 tree new_f
= copy_node (f
);
1080 DECL_CONTEXT (new_f
) = type
;
1081 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1082 DECL_CHAIN (new_f
) = new_fields
;
1083 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1084 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1086 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1090 /* Arrange to be able to look up the receiver field
1091 given the sender field. */
1092 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1093 (splay_tree_value
) new_f
);
1095 TYPE_FIELDS (type
) = nreverse (new_fields
);
1099 /* In a target region we never modify any of the pointers in *.omp_data_i,
1100 so attempt to help the optimizers. */
1101 if (is_gimple_omp_offloaded (ctx
->stmt
))
1102 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1104 TREE_TYPE (ctx
->receiver_decl
)
1105 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1108 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1109 specified by CLAUSES. */
1112 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1115 bool scan_array_reductions
= false;
1117 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1121 switch (OMP_CLAUSE_CODE (c
))
1123 case OMP_CLAUSE_PRIVATE
:
1124 decl
= OMP_CLAUSE_DECL (c
);
1125 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1127 else if (!is_variable_sized (decl
))
1128 install_var_local (decl
, ctx
);
1131 case OMP_CLAUSE_SHARED
:
1132 decl
= OMP_CLAUSE_DECL (c
);
1133 /* Ignore shared directives in teams construct inside of
1134 target construct. */
1135 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1136 && !is_host_teams_ctx (ctx
))
1138 /* Global variables don't need to be copied,
1139 the receiver side will use them directly. */
1140 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1141 if (is_global_var (odecl
))
1143 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1146 gcc_assert (is_taskreg_ctx (ctx
));
1147 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1148 || !is_variable_sized (decl
));
1149 /* Global variables don't need to be copied,
1150 the receiver side will use them directly. */
1151 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1153 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1155 use_pointer_for_field (decl
, ctx
);
1158 by_ref
= use_pointer_for_field (decl
, NULL
);
1159 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1160 || TREE_ADDRESSABLE (decl
)
1162 || omp_is_reference (decl
))
1164 by_ref
= use_pointer_for_field (decl
, ctx
);
1165 install_var_field (decl
, by_ref
, 3, ctx
);
1166 install_var_local (decl
, ctx
);
1169 /* We don't need to copy const scalar vars back. */
1170 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1173 case OMP_CLAUSE_REDUCTION
:
1174 if (is_oacc_parallel_or_serial (ctx
) || is_oacc_kernels (ctx
))
1175 ctx
->local_reduction_clauses
1176 = tree_cons (NULL
, c
, ctx
->local_reduction_clauses
);
1179 case OMP_CLAUSE_IN_REDUCTION
:
1180 decl
= OMP_CLAUSE_DECL (c
);
1181 if (TREE_CODE (decl
) == MEM_REF
)
1183 tree t
= TREE_OPERAND (decl
, 0);
1184 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1185 t
= TREE_OPERAND (t
, 0);
1186 if (TREE_CODE (t
) == INDIRECT_REF
1187 || TREE_CODE (t
) == ADDR_EXPR
)
1188 t
= TREE_OPERAND (t
, 0);
1189 install_var_local (t
, ctx
);
1190 if (is_taskreg_ctx (ctx
)
1191 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1192 || (is_task_ctx (ctx
)
1193 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1194 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1195 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1196 == POINTER_TYPE
)))))
1197 && !is_variable_sized (t
)
1198 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1199 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1200 && !is_task_ctx (ctx
))))
1202 by_ref
= use_pointer_for_field (t
, NULL
);
1203 if (is_task_ctx (ctx
)
1204 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1205 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1207 install_var_field (t
, false, 1, ctx
);
1208 install_var_field (t
, by_ref
, 2, ctx
);
1211 install_var_field (t
, by_ref
, 3, ctx
);
1215 if (is_task_ctx (ctx
)
1216 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1217 && OMP_CLAUSE_REDUCTION_TASK (c
)
1218 && is_parallel_ctx (ctx
)))
1220 /* Global variables don't need to be copied,
1221 the receiver side will use them directly. */
1222 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1224 by_ref
= use_pointer_for_field (decl
, ctx
);
1225 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1226 install_var_field (decl
, by_ref
, 3, ctx
);
1228 install_var_local (decl
, ctx
);
1231 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1232 && OMP_CLAUSE_REDUCTION_TASK (c
))
1234 install_var_local (decl
, ctx
);
1239 case OMP_CLAUSE_LASTPRIVATE
:
1240 /* Let the corresponding firstprivate clause create
1242 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1246 case OMP_CLAUSE_FIRSTPRIVATE
:
1247 case OMP_CLAUSE_LINEAR
:
1248 decl
= OMP_CLAUSE_DECL (c
);
1250 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1251 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1252 && is_gimple_omp_offloaded (ctx
->stmt
))
1254 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1255 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1256 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1257 install_var_field (decl
, true, 3, ctx
);
1259 install_var_field (decl
, false, 3, ctx
);
1261 if (is_variable_sized (decl
))
1263 if (is_task_ctx (ctx
))
1264 install_var_field (decl
, false, 1, ctx
);
1267 else if (is_taskreg_ctx (ctx
))
1270 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1271 by_ref
= use_pointer_for_field (decl
, NULL
);
1273 if (is_task_ctx (ctx
)
1274 && (global
|| by_ref
|| omp_is_reference (decl
)))
1276 install_var_field (decl
, false, 1, ctx
);
1278 install_var_field (decl
, by_ref
, 2, ctx
);
1281 install_var_field (decl
, by_ref
, 3, ctx
);
1283 install_var_local (decl
, ctx
);
1286 case OMP_CLAUSE_USE_DEVICE_PTR
:
1287 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1288 decl
= OMP_CLAUSE_DECL (c
);
1290 /* Fortran array descriptors. */
1291 if (lang_hooks
.decls
.omp_array_data (decl
, true))
1292 install_var_field (decl
, false, 19, ctx
);
1293 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
1294 && !omp_is_reference (decl
)
1295 && !omp_is_allocatable_or_ptr (decl
))
1296 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1297 install_var_field (decl
, true, 11, ctx
);
1299 install_var_field (decl
, false, 11, ctx
);
1300 if (DECL_SIZE (decl
)
1301 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1303 tree decl2
= DECL_VALUE_EXPR (decl
);
1304 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1305 decl2
= TREE_OPERAND (decl2
, 0);
1306 gcc_assert (DECL_P (decl2
));
1307 install_var_local (decl2
, ctx
);
1309 install_var_local (decl
, ctx
);
1312 case OMP_CLAUSE_IS_DEVICE_PTR
:
1313 decl
= OMP_CLAUSE_DECL (c
);
1316 case OMP_CLAUSE__LOOPTEMP_
:
1317 case OMP_CLAUSE__REDUCTEMP_
:
1318 gcc_assert (is_taskreg_ctx (ctx
));
1319 decl
= OMP_CLAUSE_DECL (c
);
1320 install_var_field (decl
, false, 3, ctx
);
1321 install_var_local (decl
, ctx
);
1324 case OMP_CLAUSE_COPYPRIVATE
:
1325 case OMP_CLAUSE_COPYIN
:
1326 decl
= OMP_CLAUSE_DECL (c
);
1327 by_ref
= use_pointer_for_field (decl
, NULL
);
1328 install_var_field (decl
, by_ref
, 3, ctx
);
1331 case OMP_CLAUSE_FINAL
:
1333 case OMP_CLAUSE_NUM_THREADS
:
1334 case OMP_CLAUSE_NUM_TEAMS
:
1335 case OMP_CLAUSE_THREAD_LIMIT
:
1336 case OMP_CLAUSE_DEVICE
:
1337 case OMP_CLAUSE_SCHEDULE
:
1338 case OMP_CLAUSE_DIST_SCHEDULE
:
1339 case OMP_CLAUSE_DEPEND
:
1340 case OMP_CLAUSE_PRIORITY
:
1341 case OMP_CLAUSE_GRAINSIZE
:
1342 case OMP_CLAUSE_NUM_TASKS
:
1343 case OMP_CLAUSE_NUM_GANGS
:
1344 case OMP_CLAUSE_NUM_WORKERS
:
1345 case OMP_CLAUSE_VECTOR_LENGTH
:
1347 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1351 case OMP_CLAUSE_FROM
:
1352 case OMP_CLAUSE_MAP
:
1354 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1355 decl
= OMP_CLAUSE_DECL (c
);
1356 /* Global variables with "omp declare target" attribute
1357 don't need to be copied, the receiver side will use them
1358 directly. However, global variables with "omp declare target link"
1359 attribute need to be copied. Or when ALWAYS modifier is used. */
1360 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1362 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1363 && (OMP_CLAUSE_MAP_KIND (c
)
1364 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
1365 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
1366 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
)
1367 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1368 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1369 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1370 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1371 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
1372 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1373 && varpool_node::get_create (decl
)->offloadable
1374 && !lookup_attribute ("omp declare target link",
1375 DECL_ATTRIBUTES (decl
)))
1377 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1378 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1380 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1381 not offloaded; there is nothing to map for those. */
1382 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1383 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1384 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1387 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1389 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1390 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1391 && is_omp_target (ctx
->stmt
))
1393 /* If this is an offloaded region, an attach operation should
1394 only exist when the pointer variable is mapped in a prior
1396 if (is_gimple_omp_offloaded (ctx
->stmt
))
1398 (maybe_lookup_decl (decl
, ctx
)
1399 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1400 && lookup_attribute ("omp declare target",
1401 DECL_ATTRIBUTES (decl
))));
1403 /* By itself, attach/detach is generated as part of pointer
1404 variable mapping and should not create new variables in the
1405 offloaded region, however sender refs for it must be created
1406 for its address to be passed to the runtime. */
1408 = build_decl (OMP_CLAUSE_LOCATION (c
),
1409 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1410 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1411 insert_field_into_struct (ctx
->record_type
, field
);
1412 /* To not clash with a map of the pointer variable itself,
1413 attach/detach maps have their field looked up by the *clause*
1414 tree expression, not the decl. */
1415 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1416 (splay_tree_key
) c
));
1417 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) c
,
1418 (splay_tree_value
) field
);
1421 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1422 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1423 || (OMP_CLAUSE_MAP_KIND (c
)
1424 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1426 if (TREE_CODE (decl
) == COMPONENT_REF
1427 || (TREE_CODE (decl
) == INDIRECT_REF
1428 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1429 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1430 == REFERENCE_TYPE
)))
1432 if (DECL_SIZE (decl
)
1433 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1435 tree decl2
= DECL_VALUE_EXPR (decl
);
1436 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1437 decl2
= TREE_OPERAND (decl2
, 0);
1438 gcc_assert (DECL_P (decl2
));
1439 install_var_local (decl2
, ctx
);
1441 install_var_local (decl
, ctx
);
1446 if (DECL_SIZE (decl
)
1447 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1449 tree decl2
= DECL_VALUE_EXPR (decl
);
1450 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1451 decl2
= TREE_OPERAND (decl2
, 0);
1452 gcc_assert (DECL_P (decl2
));
1453 install_var_field (decl2
, true, 3, ctx
);
1454 install_var_local (decl2
, ctx
);
1455 install_var_local (decl
, ctx
);
1459 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1460 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1461 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1462 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1463 install_var_field (decl
, true, 7, ctx
);
1465 install_var_field (decl
, true, 3, ctx
);
1466 if (is_gimple_omp_offloaded (ctx
->stmt
)
1467 && !OMP_CLAUSE_MAP_IN_REDUCTION (c
))
1468 install_var_local (decl
, ctx
);
1473 tree base
= get_base_address (decl
);
1474 tree nc
= OMP_CLAUSE_CHAIN (c
);
1477 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1478 && OMP_CLAUSE_DECL (nc
) == base
1479 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1480 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1482 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1483 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1489 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1490 decl
= OMP_CLAUSE_DECL (c
);
1492 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1493 (splay_tree_key
) decl
));
1495 = build_decl (OMP_CLAUSE_LOCATION (c
),
1496 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1497 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1498 insert_field_into_struct (ctx
->record_type
, field
);
1499 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1500 (splay_tree_value
) field
);
1505 case OMP_CLAUSE_ORDER
:
1506 ctx
->order_concurrent
= true;
1509 case OMP_CLAUSE_BIND
:
1513 case OMP_CLAUSE_NOWAIT
:
1514 case OMP_CLAUSE_ORDERED
:
1515 case OMP_CLAUSE_COLLAPSE
:
1516 case OMP_CLAUSE_UNTIED
:
1517 case OMP_CLAUSE_MERGEABLE
:
1518 case OMP_CLAUSE_PROC_BIND
:
1519 case OMP_CLAUSE_SAFELEN
:
1520 case OMP_CLAUSE_SIMDLEN
:
1521 case OMP_CLAUSE_THREADS
:
1522 case OMP_CLAUSE_SIMD
:
1523 case OMP_CLAUSE_NOGROUP
:
1524 case OMP_CLAUSE_DEFAULTMAP
:
1525 case OMP_CLAUSE_ASYNC
:
1526 case OMP_CLAUSE_WAIT
:
1527 case OMP_CLAUSE_GANG
:
1528 case OMP_CLAUSE_WORKER
:
1529 case OMP_CLAUSE_VECTOR
:
1530 case OMP_CLAUSE_INDEPENDENT
:
1531 case OMP_CLAUSE_AUTO
:
1532 case OMP_CLAUSE_SEQ
:
1533 case OMP_CLAUSE_TILE
:
1534 case OMP_CLAUSE__SIMT_
:
1535 case OMP_CLAUSE_DEFAULT
:
1536 case OMP_CLAUSE_NONTEMPORAL
:
1537 case OMP_CLAUSE_IF_PRESENT
:
1538 case OMP_CLAUSE_FINALIZE
:
1539 case OMP_CLAUSE_TASK_REDUCTION
:
1540 case OMP_CLAUSE_ALLOCATE
:
1543 case OMP_CLAUSE_ALIGNED
:
1544 decl
= OMP_CLAUSE_DECL (c
);
1545 if (is_global_var (decl
)
1546 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1547 install_var_local (decl
, ctx
);
1550 case OMP_CLAUSE__CONDTEMP_
:
1551 decl
= OMP_CLAUSE_DECL (c
);
1552 if (is_parallel_ctx (ctx
))
1554 install_var_field (decl
, false, 3, ctx
);
1555 install_var_local (decl
, ctx
);
1557 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1558 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1559 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1560 install_var_local (decl
, ctx
);
1563 case OMP_CLAUSE__CACHE_
:
1569 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1571 switch (OMP_CLAUSE_CODE (c
))
1573 case OMP_CLAUSE_LASTPRIVATE
:
1574 /* Let the corresponding firstprivate clause create
1576 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1577 scan_array_reductions
= true;
1578 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1582 case OMP_CLAUSE_FIRSTPRIVATE
:
1583 case OMP_CLAUSE_PRIVATE
:
1584 case OMP_CLAUSE_LINEAR
:
1585 case OMP_CLAUSE_IS_DEVICE_PTR
:
1586 decl
= OMP_CLAUSE_DECL (c
);
1587 if (is_variable_sized (decl
))
1589 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1590 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1591 && is_gimple_omp_offloaded (ctx
->stmt
))
1593 tree decl2
= DECL_VALUE_EXPR (decl
);
1594 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1595 decl2
= TREE_OPERAND (decl2
, 0);
1596 gcc_assert (DECL_P (decl2
));
1597 install_var_local (decl2
, ctx
);
1598 fixup_remapped_decl (decl2
, ctx
, false);
1600 install_var_local (decl
, ctx
);
1602 fixup_remapped_decl (decl
, ctx
,
1603 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1604 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1605 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1606 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1607 scan_array_reductions
= true;
1610 case OMP_CLAUSE_REDUCTION
:
1611 case OMP_CLAUSE_IN_REDUCTION
:
1612 decl
= OMP_CLAUSE_DECL (c
);
1613 if (TREE_CODE (decl
) != MEM_REF
)
1615 if (is_variable_sized (decl
))
1616 install_var_local (decl
, ctx
);
1617 fixup_remapped_decl (decl
, ctx
, false);
1619 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1620 scan_array_reductions
= true;
1623 case OMP_CLAUSE_TASK_REDUCTION
:
1624 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1625 scan_array_reductions
= true;
1628 case OMP_CLAUSE_SHARED
:
1629 /* Ignore shared directives in teams construct inside of
1630 target construct. */
1631 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1632 && !is_host_teams_ctx (ctx
))
1634 decl
= OMP_CLAUSE_DECL (c
);
1635 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1637 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1639 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1642 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1643 install_var_field (decl
, by_ref
, 11, ctx
);
1646 fixup_remapped_decl (decl
, ctx
, false);
1649 case OMP_CLAUSE_MAP
:
1650 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1652 decl
= OMP_CLAUSE_DECL (c
);
1654 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1655 && (OMP_CLAUSE_MAP_KIND (c
)
1656 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1657 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1658 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1659 && varpool_node::get_create (decl
)->offloadable
)
1661 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1662 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1663 && is_omp_target (ctx
->stmt
)
1664 && !is_gimple_omp_offloaded (ctx
->stmt
))
1668 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1669 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1670 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1671 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1673 tree new_decl
= lookup_decl (decl
, ctx
);
1674 TREE_TYPE (new_decl
)
1675 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1677 else if (DECL_SIZE (decl
)
1678 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1680 tree decl2
= DECL_VALUE_EXPR (decl
);
1681 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1682 decl2
= TREE_OPERAND (decl2
, 0);
1683 gcc_assert (DECL_P (decl2
));
1684 fixup_remapped_decl (decl2
, ctx
, false);
1685 fixup_remapped_decl (decl
, ctx
, true);
1688 fixup_remapped_decl (decl
, ctx
, false);
1692 case OMP_CLAUSE_COPYPRIVATE
:
1693 case OMP_CLAUSE_COPYIN
:
1694 case OMP_CLAUSE_DEFAULT
:
1696 case OMP_CLAUSE_NUM_THREADS
:
1697 case OMP_CLAUSE_NUM_TEAMS
:
1698 case OMP_CLAUSE_THREAD_LIMIT
:
1699 case OMP_CLAUSE_DEVICE
:
1700 case OMP_CLAUSE_SCHEDULE
:
1701 case OMP_CLAUSE_DIST_SCHEDULE
:
1702 case OMP_CLAUSE_NOWAIT
:
1703 case OMP_CLAUSE_ORDERED
:
1704 case OMP_CLAUSE_COLLAPSE
:
1705 case OMP_CLAUSE_UNTIED
:
1706 case OMP_CLAUSE_FINAL
:
1707 case OMP_CLAUSE_MERGEABLE
:
1708 case OMP_CLAUSE_PROC_BIND
:
1709 case OMP_CLAUSE_SAFELEN
:
1710 case OMP_CLAUSE_SIMDLEN
:
1711 case OMP_CLAUSE_ALIGNED
:
1712 case OMP_CLAUSE_DEPEND
:
1713 case OMP_CLAUSE_ALLOCATE
:
1714 case OMP_CLAUSE__LOOPTEMP_
:
1715 case OMP_CLAUSE__REDUCTEMP_
:
1717 case OMP_CLAUSE_FROM
:
1718 case OMP_CLAUSE_PRIORITY
:
1719 case OMP_CLAUSE_GRAINSIZE
:
1720 case OMP_CLAUSE_NUM_TASKS
:
1721 case OMP_CLAUSE_THREADS
:
1722 case OMP_CLAUSE_SIMD
:
1723 case OMP_CLAUSE_NOGROUP
:
1724 case OMP_CLAUSE_DEFAULTMAP
:
1725 case OMP_CLAUSE_ORDER
:
1726 case OMP_CLAUSE_BIND
:
1727 case OMP_CLAUSE_USE_DEVICE_PTR
:
1728 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1729 case OMP_CLAUSE_NONTEMPORAL
:
1730 case OMP_CLAUSE_ASYNC
:
1731 case OMP_CLAUSE_WAIT
:
1732 case OMP_CLAUSE_NUM_GANGS
:
1733 case OMP_CLAUSE_NUM_WORKERS
:
1734 case OMP_CLAUSE_VECTOR_LENGTH
:
1735 case OMP_CLAUSE_GANG
:
1736 case OMP_CLAUSE_WORKER
:
1737 case OMP_CLAUSE_VECTOR
:
1738 case OMP_CLAUSE_INDEPENDENT
:
1739 case OMP_CLAUSE_AUTO
:
1740 case OMP_CLAUSE_SEQ
:
1741 case OMP_CLAUSE_TILE
:
1742 case OMP_CLAUSE__SIMT_
:
1743 case OMP_CLAUSE_IF_PRESENT
:
1744 case OMP_CLAUSE_FINALIZE
:
1745 case OMP_CLAUSE__CONDTEMP_
:
1748 case OMP_CLAUSE__CACHE_
:
1754 gcc_checking_assert (!scan_array_reductions
1755 || !is_gimple_omp_oacc (ctx
->stmt
));
1756 if (scan_array_reductions
)
1758 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1759 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1760 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1761 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1762 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1764 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), ctx
);
1765 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
1767 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1768 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1769 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1770 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1771 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1772 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1776 /* Create a new name for omp child function. Returns an identifier. */
1779 create_omp_child_function_name (bool task_copy
)
1781 return clone_function_name_numbered (current_function_decl
,
1782 task_copy
? "_omp_cpyfn" : "_omp_fn");
1785 /* Return true if CTX may belong to offloaded code: either if current function
1786 is offloaded, or any enclosing context corresponds to a target region. */
1789 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1791 if (cgraph_node::get (current_function_decl
)->offloadable
)
1793 for (; ctx
; ctx
= ctx
->outer
)
1794 if (is_gimple_omp_offloaded (ctx
->stmt
))
1799 /* Build a decl for the omp child function. It'll not contain a body
1800 yet, just the bare decl. */
1803 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1805 tree decl
, type
, name
, t
;
1807 name
= create_omp_child_function_name (task_copy
);
1809 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1810 ptr_type_node
, NULL_TREE
);
1812 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1814 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1816 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1819 ctx
->cb
.dst_fn
= decl
;
1821 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1823 TREE_STATIC (decl
) = 1;
1824 TREE_USED (decl
) = 1;
1825 DECL_ARTIFICIAL (decl
) = 1;
1826 DECL_IGNORED_P (decl
) = 0;
1827 TREE_PUBLIC (decl
) = 0;
1828 DECL_UNINLINABLE (decl
) = 1;
1829 DECL_EXTERNAL (decl
) = 0;
1830 DECL_CONTEXT (decl
) = NULL_TREE
;
1831 DECL_INITIAL (decl
) = make_node (BLOCK
);
1832 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1833 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1834 /* Remove omp declare simd attribute from the new attributes. */
1835 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1837 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1840 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1841 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1842 *p
= TREE_CHAIN (*p
);
1845 tree chain
= TREE_CHAIN (*p
);
1846 *p
= copy_node (*p
);
1847 p
= &TREE_CHAIN (*p
);
1851 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1852 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1853 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
1854 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
1855 DECL_FUNCTION_VERSIONED (decl
)
1856 = DECL_FUNCTION_VERSIONED (current_function_decl
);
1858 if (omp_maybe_offloaded_ctx (ctx
))
1860 cgraph_node::get_create (decl
)->offloadable
= 1;
1861 if (ENABLE_OFFLOADING
)
1862 g
->have_offload
= true;
1865 if (cgraph_node::get_create (decl
)->offloadable
1866 && !lookup_attribute ("omp declare target",
1867 DECL_ATTRIBUTES (current_function_decl
)))
1869 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1870 ? "omp target entrypoint"
1871 : "omp declare target");
1872 DECL_ATTRIBUTES (decl
)
1873 = tree_cons (get_identifier (target_attr
),
1874 NULL_TREE
, DECL_ATTRIBUTES (decl
));
1877 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1878 RESULT_DECL
, NULL_TREE
, void_type_node
);
1879 DECL_ARTIFICIAL (t
) = 1;
1880 DECL_IGNORED_P (t
) = 1;
1881 DECL_CONTEXT (t
) = decl
;
1882 DECL_RESULT (decl
) = t
;
1884 tree data_name
= get_identifier (".omp_data_i");
1885 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
1887 DECL_ARTIFICIAL (t
) = 1;
1888 DECL_NAMELESS (t
) = 1;
1889 DECL_ARG_TYPE (t
) = ptr_type_node
;
1890 DECL_CONTEXT (t
) = current_function_decl
;
1892 TREE_READONLY (t
) = 1;
1893 DECL_ARGUMENTS (decl
) = t
;
1895 ctx
->receiver_decl
= t
;
1898 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1899 PARM_DECL
, get_identifier (".omp_data_o"),
1901 DECL_ARTIFICIAL (t
) = 1;
1902 DECL_NAMELESS (t
) = 1;
1903 DECL_ARG_TYPE (t
) = ptr_type_node
;
1904 DECL_CONTEXT (t
) = current_function_decl
;
1906 TREE_ADDRESSABLE (t
) = 1;
1907 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1908 DECL_ARGUMENTS (decl
) = t
;
1911 /* Allocate memory for the function structure. The call to
1912 allocate_struct_function clobbers CFUN, so we need to restore
1914 push_struct_function (decl
);
1915 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
1916 init_tree_ssa (cfun
);
1920 /* Callback for walk_gimple_seq. Check if combined parallel
1921 contains gimple_omp_for_combined_into_p OMP_FOR. */
1924 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
1925 bool *handled_ops_p
,
1926 struct walk_stmt_info
*wi
)
1928 gimple
*stmt
= gsi_stmt (*gsi_p
);
1930 *handled_ops_p
= true;
1931 switch (gimple_code (stmt
))
1935 case GIMPLE_OMP_FOR
:
1936 if (gimple_omp_for_combined_into_p (stmt
)
1937 && gimple_omp_for_kind (stmt
)
1938 == *(const enum gf_mask
*) (wi
->info
))
1941 return integer_zero_node
;
1950 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1953 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
1954 omp_context
*outer_ctx
)
1956 struct walk_stmt_info wi
;
1958 memset (&wi
, 0, sizeof (wi
));
1960 wi
.info
= (void *) &msk
;
1961 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
1962 if (wi
.info
!= (void *) &msk
)
1964 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
1965 struct omp_for_data fd
;
1966 omp_extract_for_data (for_stmt
, &fd
, NULL
);
1967 /* We need two temporaries with fd.loop.v type (istart/iend)
1968 and then (fd.collapse - 1) temporaries with the same
1969 type for count2 ... countN-1 vars if not constant. */
1970 size_t count
= 2, i
;
1971 tree type
= fd
.iter_type
;
1973 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
1975 count
+= fd
.collapse
- 1;
1976 /* If there are lastprivate clauses on the inner
1977 GIMPLE_OMP_FOR, add one more temporaries for the total number
1978 of iterations (product of count1 ... countN-1). */
1979 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
1980 OMP_CLAUSE_LASTPRIVATE
)
1981 || (msk
== GF_OMP_FOR_KIND_FOR
1982 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1983 OMP_CLAUSE_LASTPRIVATE
)))
1985 tree temp
= create_tmp_var (type
);
1986 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
1987 OMP_CLAUSE__LOOPTEMP_
);
1988 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1989 OMP_CLAUSE_DECL (c
) = temp
;
1990 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1991 gimple_omp_taskreg_set_clauses (stmt
, c
);
1994 && fd
.last_nonrect
== fd
.first_nonrect
+ 1)
1995 if (tree v
= gimple_omp_for_index (for_stmt
, fd
.last_nonrect
))
1996 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
1998 v
= gimple_omp_for_index (for_stmt
, fd
.first_nonrect
);
1999 tree type2
= TREE_TYPE (v
);
2001 for (i
= 0; i
< 3; i
++)
2003 tree temp
= create_tmp_var (type2
);
2004 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2005 OMP_CLAUSE__LOOPTEMP_
);
2006 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2007 OMP_CLAUSE_DECL (c
) = temp
;
2008 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2009 gimple_omp_taskreg_set_clauses (stmt
, c
);
2013 for (i
= 0; i
< count
; i
++)
2015 tree temp
= create_tmp_var (type
);
2016 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
2017 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2018 OMP_CLAUSE_DECL (c
) = temp
;
2019 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2020 gimple_omp_taskreg_set_clauses (stmt
, c
);
2023 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
2024 && omp_find_clause (gimple_omp_task_clauses (stmt
),
2025 OMP_CLAUSE_REDUCTION
))
2027 tree type
= build_pointer_type (pointer_sized_int_node
);
2028 tree temp
= create_tmp_var (type
);
2029 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2030 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2031 OMP_CLAUSE_DECL (c
) = temp
;
2032 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
2033 gimple_omp_task_set_clauses (stmt
, c
);
2037 /* Scan an OpenMP parallel directive. */
2040 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2044 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
2046 /* Ignore parallel directives with empty bodies, unless there
2047 are copyin clauses. */
2049 && empty_body_p (gimple_omp_body (stmt
))
2050 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2051 OMP_CLAUSE_COPYIN
) == NULL
)
2053 gsi_replace (gsi
, gimple_build_nop (), false);
2057 if (gimple_omp_parallel_combined_p (stmt
))
2058 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
2059 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2060 OMP_CLAUSE_REDUCTION
);
2061 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
2062 if (OMP_CLAUSE_REDUCTION_TASK (c
))
2064 tree type
= build_pointer_type (pointer_sized_int_node
);
2065 tree temp
= create_tmp_var (type
);
2066 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2068 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2069 OMP_CLAUSE_DECL (c
) = temp
;
2070 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
2071 gimple_omp_parallel_set_clauses (stmt
, c
);
2074 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
2077 ctx
= new_omp_context (stmt
, outer_ctx
);
2078 taskreg_contexts
.safe_push (ctx
);
2079 if (taskreg_nesting_level
> 1)
2080 ctx
->is_nested
= true;
2081 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2082 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2083 name
= create_tmp_var_name (".omp_data_s");
2084 name
= build_decl (gimple_location (stmt
),
2085 TYPE_DECL
, name
, ctx
->record_type
);
2086 DECL_ARTIFICIAL (name
) = 1;
2087 DECL_NAMELESS (name
) = 1;
2088 TYPE_NAME (ctx
->record_type
) = name
;
2089 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2090 create_omp_child_function (ctx
, false);
2091 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2093 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
2094 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2096 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2097 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2100 /* Scan an OpenMP task directive. */
2103 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2107 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
2109 /* Ignore task directives with empty bodies, unless they have depend
2112 && gimple_omp_body (stmt
)
2113 && empty_body_p (gimple_omp_body (stmt
))
2114 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
2116 gsi_replace (gsi
, gimple_build_nop (), false);
2120 if (gimple_omp_task_taskloop_p (stmt
))
2121 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
2123 ctx
= new_omp_context (stmt
, outer_ctx
);
2125 if (gimple_omp_task_taskwait_p (stmt
))
2127 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2131 taskreg_contexts
.safe_push (ctx
);
2132 if (taskreg_nesting_level
> 1)
2133 ctx
->is_nested
= true;
2134 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2135 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2136 name
= create_tmp_var_name (".omp_data_s");
2137 name
= build_decl (gimple_location (stmt
),
2138 TYPE_DECL
, name
, ctx
->record_type
);
2139 DECL_ARTIFICIAL (name
) = 1;
2140 DECL_NAMELESS (name
) = 1;
2141 TYPE_NAME (ctx
->record_type
) = name
;
2142 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2143 create_omp_child_function (ctx
, false);
2144 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2146 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2148 if (ctx
->srecord_type
)
2150 name
= create_tmp_var_name (".omp_data_a");
2151 name
= build_decl (gimple_location (stmt
),
2152 TYPE_DECL
, name
, ctx
->srecord_type
);
2153 DECL_ARTIFICIAL (name
) = 1;
2154 DECL_NAMELESS (name
) = 1;
2155 TYPE_NAME (ctx
->srecord_type
) = name
;
2156 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2157 create_omp_child_function (ctx
, true);
2160 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2162 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2164 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2165 t
= build_int_cst (long_integer_type_node
, 0);
2166 gimple_omp_task_set_arg_size (stmt
, t
);
2167 t
= build_int_cst (long_integer_type_node
, 1);
2168 gimple_omp_task_set_arg_align (stmt
, t
);
2172 /* Helper function for finish_taskreg_scan, called through walk_tree.
2173 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2174 tree, replace it in the expression. */
2177 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2181 omp_context
*ctx
= (omp_context
*) data
;
2182 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
2185 if (DECL_HAS_VALUE_EXPR_P (t
))
2186 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2191 else if (IS_TYPE_OR_DECL_P (*tp
))
2196 /* If any decls have been made addressable during scan_omp,
2197 adjust their fields if needed, and layout record types
2198 of parallel/task constructs. */
/* Post-scan fixups for a parallel/task/teams context CTX: re-check
   use_pointer_for_field for shared clauses after decls may have been
   made addressable, reorder special record fields (_reductemp_,
   _looptemp_, VLA fields), lay out CTX->record_type/srecord_type, and
   for tasks compute the argument size/alignment fed to the runtime.
   NOTE(review): many original lines (braces, loop headers, a few
   statements) are elided from this extraction; comments only were
   added, no code reconstructed.  */
2201 finish_taskreg_scan (omp_context
*ctx
)
/* Nothing to do when no sender/receiver record was built.  */
2203 if (ctx
->record_type
== NULL_TREE
)
2206 /* If any task_shared_vars were needed, verify all
2207 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2208 statements if use_pointer_for_field hasn't changed
2209 because of that. If it did, update field types now. */
2210 if (task_shared_vars
)
2214 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2215 c
; c
= OMP_CLAUSE_CHAIN (c
))
2216 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2217 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2219 tree decl
= OMP_CLAUSE_DECL (c
);
2221 /* Global variables don't need to be copied,
2222 the receiver side will use them directly. */
2223 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
/* Skip decls that were not made addressable, or that still do not
   require by-reference passing.  */
2225 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
2226 || !use_pointer_for_field (decl
, ctx
))
2228 tree field
= lookup_field (decl
, ctx
);
/* If the field already is a pointer to the decl's type, the record
   needs no adjustment for this clause.  */
2229 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2230 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
/* Retype the field as a pointer and reset volatility/alignment to
   match the new pointer type.  */
2232 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2233 TREE_THIS_VOLATILE (field
) = 0;
2234 DECL_USER_ALIGN (field
) = 0;
2235 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2236 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2237 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
/* Keep the sender-side record in sync with the receiver-side one.  */
2238 if (ctx
->srecord_type
)
2240 tree sfield
= lookup_sfield (decl
, ctx
);
2241 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2242 TREE_THIS_VOLATILE (sfield
) = 0;
2243 DECL_USER_ALIGN (sfield
) = 0;
2244 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2245 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2246 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
/* Parallel: possibly hoist the _reductemp_ field to the front, then
   lay the record out.  */
2251 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2253 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2254 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2257 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2258 expects to find it at the start of data. */
2259 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2260 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
/* Unlink F from its current position in the field chain...  */
2264 *p
= DECL_CHAIN (*p
);
2268 p
= &DECL_CHAIN (*p
);
/* ...and splice it back in at the head of the record.  */
2269 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2270 TYPE_FIELDS (ctx
->record_type
) = f
;
2272 layout_type (ctx
->record_type
);
2273 fixup_child_record_type (ctx
);
/* Teams need only layout plus child-record fixup.  */
2275 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2277 layout_type (ctx
->record_type
);
2278 fixup_child_record_type (ctx
);
/* Remaining path: task contexts.  */
2282 location_t loc
= gimple_location (ctx
->stmt
);
2283 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2284 /* Move VLA fields to the end. */
2285 p
= &TYPE_FIELDS (ctx
->record_type
);
/* A field whose type has no constant size unit is a VLA field.  */
2287 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2288 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
/* Detach the VLA field and append it to the vla_fields list.  */
2291 *p
= TREE_CHAIN (*p
);
2292 TREE_CHAIN (*q
) = NULL_TREE
;
2293 q
= &TREE_CHAIN (*q
);
2296 p
= &DECL_CHAIN (*p
);
2298 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2300 /* Move fields corresponding to first and second _looptemp_
2301 clause first. There are filled by GOMP_taskloop
2302 and thus need to be in specific positions. */
2303 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2304 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2305 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2306 OMP_CLAUSE__LOOPTEMP_
);
/* A _reductemp_ clause is optional, hence the NULL_TREE guard on F3
   below.  */
2307 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2308 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2309 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2310 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
/* Unlink f1/f2/f3 wherever they currently sit...  */
2311 p
= &TYPE_FIELDS (ctx
->record_type
);
2313 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2314 *p
= DECL_CHAIN (*p
);
2316 p
= &DECL_CHAIN (*p
);
/* ...then chain them at the head in the order f1, f2[, f3].  */
2317 DECL_CHAIN (f1
) = f2
;
2320 DECL_CHAIN (f2
) = f3
;
2321 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2324 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2325 TYPE_FIELDS (ctx
->record_type
) = f1
;
/* Mirror the same reordering in the sender-side record.  */
2326 if (ctx
->srecord_type
)
2328 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2329 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2331 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2332 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2334 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2335 *p
= DECL_CHAIN (*p
);
2337 p
= &DECL_CHAIN (*p
);
2338 DECL_CHAIN (f1
) = f2
;
2339 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2342 DECL_CHAIN (f2
) = f3
;
2343 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2346 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2347 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
/* Lay out both records now that field order is final.  */
2350 layout_type (ctx
->record_type
);
2351 fixup_child_record_type (ctx
);
2352 if (ctx
->srecord_type
)
2353 layout_type (ctx
->srecord_type
);
/* Compute the argument block size for GOMP_task and friends.  */
2354 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2355 TYPE_SIZE_UNIT (ctx
->record_type
));
/* A non-constant size (VLA fields) contains decls that may need
   remapping into this context — walk and fix them up.  */
2356 if (TREE_CODE (t
) != INTEGER_CST
)
2358 t
= unshare_expr (t
);
2359 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2361 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
/* Alignment is always a compile-time constant.  */
2362 t
= build_int_cst (long_integer_type_node
,
2363 TYPE_ALIGN_UNIT (ctx
->record_type
));
2364 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2368 /* Find the enclosing offload context. */
/* Walk outward through CTX->outer chains and return the innermost
   context whose statement is a GIMPLE_OMP_TARGET, i.e. the enclosing
   offload region.
   NOTE(review): the return statements are elided in this extraction;
   no code was reconstructed.  */
2370 static omp_context
*
2371 enclosing_target_ctx (omp_context
*ctx
)
/* Scan from CTX outward; stops at the first target construct.  */
2373 for (; ctx
; ctx
= ctx
->outer
)
2374 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2380 /* Return true if ctx is part of an oacc kernels region. */
/* Return whether CTX is (lexically) contained in an OpenACC kernels
   region, by walking the outer-context chain looking for a
   GIMPLE_OMP_TARGET with kind GF_OMP_TARGET_KIND_OACC_KERNELS.
   NOTE(review): returns are elided in this extraction.  */
2383 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2385 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2387 gimple
*stmt
= ctx
->stmt
;
/* Match only offload targets that are OpenACC kernels constructs.  */
2388 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2389 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2396 /* Check the parallelism clauses inside a kernels regions.
2397 Until kernels handling moves to use the same loop indirection
2398 scheme as parallel, we need to do this checking early. */
/* Diagnose the gang/worker/vector/seq/auto parallelism clauses on an
   OpenACC loop STMT inside a kernels region.  Recurses outward through
   CTX to accumulate the parallelism mask already claimed by enclosing
   loops and returns the union of that mask with this loop's mask.
   When called recursively with STMT == NULL it only collects masks
   (the `checking' flag — see its initializer below).
   NOTE(review): braces, `break' statements and some recursion guards
   are elided in this extraction; comments only were added.  */
2401 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2403 bool checking
= true;
2404 unsigned outer_mask
= 0;
2405 unsigned this_mask
= 0;
2406 bool has_seq
= false, has_auto
= false;
/* Recurse first so outer_mask reflects all enclosing loops.  */
2409 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2413 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2415 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
/* Accumulate this loop's gang/worker/vector mask and note seq/auto.  */
2418 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2420 switch (OMP_CLAUSE_CODE (c
))
2422 case OMP_CLAUSE_GANG
:
2423 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2425 case OMP_CLAUSE_WORKER
:
2426 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2428 case OMP_CLAUSE_VECTOR
:
2429 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2431 case OMP_CLAUSE_SEQ
:
2434 case OMP_CLAUSE_AUTO
:
/* seq excludes every other specifier; auto excludes explicit g/w/v.  */
2444 if (has_seq
&& (this_mask
|| has_auto
))
2445 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2446 " OpenACC loop specifiers");
2447 else if (has_auto
&& this_mask
)
2448 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2449 " OpenACC loop specifiers");
/* A nested loop may not reuse a parallelism axis already claimed by a
   containing loop.  */
2451 if (this_mask
& outer_mask
)
2452 error_at (gimple_location (stmt
), "inner loop uses same"
2453 " OpenACC parallelism as containing loop");
/* Propagate the combined mask to the caller's recursion.  */
2456 return outer_mask
| this_mask
;
2459 /* Scan a GIMPLE_OMP_FOR. */
/* Scan a GIMPLE_OMP_FOR STMT under OUTER_CTX: create its omp_context,
   diagnose OpenACC gang/worker/vector argument misuse and nested
   reduction inconsistencies, strip reductions inside OpenACC kernels,
   then scan the clauses, pre-body, loop control operands and body.
   Returns the new context (return elided in this extraction).
   NOTE(review): braces, some declarations (e.g. `ctx', `c_op0', `i')
   and several statements are elided; comments only were added.  */
2461 static omp_context
*
2462 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2466 tree clauses
= gimple_omp_for_clauses (stmt
);
2468 ctx
= new_omp_context (stmt
, outer_ctx
);
/* OpenACC-specific checking applies only to OpenACC loops.  */
2470 if (is_gimple_omp_oacc (stmt
))
2472 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
/* Inside parallel/serial compute constructs (or orphaned routines),
   gang/worker/vector clause arguments are not permitted.  */
2474 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
2475 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2478 switch (OMP_CLAUSE_CODE (c
))
2480 case OMP_CLAUSE_GANG
:
2481 c_op0
= OMP_CLAUSE_GANG_EXPR (c
);
2484 case OMP_CLAUSE_WORKER
:
2485 c_op0
= OMP_CLAUSE_WORKER_EXPR (c
);
2488 case OMP_CLAUSE_VECTOR
:
2489 c_op0
= OMP_CLAUSE_VECTOR_EXPR (c
);
2498 error_at (OMP_CLAUSE_LOCATION (c
),
2499 "argument not permitted on %qs clause",
2500 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
/* Point at the construct (or routine) that forbids the argument.  */
2502 inform (gimple_location (tgt
->stmt
),
2503 "enclosing parent compute construct");
2504 else if (oacc_get_fn_attrib (current_function_decl
))
2505 inform (DECL_SOURCE_LOCATION (current_function_decl
),
2506 "enclosing routine");
/* Kernels regions get the stricter gang/worker/vector nesting check.  */
2512 if (tgt
&& is_oacc_kernels (tgt
))
2513 check_oacc_kernel_gwv (stmt
, ctx
);
2515 /* Collect all variables named in reductions on this loop. Ensure
2516 that, if this loop has a reduction on some variable v, and there is
2517 a reduction on v somewhere in an outer context, then there is a
2518 reduction on v on all intervening loops as well. */
2519 tree local_reduction_clauses
= NULL
;
2520 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2522 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
2523 local_reduction_clauses
2524 = tree_cons (NULL
, c
, local_reduction_clauses
)
;
/* Lazily inherit the outer context's reduction-clause list.  */
2526 if (ctx
->outer_reduction_clauses
== NULL
&& ctx
->outer
!= NULL
)
2527 ctx
->outer_reduction_clauses
2528 = chainon (unshare_expr (ctx
->outer
->local_reduction_clauses
),
2529 ctx
->outer
->outer_reduction_clauses
);
2530 tree outer_reduction_clauses
= ctx
->outer_reduction_clauses
;
2531 tree local_iter
= local_reduction_clauses
;
/* Cross-check every local reduction against all outer reductions.  */
2532 for (; local_iter
; local_iter
= TREE_CHAIN (local_iter
))
2534 tree local_clause
= TREE_VALUE (local_iter
);
2535 tree local_var
= OMP_CLAUSE_DECL (local_clause
);
2536 tree_code local_op
= OMP_CLAUSE_REDUCTION_CODE (local_clause
);
2537 bool have_outer_reduction
= false;
2538 tree ctx_iter
= outer_reduction_clauses
;
2539 for (; ctx_iter
; ctx_iter
= TREE_CHAIN (ctx_iter
))
2541 tree outer_clause
= TREE_VALUE (ctx_iter
);
2542 tree outer_var
= OMP_CLAUSE_DECL (outer_clause
);
2543 tree_code outer_op
= OMP_CLAUSE_REDUCTION_CODE (outer_clause
);
/* Same variable reduced with a different operation is a conflict.  */
2544 if (outer_var
== local_var
&& outer_op
!= local_op
)
2546 warning_at (OMP_CLAUSE_LOCATION (local_clause
), 0,
2547 "conflicting reduction operations for %qE",
2549 inform (OMP_CLAUSE_LOCATION (outer_clause
),
2550 "location of the previous reduction for %qE",
2553 if (outer_var
== local_var
)
2555 have_outer_reduction
= true;
2559 if (have_outer_reduction
)
2561 /* There is a reduction on outer_var both on this loop and on
2562 some enclosing loop. Walk up the context tree until such a
2563 loop with a reduction on outer_var is found, and complain
2564 about all intervening loops that do not have such a
2566 struct omp_context
*curr_loop
= ctx
->outer
;
2568 while (curr_loop
!= NULL
)
2570 tree curr_iter
= curr_loop
->local_reduction_clauses
;
2571 for (; curr_iter
; curr_iter
= TREE_CHAIN (curr_iter
))
2573 tree curr_clause
= TREE_VALUE (curr_iter
);
2574 tree curr_var
= OMP_CLAUSE_DECL (curr_clause
);
2575 if (curr_var
== local_var
)
/* Intervening loop lacks a matching reduction clause — warn.  */
2582 warning_at (gimple_location (curr_loop
->stmt
), 0,
2583 "nested loop in reduction needs "
2584 "reduction clause for %qE",
2588 curr_loop
= curr_loop
->outer
;
/* Record this loop's reduction lists for inner loops to consult.  */
2592 ctx
->local_reduction_clauses
= local_reduction_clauses
;
2593 ctx
->outer_reduction_clauses
2594 = chainon (unshare_expr (ctx
->local_reduction_clauses
),
2595 ctx
->outer_reduction_clauses
);
2597 if (tgt
&& is_oacc_kernels (tgt
))
2599 /* Strip out reductions, as they are not handled yet. */
2600 tree
*prev_ptr
= &clauses
;
/* Classic singly-linked-list filter via a pointer-to-pointer.  */
2602 while (tree probe
= *prev_ptr
)
2604 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2606 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2607 *prev_ptr
= *next_ptr
;
2609 prev_ptr
= next_ptr
;
2612 gimple_omp_for_set_clauses (stmt
, clauses
);
/* Finally scan clauses, the pre-body and every collapsed dimension's
   index/initial/final/increment operands, then the loop body.  */
2616 scan_sharing_clauses (clauses
, ctx
);
2618 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2619 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2621 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2622 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2623 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2624 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2626 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2630 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
/* Duplicate a `#pragma omp simd' loop STMT into two alternatives
   selected at run time by IFN_GOMP_USE_SIMT: a SIMT copy (tagged with
   an OMP_CLAUSE__SIMT_ clause) and the original SIMD loop.  The pair
   is wrapped in a GIMPLE_BIND with the structure
     cond = GOMP_USE_SIMT ();
     if (cond) { simt copy } else { original }
   which replaces STMT at GSI; both copies are then scanned under
   OUTER_CTX, and the original's context records the SIMT twin.
   NOTE(review): a few original lines are elided in this extraction;
   comments only were added.  */
2633 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2634 omp_context
*outer_ctx
)
/* Build the bind that will hold the runtime dispatch and swap it in
   for the original loop statement.  */
2636 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2637 gsi_replace (gsi
, bind
, false);
2638 gimple_seq seq
= NULL
;
/* cond = GOMP_USE_SIMT () — nonzero selects the SIMT variant.  */
2639 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2640 tree cond
= create_tmp_var_raw (integer_type_node
);
2641 DECL_CONTEXT (cond
) = current_function_decl
;
2642 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2643 gimple_bind_set_vars (bind
, cond
);
2644 gimple_call_set_lhs (g
, cond
);
2645 gimple_seq_add_stmt (&seq
, g
);
/* lab1: SIMT branch, lab2: SIMD branch, lab3: join point.  */
2646 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2647 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2648 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2649 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2650 gimple_seq_add_stmt (&seq
, g
);
2651 g
= gimple_build_label (lab1
);
2652 gimple_seq_add_stmt (&seq
, g
);
/* Deep-copy the loop (remapping locals) for the SIMT variant and mark
   it with an artificial _simt_ clause at the head of its clauses.  */
2653 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2654 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2655 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2656 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2657 gimple_omp_for_set_clauses (new_stmt
, clause
);
2658 gimple_seq_add_stmt (&seq
, new_stmt
);
2659 g
= gimple_build_goto (lab3
);
2660 gimple_seq_add_stmt (&seq
, g
);
2661 g
= gimple_build_label (lab2
);
2662 gimple_seq_add_stmt (&seq
, g
);
/* The untouched original loop forms the else-branch.  */
2663 gimple_seq_add_stmt (&seq
, stmt
);
2664 g
= gimple_build_label (lab3
);
2665 gimple_seq_add_stmt (&seq
, g
);
2666 gimple_bind_set_body (bind
, seq
);
/* Scan both copies; link the SIMT twin into the original's context.  */
2668 scan_omp_for (new_stmt
, outer_ctx
);
2669 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2672 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2673 struct walk_stmt_info
*);
2674 static omp_context
*maybe_lookup_ctx (gimple
*);
2676 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2677 for scan phase loop. */
/* Duplicate a `#pragma omp simd' loop STMT containing a scan
   (inclusive/exclusive) directive: one copy becomes the scan *input*
   phase, a deep copy becomes the scan phase proper.  The original at
   GSI is replaced by a GIMPLE_OMP_SCAN wrapping the input-phase loop,
   followed by a second GIMPLE_OMP_SCAN (carrying an INCLUSIVE clause)
   wrapping the copy.  Both are scanned under OUTER_CTX and the copy's
   context is flagged as the for-simd scan phase.
   NOTE(review): several original lines (braces, wi.val_only setting,
   a walk argument, some body swaps) are elided in this extraction;
   comments only were added.  */
2680 scan_omp_simd_scan (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2681 omp_context
*outer_ctx
)
2683 /* The only change between inclusive and exclusive scan will be
2684 within the first simd loop, so just use inclusive in the
2685 worksharing loop. */
2686 outer_ctx
->scan_inclusive
= true;
2687 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_INCLUSIVE
);
2688 OMP_CLAUSE_DECL (c
) = integer_zero_node
;
/* Outer wrappers: input phase scan (no clause) and scan phase
   (inclusive clause built above).  */
2690 gomp_scan
*input_stmt
= gimple_build_omp_scan (NULL
, NULL_TREE
);
2691 gomp_scan
*scan_stmt
= gimple_build_omp_scan (NULL
, c
);
2692 gsi_replace (gsi
, input_stmt
, false);
2693 gimple_seq input_body
= NULL
;
2694 gimple_seq_add_stmt (&input_body
, stmt
);
2695 gsi_insert_after (gsi
, scan_stmt
, GSI_NEW_STMT
);
/* Locate the inner GIMPLE_OMP_SCAN separator inside the original
   loop body via omp_find_scan.  */
2697 gimple_stmt_iterator input1_gsi
= gsi_none ();
2698 struct walk_stmt_info wi
;
2699 memset (&wi
, 0, sizeof (wi
));
2701 wi
.info
= (void *) &input1_gsi
;
2702 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), omp_find_scan
, NULL
, &wi
);
2703 gcc_assert (!gsi_end_p (input1_gsi
));
2705 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
2706 gsi_next (&input1_gsi
);
2707 gimple
*scan_stmt1
= gsi_stmt (input1_gsi
);
2708 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
/* For exclusive scans the roles of the two halves are reversed.  */
2709 c
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (scan_stmt1
));
2710 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2711 std::swap (input_stmt1
, scan_stmt1
);
/* Temporarily detach the input half's body so the deep copy below
   does not duplicate it, then restore it.  */
2713 gimple_seq input_body1
= gimple_omp_body (input_stmt1
);
2714 gimple_omp_set_body (input_stmt1
, NULL
);
2716 gimple_seq scan_body
= copy_gimple_seq_and_replace_locals (stmt
);
2717 gomp_for
*new_stmt
= as_a
<gomp_for
*> (scan_body
);
2719 gimple_omp_set_body (input_stmt1
, input_body1
);
2720 gimple_omp_set_body (scan_stmt1
, NULL
);
/* Find the corresponding inner scan separator inside the copy.  */
2722 gimple_stmt_iterator input2_gsi
= gsi_none ();
2723 memset (&wi
, 0, sizeof (wi
));
2725 wi
.info
= (void *) &input2_gsi
;
2726 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt
), omp_find_scan
,
2728 gcc_assert (!gsi_end_p (input2_gsi
));
2730 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
2731 gsi_next (&input2_gsi
);
2732 gimple
*scan_stmt2
= gsi_stmt (input2_gsi
);
2733 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
2734 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2735 std::swap (input_stmt2
, scan_stmt2
);
2737 gimple_omp_set_body (input_stmt2
, NULL
);
/* Attach the two loop variants to their outer scan wrappers.  */
2739 gimple_omp_set_body (input_stmt
, input_body
);
2740 gimple_omp_set_body (scan_stmt
, scan_body
);
/* Scan both wrappers in fresh contexts under OUTER_CTX.  */
2742 omp_context
*ctx
= new_omp_context (input_stmt
, outer_ctx
);
2743 scan_omp (gimple_omp_body_ptr (input_stmt
), ctx
);
2745 ctx
= new_omp_context (scan_stmt
, outer_ctx
);
2746 scan_omp (gimple_omp_body_ptr (scan_stmt
), ctx
);
/* Mark the copied loop's context as the scan phase.  */
2748 maybe_lookup_ctx (new_stmt
)->for_simd_scan_phase
= true;
2751 /* Scan an OpenMP sections directive. */
/* Scan an OpenMP sections directive STMT: create its context under
   OUTER_CTX, scan the data-sharing clauses, then the body.
   NOTE(review): the `ctx' declaration and braces are elided in this
   extraction; comments only were added.  */
2754 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2758 ctx
= new_omp_context (stmt
, outer_ctx
);
2759 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2760 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2763 /* Scan an OpenMP single directive. */
/* Scan an OpenMP single directive STMT: build a `.omp_copy_s' record
   type for copyprivate data, scan clauses and body, then either drop
   the record (no fields were added) or lay it out.
   NOTE(review): the `ctx'/`name' declarations, braces and the else
   branch structure are elided in this extraction; comments only were
   added.  */
2766 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2771 ctx
= new_omp_context (stmt
, outer_ctx
);
/* Record type that will carry copyprivate variables, with a
   field_map from decls to fields.  */
2772 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2773 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2774 name
= create_tmp_var_name (".omp_copy_s");
2775 name
= build_decl (gimple_location (stmt
),
2776 TYPE_DECL
, name
, ctx
->record_type
);
2777 TYPE_NAME (ctx
->record_type
) = name
;
2779 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2780 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* No copyprivate fields accumulated — discard the record entirely.  */
2782 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2783 ctx
->record_type
= NULL
;
2785 layout_type (ctx
->record_type
);
2788 /* Scan a GIMPLE_OMP_TARGET. */
/* Scan a GIMPLE_OMP_TARGET STMT: create its context and the
   `.omp_data_t' record describing mapped data, create the offload
   child function when the region is actually offloaded, scan clauses
   and body, then finalize the record (drop if empty, otherwise
   un-reverse the field list, check field alignment, lay it out and
   fix up the child's receiver record).
   NOTE(review): the `ctx'/`name' declarations, braces, the `offloaded'
   guards and the loop's termination condition are elided in this
   extraction; comments only were added.  */
2791 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
2795 bool offloaded
= is_gimple_omp_offloaded (stmt
);
2796 tree clauses
= gimple_omp_target_clauses (stmt
);
2798 ctx
= new_omp_context (stmt
, outer_ctx
);
2799 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2800 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
/* The receiver record type is artificial and nameless.  */
2801 name
= create_tmp_var_name (".omp_data_t");
2802 name
= build_decl (gimple_location (stmt
),
2803 TYPE_DECL
, name
, ctx
->record_type
);
2804 DECL_ARTIFICIAL (name
) = 1;
2805 DECL_NAMELESS (name
) = 1;
2806 TYPE_NAME (ctx
->record_type
) = name
;
2807 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
/* Offloaded regions get an outlined child function to run on the
   device; record it on the target statement.  */
2811 create_omp_child_function (ctx
, false);
2812 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2815 scan_sharing_clauses (clauses
, ctx
);
2816 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* An empty record means nothing is marshalled: drop record and
   receiver decl.  */
2818 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2819 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
/* Fields were pushed in reverse; restore declaration order.  */
2822 TYPE_FIELDS (ctx
->record_type
)
2823 = nreverse (TYPE_FIELDS (ctx
->record_type
));
/* Sanity-check: all mapped fields share one alignment.  */
2826 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
2827 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
2829 field
= DECL_CHAIN (field
))
2830 gcc_assert (DECL_ALIGN (field
) == align
);
2832 layout_type (ctx
->record_type
);
2834 fixup_child_record_type (ctx
);
2838 /* Scan an OpenMP teams directive. */
/* Scan an OpenMP teams directive STMT.  Non-host teams just scan
   clauses and body.  Host teams behave like a taskreg construct: the
   context is queued on taskreg_contexts, gets a `.omp_data_s' record
   and an outlined child function, and the record is dropped if no
   fields were added (final layout happens in finish_taskreg_scan).
   NOTE(review): braces, the early-return structure after the non-host
   path and the taskreg_nesting_level bookkeeping lines are elided in
   this extraction; comments only were added.  */
2841 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
2843 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
/* Non-host teams (inside target) need no data-marshalling record.  */
2845 if (!gimple_omp_teams_host (stmt
))
2847 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2848 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* Host teams: defer record layout to finish_taskreg_scan.  */
2851 taskreg_contexts
.safe_push (ctx
);
2852 gcc_assert (taskreg_nesting_level
== 1);
2853 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2854 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2855 tree name
= create_tmp_var_name (".omp_data_s");
2856 name
= build_decl (gimple_location (stmt
),
2857 TYPE_DECL
, name
, ctx
->record_type
);
2858 DECL_ARTIFICIAL (name
) = 1;
2859 DECL_NAMELESS (name
) = 1;
2860 TYPE_NAME (ctx
->record_type
) = name
;
2861 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
/* Outline the teams body into a child function.  */
2862 create_omp_child_function (ctx
, false);
2863 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2865 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2866 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* Empty record: nothing to pass; drop record and receiver decl.  */
2868 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2869 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2872 /* Check nesting restrictions. */
2874 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
2878 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2879 inside an OpenACC CTX. */
2880 if (!(is_gimple_omp (stmt
)
2881 && is_gimple_omp_oacc (stmt
))
2882 /* Except for atomic codes that we share with OpenMP. */
2883 && !(gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2884 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2886 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2888 error_at (gimple_location (stmt
),
2889 "non-OpenACC construct inside of OpenACC routine");
2893 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
2894 if (is_gimple_omp (octx
->stmt
)
2895 && is_gimple_omp_oacc (octx
->stmt
))
2897 error_at (gimple_location (stmt
),
2898 "non-OpenACC construct inside of OpenACC region");
2905 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
2907 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
2909 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
2910 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
2914 if (ctx
->order_concurrent
2915 && (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
2916 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2917 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2919 error_at (gimple_location (stmt
),
2920 "OpenMP constructs other than %<parallel%>, %<loop%>"
2921 " or %<simd%> may not be nested inside a region with"
2922 " the %<order(concurrent)%> clause");
2925 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
2927 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2928 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2930 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
2931 && (ctx
->outer
== NULL
2932 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
2933 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
2934 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
2935 != GF_OMP_FOR_KIND_FOR
)
2936 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
2938 error_at (gimple_location (stmt
),
2939 "%<ordered simd threads%> must be closely "
2940 "nested inside of %<for simd%> region");
2946 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2947 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
2948 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
2950 else if (gimple_code (stmt
) == GIMPLE_OMP_FOR
2951 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
2953 error_at (gimple_location (stmt
),
2954 "OpenMP constructs other than "
2955 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2956 "not be nested inside %<simd%> region");
2959 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2961 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
2962 || (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
2963 && omp_find_clause (gimple_omp_for_clauses (stmt
),
2964 OMP_CLAUSE_BIND
) == NULL_TREE
))
2965 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
2967 error_at (gimple_location (stmt
),
2968 "only %<distribute%>, %<parallel%> or %<loop%> "
2969 "regions are allowed to be strictly nested inside "
2970 "%<teams%> region");
2974 else if (ctx
->order_concurrent
2975 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
2976 && (gimple_code (stmt
) != GIMPLE_OMP_FOR
2977 || gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_SIMD
)
2978 && gimple_code (stmt
) != GIMPLE_OMP_SCAN
)
2981 error_at (gimple_location (stmt
),
2982 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2983 "%<simd%> may not be nested inside a %<loop%> region");
2985 error_at (gimple_location (stmt
),
2986 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2987 "%<simd%> may not be nested inside a region with "
2988 "the %<order(concurrent)%> clause");
2992 switch (gimple_code (stmt
))
2994 case GIMPLE_OMP_FOR
:
2995 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
)
2997 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
2999 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
3001 error_at (gimple_location (stmt
),
3002 "%<distribute%> region must be strictly nested "
3003 "inside %<teams%> construct");
3008 /* We split taskloop into task and nested taskloop in it. */
3009 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3011 /* For now, hope this will change and loop bind(parallel) will not
3012 be allowed in lots of contexts. */
3013 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
3014 && omp_find_clause (gimple_omp_for_clauses (stmt
), OMP_CLAUSE_BIND
))
3016 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
3021 switch (gimple_code (ctx
->stmt
))
3023 case GIMPLE_OMP_FOR
:
3024 ok
= (gimple_omp_for_kind (ctx
->stmt
)
3025 == GF_OMP_FOR_KIND_OACC_LOOP
);
3028 case GIMPLE_OMP_TARGET
:
3029 switch (gimple_omp_target_kind (ctx
->stmt
))
3031 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3032 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3033 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3044 else if (oacc_get_fn_attrib (current_function_decl
))
3048 error_at (gimple_location (stmt
),
3049 "OpenACC loop directive must be associated with"
3050 " an OpenACC compute region");
3056 if (is_gimple_call (stmt
)
3057 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3058 == BUILT_IN_GOMP_CANCEL
3059 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3060 == BUILT_IN_GOMP_CANCELLATION_POINT
))
3062 const char *bad
= NULL
;
3063 const char *kind
= NULL
;
3064 const char *construct
3065 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3066 == BUILT_IN_GOMP_CANCEL
)
3068 : "cancellation point";
3071 error_at (gimple_location (stmt
), "orphaned %qs construct",
3075 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
3076 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
3080 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
3082 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3083 == BUILT_IN_GOMP_CANCEL
3084 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3085 ctx
->cancellable
= true;
3089 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3090 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
3092 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3093 == BUILT_IN_GOMP_CANCEL
3094 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3096 ctx
->cancellable
= true;
3097 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3099 warning_at (gimple_location (stmt
), 0,
3100 "%<cancel for%> inside "
3101 "%<nowait%> for construct");
3102 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3103 OMP_CLAUSE_ORDERED
))
3104 warning_at (gimple_location (stmt
), 0,
3105 "%<cancel for%> inside "
3106 "%<ordered%> for construct");
3111 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
3112 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
3114 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3115 == BUILT_IN_GOMP_CANCEL
3116 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3118 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
3120 ctx
->cancellable
= true;
3121 if (omp_find_clause (gimple_omp_sections_clauses
3124 warning_at (gimple_location (stmt
), 0,
3125 "%<cancel sections%> inside "
3126 "%<nowait%> sections construct");
3130 gcc_assert (ctx
->outer
3131 && gimple_code (ctx
->outer
->stmt
)
3132 == GIMPLE_OMP_SECTIONS
);
3133 ctx
->outer
->cancellable
= true;
3134 if (omp_find_clause (gimple_omp_sections_clauses
3137 warning_at (gimple_location (stmt
), 0,
3138 "%<cancel sections%> inside "
3139 "%<nowait%> sections construct");
3145 if (!is_task_ctx (ctx
)
3146 && (!is_taskloop_ctx (ctx
)
3147 || ctx
->outer
== NULL
3148 || !is_task_ctx (ctx
->outer
)))
3152 for (omp_context
*octx
= ctx
->outer
;
3153 octx
; octx
= octx
->outer
)
3155 switch (gimple_code (octx
->stmt
))
3157 case GIMPLE_OMP_TASKGROUP
:
3159 case GIMPLE_OMP_TARGET
:
3160 if (gimple_omp_target_kind (octx
->stmt
)
3161 != GF_OMP_TARGET_KIND_REGION
)
3164 case GIMPLE_OMP_PARALLEL
:
3165 case GIMPLE_OMP_TEAMS
:
3166 error_at (gimple_location (stmt
),
3167 "%<%s taskgroup%> construct not closely "
3168 "nested inside of %<taskgroup%> region",
3171 case GIMPLE_OMP_TASK
:
3172 if (gimple_omp_task_taskloop_p (octx
->stmt
)
3174 && is_taskloop_ctx (octx
->outer
))
3177 = gimple_omp_for_clauses (octx
->outer
->stmt
);
3178 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
3187 ctx
->cancellable
= true;
3192 error_at (gimple_location (stmt
), "invalid arguments");
3197 error_at (gimple_location (stmt
),
3198 "%<%s %s%> construct not closely nested inside of %qs",
3199 construct
, kind
, bad
);
3204 case GIMPLE_OMP_SECTIONS
:
3205 case GIMPLE_OMP_SINGLE
:
3206 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3207 switch (gimple_code (ctx
->stmt
))
3209 case GIMPLE_OMP_FOR
:
3210 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3211 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3214 case GIMPLE_OMP_SECTIONS
:
3215 case GIMPLE_OMP_SINGLE
:
3216 case GIMPLE_OMP_ORDERED
:
3217 case GIMPLE_OMP_MASTER
:
3218 case GIMPLE_OMP_TASK
:
3219 case GIMPLE_OMP_CRITICAL
:
3220 if (is_gimple_call (stmt
))
3222 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3223 != BUILT_IN_GOMP_BARRIER
)
3225 error_at (gimple_location (stmt
),
3226 "barrier region may not be closely nested inside "
3227 "of work-sharing, %<loop%>, %<critical%>, "
3228 "%<ordered%>, %<master%>, explicit %<task%> or "
3229 "%<taskloop%> region");
3232 error_at (gimple_location (stmt
),
3233 "work-sharing region may not be closely nested inside "
3234 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3235 "%<master%>, explicit %<task%> or %<taskloop%> region");
3237 case GIMPLE_OMP_PARALLEL
:
3238 case GIMPLE_OMP_TEAMS
:
3240 case GIMPLE_OMP_TARGET
:
3241 if (gimple_omp_target_kind (ctx
->stmt
)
3242 == GF_OMP_TARGET_KIND_REGION
)
3249 case GIMPLE_OMP_MASTER
:
3250 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3251 switch (gimple_code (ctx
->stmt
))
3253 case GIMPLE_OMP_FOR
:
3254 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3255 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3258 case GIMPLE_OMP_SECTIONS
:
3259 case GIMPLE_OMP_SINGLE
:
3260 case GIMPLE_OMP_TASK
:
3261 error_at (gimple_location (stmt
),
3262 "%<master%> region may not be closely nested inside "
3263 "of work-sharing, %<loop%>, explicit %<task%> or "
3264 "%<taskloop%> region");
3266 case GIMPLE_OMP_PARALLEL
:
3267 case GIMPLE_OMP_TEAMS
:
3269 case GIMPLE_OMP_TARGET
:
3270 if (gimple_omp_target_kind (ctx
->stmt
)
3271 == GF_OMP_TARGET_KIND_REGION
)
3278 case GIMPLE_OMP_TASK
:
3279 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3280 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3281 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3282 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3284 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3285 error_at (OMP_CLAUSE_LOCATION (c
),
3286 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3287 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3291 case GIMPLE_OMP_ORDERED
:
3292 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3293 c
; c
= OMP_CLAUSE_CHAIN (c
))
3295 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
3297 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3298 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3301 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3302 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
3303 || kind
== OMP_CLAUSE_DEPEND_SINK
)
3306 /* Look for containing ordered(N) loop. */
3308 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3310 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3311 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3313 error_at (OMP_CLAUSE_LOCATION (c
),
3314 "%<ordered%> construct with %<depend%> clause "
3315 "must be closely nested inside an %<ordered%> "
3319 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
3321 error_at (OMP_CLAUSE_LOCATION (c
),
3322 "%<ordered%> construct with %<depend%> clause "
3323 "must be closely nested inside a loop with "
3324 "%<ordered%> clause with a parameter");
3330 error_at (OMP_CLAUSE_LOCATION (c
),
3331 "invalid depend kind in omp %<ordered%> %<depend%>");
3335 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3336 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3338 /* ordered simd must be closely nested inside of simd region,
3339 and simd region must not encounter constructs other than
3340 ordered simd, therefore ordered simd may be either orphaned,
3341 or ctx->stmt must be simd. The latter case is handled already
3345 error_at (gimple_location (stmt
),
3346 "%<ordered%> %<simd%> must be closely nested inside "
3351 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3352 switch (gimple_code (ctx
->stmt
))
3354 case GIMPLE_OMP_CRITICAL
:
3355 case GIMPLE_OMP_TASK
:
3356 case GIMPLE_OMP_ORDERED
:
3357 ordered_in_taskloop
:
3358 error_at (gimple_location (stmt
),
3359 "%<ordered%> region may not be closely nested inside "
3360 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3361 "%<taskloop%> region");
3363 case GIMPLE_OMP_FOR
:
3364 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3365 goto ordered_in_taskloop
;
3367 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3368 OMP_CLAUSE_ORDERED
);
3371 error_at (gimple_location (stmt
),
3372 "%<ordered%> region must be closely nested inside "
3373 "a loop region with an %<ordered%> clause");
3376 if (OMP_CLAUSE_ORDERED_EXPR (o
) != NULL_TREE
3377 && omp_find_clause (c
, OMP_CLAUSE_DEPEND
) == NULL_TREE
)
3379 error_at (gimple_location (stmt
),
3380 "%<ordered%> region without %<depend%> clause may "
3381 "not be closely nested inside a loop region with "
3382 "an %<ordered%> clause with a parameter");
3386 case GIMPLE_OMP_TARGET
:
3387 if (gimple_omp_target_kind (ctx
->stmt
)
3388 != GF_OMP_TARGET_KIND_REGION
)
3391 case GIMPLE_OMP_PARALLEL
:
3392 case GIMPLE_OMP_TEAMS
:
3393 error_at (gimple_location (stmt
),
3394 "%<ordered%> region must be closely nested inside "
3395 "a loop region with an %<ordered%> clause");
3401 case GIMPLE_OMP_CRITICAL
:
3404 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3405 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3406 if (gomp_critical
*other_crit
3407 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3408 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3410 error_at (gimple_location (stmt
),
3411 "%<critical%> region may not be nested inside "
3412 "a %<critical%> region with the same name");
3417 case GIMPLE_OMP_TEAMS
:
3420 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3421 || (gimple_omp_target_kind (ctx
->stmt
)
3422 != GF_OMP_TARGET_KIND_REGION
))
3424 /* Teams construct can appear either strictly nested inside of
3425 target construct with no intervening stmts, or can be encountered
3426 only by initial task (so must not appear inside any OpenMP
3428 error_at (gimple_location (stmt
),
3429 "%<teams%> construct must be closely nested inside of "
3430 "%<target%> construct or not nested in any OpenMP "
3435 case GIMPLE_OMP_TARGET
:
3436 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3437 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3438 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3439 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3441 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3442 error_at (OMP_CLAUSE_LOCATION (c
),
3443 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3444 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3447 if (is_gimple_omp_offloaded (stmt
)
3448 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3450 error_at (gimple_location (stmt
),
3451 "OpenACC region inside of OpenACC routine, nested "
3452 "parallelism not supported yet");
3455 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3457 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3459 if (is_gimple_omp (stmt
)
3460 && is_gimple_omp_oacc (stmt
)
3461 && is_gimple_omp (ctx
->stmt
))
3463 error_at (gimple_location (stmt
),
3464 "OpenACC construct inside of non-OpenACC region");
3470 const char *stmt_name
, *ctx_stmt_name
;
3471 switch (gimple_omp_target_kind (stmt
))
3473 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3474 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3475 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3476 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3477 stmt_name
= "target enter data"; break;
3478 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3479 stmt_name
= "target exit data"; break;
3480 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3481 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3482 case GF_OMP_TARGET_KIND_OACC_SERIAL
: stmt_name
= "serial"; break;
3483 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3484 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3485 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
3486 stmt_name
= "enter/exit data"; break;
3487 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3488 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3490 default: gcc_unreachable ();
3492 switch (gimple_omp_target_kind (ctx
->stmt
))
3494 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3495 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3496 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3497 ctx_stmt_name
= "parallel"; break;
3498 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3499 ctx_stmt_name
= "kernels"; break;
3500 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3501 ctx_stmt_name
= "serial"; break;
3502 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3503 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3504 ctx_stmt_name
= "host_data"; break;
3505 default: gcc_unreachable ();
3508 /* OpenACC/OpenMP mismatch? */
3509 if (is_gimple_omp_oacc (stmt
)
3510 != is_gimple_omp_oacc (ctx
->stmt
))
3512 error_at (gimple_location (stmt
),
3513 "%s %qs construct inside of %s %qs region",
3514 (is_gimple_omp_oacc (stmt
)
3515 ? "OpenACC" : "OpenMP"), stmt_name
,
3516 (is_gimple_omp_oacc (ctx
->stmt
)
3517 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3520 if (is_gimple_omp_offloaded (ctx
->stmt
))
3522 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3523 if (is_gimple_omp_oacc (ctx
->stmt
))
3525 error_at (gimple_location (stmt
),
3526 "%qs construct inside of %qs region",
3527 stmt_name
, ctx_stmt_name
);
3532 warning_at (gimple_location (stmt
), 0,
3533 "%qs construct inside of %qs region",
3534 stmt_name
, ctx_stmt_name
);
3546 /* Helper function scan_omp.
3548 Callback for walk_tree or operators in walk_gimple_stmt used to
3549 scan for OMP directives in TP. */
/* NOTE(review): this region is a lossy extraction of GCC's omp-low.c --
   each original source line (leading "3xxx" number) is split across rows
   and some lines are missing, so only comments are added here; all
   original tokens are left byte-identical.  */
/* Operand walker: remaps decls and types referenced inside an OMP body
   using the context's copy-body data (ctx->cb).  */
3552 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
/* The omp_context is threaded through walk_stmt_info::info.  */
3554 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3555 omp_context
*ctx
= (omp_context
*) wi
->info
;
3558 switch (TREE_CODE (t
))
/* Decl case: substitute the remapped decl for T and sanity-check it.  */
3566 tree repl
= remap_decl (t
, &ctx
->cb
);
3567 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
/* Types are remapped in place; non-decl trees may need only their
   TREE_TYPE remapped.  */
3573 if (ctx
&& TYPE_P (t
))
3574 *tp
= remap_type (t
, &ctx
->cb
);
3575 else if (!DECL_P (t
))
3580 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3581 if (tem
!= TREE_TYPE (t
))
/* INTEGER_CSTs must be rebuilt in the new type; other trees just get
   their TREE_TYPE overwritten.  */
3583 if (TREE_CODE (t
) == INTEGER_CST
)
3584 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3586 TREE_TYPE (t
) = tem
;
3596 /* Return true if FNDECL is a setjmp or a longjmp. */
/* Matches the recognized builtins first, then falls back to a literal
   name match for public file-scope functions named "setjmp"/"longjmp".
   (Extraction note: the return-type line and some braces are missing
   from this chunk; code tokens are untouched.)  */
3599 setjmp_or_longjmp_p (const_tree fndecl
)
3601 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3602 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
/* Name-based fallback: only public functions declared at
   translation-unit scope may qualify.  */
3605 tree declname
= DECL_NAME (fndecl
);
3607 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3608 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3609 || !TREE_PUBLIC (fndecl
))
3612 const char *name
= IDENTIFIER_POINTER (declname
);
3613 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
3616 /* Return true if FNDECL is an omp_* runtime API call. */
/* Only public functions at translation-unit scope whose name starts
   with "omp_" are considered; the suffix table below then decides
   whether the remainder matches a known API entry point (optionally
   with "_" or "8_" Fortran suffixes).  Several table entries are
   missing from this extraction.  */
3619 omp_runtime_api_call (const_tree fndecl
)
3621 tree declname
= DECL_NAME (fndecl
);
3623 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3624 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3625 || !TREE_PUBLIC (fndecl
))
3628 const char *name
= IDENTIFIER_POINTER (declname
);
3629 if (strncmp (name
, "omp_", 4) != 0)
3632 static const char *omp_runtime_apis
[] =
3634 /* This array has 3 sections. First omp_* calls that don't
3635 have any suffixes. */
3637 "target_associate_ptr",
3638 "target_disassociate_ptr",
3640 "target_is_present",
3642 "target_memcpy_rect",
3644 /* Now omp_* calls that are available as omp_* and omp_*_. */
3647 "destroy_nest_lock",
3650 "get_affinity_format",
3652 "get_default_device",
3654 "get_initial_device",
3656 "get_max_active_levels",
3657 "get_max_task_priority",
3665 "get_partition_num_places",
3677 "is_initial_device",
3679 "pause_resource_all",
3680 "set_affinity_format",
3688 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3689 "get_ancestor_thread_num",
3690 "get_partition_place_nums",
3691 "get_place_num_procs",
3692 "get_place_proc_ids",
3695 "set_default_device",
3697 "set_max_active_levels",
/* Scan the table; NULL entries appear to delimit the sections.  */
3704 for (unsigned i
= 0; i
< ARRAY_SIZE (omp_runtime_apis
); i
++)
3706 if (omp_runtime_apis
[i
] == NULL
)
3711 size_t len
= strlen (omp_runtime_apis
[i
]);
/* Accept exact match, trailing "_", or trailing "_8_" depending on
   which section of the table the entry is in.  */
3712 if (strncmp (name
+ 4, omp_runtime_apis
[i
], len
) == 0
3713 && (name
[4 + len
] == '\0'
3715 && name
[4 + len
] == '_'
3716 && (name
[4 + len
+ 1] == '\0'
3718 && strcmp (name
+ 4 + len
+ 1, "8_") == 0)))))
3724 /* Helper function for scan_omp.
3726 Callback for walk_gimple_stmt used to scan for OMP directives in
3727 the current statement in GSI. */
/* NOTE(review): lossy extraction -- original lines are split across rows
   and several (braces, breaks, default cases) are missing.  Comments
   only; code tokens untouched.  */
3730 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3731 struct walk_stmt_info
*wi
)
3733 gimple
*stmt
= gsi_stmt (*gsi
);
3734 omp_context
*ctx
= (omp_context
*) wi
->info
;
/* Keep diagnostics pointing at the statement being scanned.  */
3736 if (gimple_has_location (stmt
))
3737 input_location
= gimple_location (stmt
);
3739 /* Check the nesting restrictions. */
3740 bool remove
= false;
3741 if (is_gimple_omp (stmt
))
3742 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3743 else if (is_gimple_call (stmt
))
3745 tree fndecl
= gimple_call_fndecl (stmt
);
/* setjmp/longjmp are rejected inside simd regions.  */
3749 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3750 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3751 && setjmp_or_longjmp_p (fndecl
)
3755 error_at (gimple_location (stmt
),
3756 "setjmp/longjmp inside %<simd%> construct");
/* GOMP_* builtins are subject to the same nesting checks as explicit
   OMP statements.  */
3758 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3759 switch (DECL_FUNCTION_CODE (fndecl
))
3761 case BUILT_IN_GOMP_BARRIER
:
3762 case BUILT_IN_GOMP_CANCEL
:
3763 case BUILT_IN_GOMP_CANCELLATION_POINT
:
3764 case BUILT_IN_GOMP_TASKYIELD
:
3765 case BUILT_IN_GOMP_TASKWAIT
:
3766 case BUILT_IN_GOMP_TASKGROUP_START
:
3767 case BUILT_IN_GOMP_TASKGROUP_END
:
3768 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
/* omp_* runtime API calls are diagnosed in order(concurrent) regions.  */
3775 omp_context
*octx
= ctx
;
3776 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
&& ctx
->outer
)
3778 if (octx
->order_concurrent
&& omp_runtime_api_call (fndecl
))
3781 error_at (gimple_location (stmt
),
3782 "OpenMP runtime API call %qD in a region with "
3783 "%<order(concurrent)%> clause", fndecl
);
/* An invalid statement is turned into a no-op so lowering can
   continue after the diagnostic.  */
3790 stmt
= gimple_build_nop ();
3791 gsi_replace (gsi
, stmt
, false);
3794 *handled_ops_p
= true;
/* Dispatch on statement code: each construct gets its own scanner,
   and taskreg_nesting_level tracks parallel/task/offload nesting.  */
3796 switch (gimple_code (stmt
))
3798 case GIMPLE_OMP_PARALLEL
:
3799 taskreg_nesting_level
++;
3800 scan_omp_parallel (gsi
, ctx
);
3801 taskreg_nesting_level
--;
3804 case GIMPLE_OMP_TASK
:
3805 taskreg_nesting_level
++;
3806 scan_omp_task (gsi
, ctx
);
3807 taskreg_nesting_level
--;
3810 case GIMPLE_OMP_FOR
:
/* Combined simd with an inscan reduction gets special scanning.  */
3811 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3812 == GF_OMP_FOR_KIND_SIMD
)
3813 && gimple_omp_for_combined_into_p (stmt
)
3814 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SCAN
)
3816 tree clauses
= gimple_omp_for_clauses (as_a
<gomp_for
*> (stmt
));
3817 tree c
= omp_find_clause (clauses
, OMP_CLAUSE_REDUCTION
);
3818 if (c
&& OMP_CLAUSE_REDUCTION_INSCAN (c
) && !seen_error ())
3820 scan_omp_simd_scan (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
/* Non-collapsed simd in a maybe-offloaded context with SIMT support
   goes through the SIMT scanner.  */
3824 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3825 == GF_OMP_FOR_KIND_SIMD
)
3826 && omp_maybe_offloaded_ctx (ctx
)
3827 && omp_max_simt_vf ()
3828 && gimple_omp_for_collapse (stmt
) == 1)
3829 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3831 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
3834 case GIMPLE_OMP_SECTIONS
:
3835 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
3838 case GIMPLE_OMP_SINGLE
:
3839 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
3842 case GIMPLE_OMP_SCAN
:
/* Record whether this is an inclusive or exclusive scan on the
   enclosing context.  */
3843 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
3845 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
3846 ctx
->scan_inclusive
= true;
3847 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
3848 ctx
->scan_exclusive
= true;
/* These constructs just need a fresh context and a body scan.  */
3851 case GIMPLE_OMP_SECTION
:
3852 case GIMPLE_OMP_MASTER
:
3853 case GIMPLE_OMP_ORDERED
:
3854 case GIMPLE_OMP_CRITICAL
:
3855 ctx
= new_omp_context (stmt
, ctx
);
3856 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3859 case GIMPLE_OMP_TASKGROUP
:
3860 ctx
= new_omp_context (stmt
, ctx
);
3861 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
3862 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3865 case GIMPLE_OMP_TARGET
:
/* Offloaded targets count as taskreg nesting.  */
3866 if (is_gimple_omp_offloaded (stmt
))
3868 taskreg_nesting_level
++;
3869 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3870 taskreg_nesting_level
--;
3873 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3876 case GIMPLE_OMP_TEAMS
:
/* Host teams also count as taskreg nesting.  */
3877 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
3879 taskreg_nesting_level
++;
3880 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3881 taskreg_nesting_level
--;
3884 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
/* GIMPLE_BIND (presumably -- label dropped by extraction): record the
   bind's vars in the decl map so later remapping finds them.  */
3891 *handled_ops_p
= false;
3893 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
3895 var
= DECL_CHAIN (var
))
3896 insert_decl_map (&ctx
->cb
, var
, var
);
3900 *handled_ops_p
= false;
3908 /* Scan all the statements starting at the current statement. CTX
3909 contains context information about the OMP directives and
3910 clauses found during the scan. */
/* Driver: walks BODY_P with scan_omp_1_stmt/scan_omp_1_op, passing CTX
   through walk_stmt_info::info, and restores input_location afterwards
   (the callbacks overwrite it for diagnostics).  */
3913 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
3915 location_t saved_location
;
3916 struct walk_stmt_info wi
;
3918 memset (&wi
, 0, sizeof (wi
));
3920 wi
.want_locations
= true;
3922 saved_location
= input_location
;
3923 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
3924 input_location
= saved_location
;
3927 /* Re-gimplification and code generation routines. */
3929 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3930 of BIND if in a method. */
/* Only applies when current_function_decl looks like a method (first
   argument artificial; the truncated condition presumably also checks
   for a record/union `this' type -- TODO confirm against full source).
   Unlinks each dummy var from the bind's var chain in place.  */
3933 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
3935 if (DECL_ARGUMENTS (current_function_decl
)
3936 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
3937 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
3940 tree vars
= gimple_bind_vars (bind
);
/* Classic unlink-in-place loop over the DECL_CHAIN list.  */
3941 for (tree
*pvar
= &vars
; *pvar
; )
3942 if (omp_member_access_dummy_var (*pvar
))
3943 *pvar
= DECL_CHAIN (*pvar
);
3945 pvar
= &DECL_CHAIN (*pvar
);
3946 gimple_bind_set_vars (bind
, vars
);
3950 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3951 block and its subblocks. */
/* Same unlink-in-place idiom as maybe_remove_omp_member_access_dummy_vars
   (an `else' line is missing from this extraction), followed by
   recursion over the subblock chain.  */
3954 remove_member_access_dummy_vars (tree block
)
3956 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
3957 if (omp_member_access_dummy_var (*pvar
))
3958 *pvar
= DECL_CHAIN (*pvar
);
3960 pvar
= &DECL_CHAIN (*pvar
);
/* Recurse into every subblock.  */
3962 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
3963 remove_member_access_dummy_vars (block
);
3966 /* If a context was created for STMT when it was scanned, return it. */
/* Splay-tree lookup keyed by the statement pointer; returns NULL when
   STMT never got a context.  (The declaration of `n' is missing from
   this extraction.)  */
3968 static omp_context
*
3969 maybe_lookup_ctx (gimple
*stmt
)
3972 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
3973 return n
? (omp_context
*) n
->value
: NULL
;
3977 /* Find the mapping for DECL in CTX or the immediately enclosing
3978 context that has a mapping for DECL.
3980 If CTX is a nested parallel directive, we may have to use the decl
3981 mappings created in CTX's parent context. Suppose that we have the
3982 following parallel nesting (variable UIDs showed for clarity):
3985 #omp parallel shared(iD.1562) -> outer parallel
3986 iD.1562 = iD.1562 + 1;
3988 #omp parallel shared (iD.1562) -> inner parallel
3989 iD.1562 = iD.1562 - 1;
3991 Each parallel structure will create a distinct .omp_data_s structure
3992 for copying iD.1562 in/out of the directive:
3994 outer parallel .omp_data_s.1.i -> iD.1562
3995 inner parallel .omp_data_s.2.i -> iD.1562
3997 A shared variable mapping will produce a copy-out operation before
3998 the parallel directive and a copy-in operation after it. So, in
3999 this case we would have:
4002 .omp_data_o.1.i = iD.1562;
4003 #omp parallel shared(iD.1562) -> outer parallel
4004 .omp_data_i.1 = &.omp_data_o.1
4005 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4007 .omp_data_o.2.i = iD.1562; -> **
4008 #omp parallel shared(iD.1562) -> inner parallel
4009 .omp_data_i.2 = &.omp_data_o.2
4010 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4013 ** This is a problem. The symbol iD.1562 cannot be referenced
4014 inside the body of the outer parallel region. But since we are
4015 emitting this copy operation while expanding the inner parallel
4016 directive, we need to access the CTX structure of the outer
4017 parallel directive to get the correct mapping:
4019 .omp_data_o.2.i = .omp_data_i.1->i
4021 Since there may be other workshare or parallel directives enclosing
4022 the parallel directive, it may be necessary to walk up the context
4023 parent chain. This is not a problem in general because nested
4024 parallelism happens only rarely. */
/* Walk outward until some enclosing context maps DECL; fall back to
   DECL itself.  Asserts that a nested context either found a mapping
   or DECL is a global.  */
4027 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4032 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4033 t
= maybe_lookup_decl (decl
, up
);
4035 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
4037 return t
? t
: decl
;
4041 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4042 in outer contexts. */
/* Identical outward walk, but without the nested-context assertion.  */
4045 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4050 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4051 t
= maybe_lookup_decl (decl
, up
);
4053 return t
? t
: decl
;
4057 /* Construct the initialization value for reduction operation OP. */
/* Identity element per reduction operator: 0 for or/xor (and, per the
   missing case labels, presumably +/|/^), 1 for and/andif (and *), -1
   for bit-and-like ops, and type extremes for min/max -- several case
   labels were dropped by this extraction, so the operator-to-branch
   mapping should be confirmed against the full source.  */
4060 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
4069 case TRUTH_ORIF_EXPR
:
4070 case TRUTH_XOR_EXPR
:
4072 return build_zero_cst (type
);
4075 case TRUTH_AND_EXPR
:
4076 case TRUTH_ANDIF_EXPR
:
4078 return fold_convert_loc (loc
, type
, integer_one_node
);
4081 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
/* MAX reduction: initialize to the smallest representable value
   (-inf, -huge, or TYPE_MIN_VALUE depending on the type).  */
4084 if (SCALAR_FLOAT_TYPE_P (type
))
4086 REAL_VALUE_TYPE max
, min
;
4087 if (HONOR_INFINITIES (type
))
4090 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
4093 real_maxval (&min
, 1, TYPE_MODE (type
));
4094 return build_real (type
, min
);
4096 else if (POINTER_TYPE_P (type
))
4099 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4100 return wide_int_to_tree (type
, min
);
4104 gcc_assert (INTEGRAL_TYPE_P (type
));
4105 return TYPE_MIN_VALUE (type
);
/* MIN reduction: symmetric case, largest representable value.  */
4109 if (SCALAR_FLOAT_TYPE_P (type
))
4111 REAL_VALUE_TYPE max
;
4112 if (HONOR_INFINITIES (type
))
4115 real_maxval (&max
, 0, TYPE_MODE (type
));
4116 return build_real (type
, max
);
4118 else if (POINTER_TYPE_P (type
))
4121 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4122 return wide_int_to_tree (type
, max
);
4126 gcc_assert (INTEGRAL_TYPE_P (type
));
4127 return TYPE_MAX_VALUE (type
);
4135 /* Construct the initialization value for reduction CLAUSE. */
/* Thin wrapper: forwards the clause's location and reduction code to
   omp_reduction_init_op.  */
4138 omp_reduction_init (tree clause
, tree type
)
4140 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
4141 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
4144 /* Return alignment to be assumed for var in CLAUSE, which should be
4145 OMP_CLAUSE_ALIGNED. */
/* An explicit alignment on the clause wins; otherwise compute the
   implementation-defined default from the target's preferred SIMD
   vector types.  */
4148 omp_clause_aligned_alignment (tree clause
)
4150 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
4151 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
4153 /* Otherwise return implementation defined alignment. */
4154 unsigned int al
= 1;
4155 opt_scalar_mode mode_iter
;
4156 auto_vector_modes modes
;
4157 targetm
.vectorize
.autovectorize_vector_modes (&modes
, true);
/* Pairs of (scalar class, corresponding vector class); the loop below
   steps by 2 so mode_iter only visits the scalar classes.  */
4158 static enum mode_class classes
[]
4159 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
4160 for (int i
= 0; i
< 4; i
+= 2)
4161 /* The for loop above dictates that we only walk through scalar classes. */
4162 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
4164 scalar_mode mode
= mode_iter
.require ();
4165 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
4166 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
/* Prefer any at-least-as-wide related vector mode from the
   autovectorize candidates.  */
4168 machine_mode alt_vmode
;
4169 for (unsigned int j
= 0; j
< modes
.length (); ++j
)
4170 if (related_vector_mode (modes
[j
], mode
).exists (&alt_vmode
)
4171 && known_ge (GET_MODE_SIZE (alt_vmode
), GET_MODE_SIZE (vmode
)))
/* Build the vector type for this mode and track the maximum
   alignment seen across all candidates.  */
4174 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
4175 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
4177 type
= build_vector_type_for_mode (type
, vmode
);
4178 if (TYPE_MODE (type
) != vmode
)
4180 if (TYPE_ALIGN_UNIT (type
) > al
)
4181 al
= TYPE_ALIGN_UNIT (type
);
4183 return build_int_cst (integer_type_node
, al
);
4187 /* This structure is part of the interface between lower_rec_simd_input_clauses
4188 and lower_rec_input_clauses. */
/* Zero-initialized scratch state for simd/SIMT lowering: extra SIMT
   entry args, a destructor sequence, and the maximum vectorization
   factor.  (Other fields appear to be missing from this extraction.)  */
4190 class omplow_simd_context
{
4192 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4196 vec
<tree
, va_heap
> simt_eargs
;
4197 gimple_seq simt_dlist
;
4198 poly_uint64_pod max_vf
;
4202 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
/* Privatizes NEW_VAR for a simd loop: either per-lane SIMT storage or a
   max_vf-sized "omp simd array", returning per-iteration (IVAR) and
   per-lane (LVAR) accessors, plus optional inscan-reduction arrays via
   RVAR/RVAR2.  NOTE(review): lossy extraction -- lines split across
   rows, some missing; comments only.  */
4206 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
4207 omplow_simd_context
*sctx
, tree
&ivar
,
4208 tree
&lvar
, tree
*rvar
= NULL
,
/* Lazily compute max_vf, clamped by any safelen clause on the loop.  */
4211 if (known_eq (sctx
->max_vf
, 0U))
4213 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
4214 if (maybe_gt (sctx
->max_vf
, 1U))
4216 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4217 OMP_CLAUSE_SAFELEN
);
4220 poly_uint64 safe_len
;
4221 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4222 || maybe_lt (safe_len
, 1U))
4225 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
/* When vectorization is possible, create the lane/index counters.  */
4228 if (maybe_gt (sctx
->max_vf
, 1U))
4230 sctx
->idx
= create_tmp_var (unsigned_type_node
);
4231 sctx
->lane
= create_tmp_var (unsigned_type_node
);
4234 if (known_eq (sctx
->max_vf
, 1U))
/* SIMT path (presumably -- the guarding condition line is missing):
   a plain temp whose address is passed to the SIMT entry, clobbered
   in the destructor sequence.  */
4239 if (is_gimple_reg (new_var
))
4241 ivar
= lvar
= new_var
;
4244 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
4245 ivar
= lvar
= create_tmp_var (type
);
4246 TREE_ADDRESSABLE (ivar
) = 1;
4247 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
4248 NULL
, DECL_ATTRIBUTES (ivar
));
4249 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
4250 tree clobber
= build_clobber (type
);
4251 gimple
*g
= gimple_build_assign (ivar
, clobber
);
4252 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
/* Non-SIMT path: a max_vf-element "omp simd array" temporary.  */
4256 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
4257 tree avar
= create_tmp_var_raw (atype
);
4258 if (TREE_ADDRESSABLE (new_var
))
4259 TREE_ADDRESSABLE (avar
) = 1;
4260 DECL_ATTRIBUTES (avar
)
4261 = tree_cons (get_identifier ("omp simd array"), NULL
,
4262 DECL_ATTRIBUTES (avar
));
4263 gimple_add_tmp_var (avar
);
4265 if (rvar
&& !ctx
->for_simd_scan_phase
)
4267 /* For inscan reductions, create another array temporary,
4268 which will hold the reduced value. */
4269 iavar
= create_tmp_var_raw (atype
);
4270 if (TREE_ADDRESSABLE (new_var
))
4271 TREE_ADDRESSABLE (iavar
) = 1;
4272 DECL_ATTRIBUTES (iavar
)
4273 = tree_cons (get_identifier ("omp simd array"), NULL
,
4274 tree_cons (get_identifier ("omp simd inscan"), NULL
,
4275 DECL_ATTRIBUTES (iavar
)));
4276 gimple_add_tmp_var (iavar
);
4277 ctx
->cb
.decl_map
->put (avar
, iavar
);
4278 if (sctx
->lastlane
== NULL_TREE
)
4279 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
/* *RVAR reads the reduced value at the last active lane.  */
4280 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
4281 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
4282 TREE_THIS_NOTRAP (*rvar
) = 1;
4284 if (ctx
->scan_exclusive
)
4286 /* And for exclusive scan yet another one, which will
4287 hold the value during the scan phase. */
4288 tree savar
= create_tmp_var_raw (atype
);
4289 if (TREE_ADDRESSABLE (new_var
))
4290 TREE_ADDRESSABLE (savar
) = 1;
4291 DECL_ATTRIBUTES (savar
)
4292 = tree_cons (get_identifier ("omp simd array"), NULL
,
4293 tree_cons (get_identifier ("omp simd inscan "
4295 DECL_ATTRIBUTES (savar
)));
4296 gimple_add_tmp_var (savar
);
4297 ctx
->cb
.decl_map
->put (iavar
, savar
);
4298 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
4299 sctx
->idx
, NULL_TREE
, NULL_TREE
);
4300 TREE_THIS_NOTRAP (*rvar2
) = 1;
/* Per-iteration and per-lane array accessors.  */
4303 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
4304 NULL_TREE
, NULL_TREE
);
4305 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
4306 NULL_TREE
, NULL_TREE
);
4307 TREE_THIS_NOTRAP (ivar
) = 1;
4308 TREE_THIS_NOTRAP (lvar
) = 1;
/* Make uses of NEW_VAR resolve to the per-lane accessor.  */
4310 if (DECL_P (new_var
))
4312 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4313 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4318 /* Helper function of lower_rec_input_clauses. For a reference
4319 in simd reduction, add an underlying variable it will reference. */
/* Only when the pointed-to size is constant: create an addressable
   temporary of the referenced type and assign its address to NEW_VARD
   in ILIST.  */
4322 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
4324 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
4325 if (TREE_CONSTANT (z
))
4327 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
4328 get_name (new_vard
));
4329 gimple_add_tmp_var (z
);
4330 TREE_ADDRESSABLE (z
) = 1;
4331 z
= build_fold_addr_expr_loc (loc
, z
);
4332 gimplify_assign (new_vard
, z
, ilist
);
4336 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4337 code to emit (type) (tskred_temp[idx]). */
/* Loads a pointer-sized word from TSKRED_TEMP (offset presumably
   idx * sz -- the offset expression line is truncated in this
   extraction) into a temp, converting to TYPE when needed.  */
4340 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
4343 unsigned HOST_WIDE_INT sz
4344 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
4345 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
4346 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
4348 tree v
= create_tmp_var (pointer_sized_int_node
);
4349 gimple
*g
= gimple_build_assign (v
, r
);
4350 gimple_seq_add_stmt (ilist
, g
);
/* Narrow/convert to the requested type if it differs.  */
4351 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
4353 v
= create_tmp_var (type
);
4354 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
4355 gimple_seq_add_stmt (ilist
, g
);
4360 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4361 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4362 private variables. Initialization statements go in ILIST, while calls
4363 to destructors go in DLIST. */
4366 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
4367 omp_context
*ctx
, struct omp_for_data
*fd
)
4369 tree c
, copyin_seq
, x
, ptr
;
4370 bool copyin_by_ref
= false;
4371 bool lastprivate_firstprivate
= false;
4372 bool reduction_omp_orig_ref
= false;
4374 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4375 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
4376 omplow_simd_context sctx
= omplow_simd_context ();
4377 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
4378 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
4379 gimple_seq llist
[4] = { };
4380 tree nonconst_simd_if
= NULL_TREE
;
4383 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
4385 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4386 with data sharing clauses referencing variable sized vars. That
4387 is unnecessarily hard to support and very unlikely to result in
4388 vectorized code anyway. */
4390 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4391 switch (OMP_CLAUSE_CODE (c
))
4393 case OMP_CLAUSE_LINEAR
:
4394 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4397 case OMP_CLAUSE_PRIVATE
:
4398 case OMP_CLAUSE_FIRSTPRIVATE
:
4399 case OMP_CLAUSE_LASTPRIVATE
:
4400 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
4402 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4404 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4405 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4409 case OMP_CLAUSE_REDUCTION
:
4410 case OMP_CLAUSE_IN_REDUCTION
:
4411 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4412 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4414 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4416 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4417 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4422 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4424 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4425 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4427 case OMP_CLAUSE_SIMDLEN
:
4428 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4431 case OMP_CLAUSE__CONDTEMP_
:
4432 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4440 /* Add a placeholder for simduid. */
4441 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
4442 sctx
.simt_eargs
.safe_push (NULL_TREE
);
4444 unsigned task_reduction_cnt
= 0;
4445 unsigned task_reduction_cntorig
= 0;
4446 unsigned task_reduction_cnt_full
= 0;
4447 unsigned task_reduction_cntorig_full
= 0;
4448 unsigned task_reduction_other_cnt
= 0;
4449 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
4450 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
4451 /* Do all the fixed sized types in the first pass, and the variable sized
4452 types in the second pass. This makes sure that the scalar arguments to
4453 the variable sized types are processed before we use them in the
4454 variable sized operations. For task reductions we use 4 passes, in the
4455 first two we ignore them, in the third one gather arguments for
4456 GOMP_task_reduction_remap call and in the last pass actually handle
4457 the task reductions. */
4458 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
4461 if (pass
== 2 && task_reduction_cnt
)
4464 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
4465 + task_reduction_cntorig
);
4466 tskred_avar
= create_tmp_var_raw (tskred_atype
);
4467 gimple_add_tmp_var (tskred_avar
);
4468 TREE_ADDRESSABLE (tskred_avar
) = 1;
4469 task_reduction_cnt_full
= task_reduction_cnt
;
4470 task_reduction_cntorig_full
= task_reduction_cntorig
;
4472 else if (pass
== 3 && task_reduction_cnt
)
4474 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
4476 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
4477 size_int (task_reduction_cntorig
),
4478 build_fold_addr_expr (tskred_avar
));
4479 gimple_seq_add_stmt (ilist
, g
);
4481 if (pass
== 3 && task_reduction_other_cnt
)
4483 /* For reduction clauses, build
4484 tskred_base = (void *) tskred_temp[2]
4485 + omp_get_thread_num () * tskred_temp[1]
4486 or if tskred_temp[1] is known to be constant, that constant
4487 directly. This is the start of the private reduction copy block
4488 for the current thread. */
4489 tree v
= create_tmp_var (integer_type_node
);
4490 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
4491 gimple
*g
= gimple_build_call (x
, 0);
4492 gimple_call_set_lhs (g
, v
);
4493 gimple_seq_add_stmt (ilist
, g
);
4494 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
4495 tskred_temp
= OMP_CLAUSE_DECL (c
);
4496 if (is_taskreg_ctx (ctx
))
4497 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
4498 tree v2
= create_tmp_var (sizetype
);
4499 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
4500 gimple_seq_add_stmt (ilist
, g
);
4501 if (ctx
->task_reductions
[0])
4502 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
4504 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
4505 tree v3
= create_tmp_var (sizetype
);
4506 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
4507 gimple_seq_add_stmt (ilist
, g
);
4508 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
4509 tskred_base
= create_tmp_var (ptr_type_node
);
4510 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
4511 gimple_seq_add_stmt (ilist
, g
);
4513 task_reduction_cnt
= 0;
4514 task_reduction_cntorig
= 0;
4515 task_reduction_other_cnt
= 0;
4516 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4518 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
4521 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4522 bool task_reduction_p
= false;
4523 bool task_reduction_needs_orig_p
= false;
4524 tree cond
= NULL_TREE
;
4528 case OMP_CLAUSE_PRIVATE
:
4529 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
4532 case OMP_CLAUSE_SHARED
:
4533 /* Ignore shared directives in teams construct inside
4534 of target construct. */
4535 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4536 && !is_host_teams_ctx (ctx
))
4538 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
4540 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
4541 || is_global_var (OMP_CLAUSE_DECL (c
)));
4544 case OMP_CLAUSE_FIRSTPRIVATE
:
4545 case OMP_CLAUSE_COPYIN
:
4547 case OMP_CLAUSE_LINEAR
:
4548 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
4549 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4550 lastprivate_firstprivate
= true;
4552 case OMP_CLAUSE_REDUCTION
:
4553 case OMP_CLAUSE_IN_REDUCTION
:
4554 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
4556 task_reduction_p
= true;
4557 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
4559 task_reduction_other_cnt
++;
4564 task_reduction_cnt
++;
4565 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4567 var
= OMP_CLAUSE_DECL (c
);
4568 /* If var is a global variable that isn't privatized
4569 in outer contexts, we don't need to look up the
4570 original address, it is always the address of the
4571 global variable itself. */
4573 || omp_is_reference (var
)
4575 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4577 task_reduction_needs_orig_p
= true;
4578 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4579 task_reduction_cntorig
++;
4583 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4584 reduction_omp_orig_ref
= true;
4586 case OMP_CLAUSE__REDUCTEMP_
:
4587 if (!is_taskreg_ctx (ctx
))
4590 case OMP_CLAUSE__LOOPTEMP_
:
4591 /* Handle _looptemp_/_reductemp_ clauses only on
4596 case OMP_CLAUSE_LASTPRIVATE
:
4597 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4599 lastprivate_firstprivate
= true;
4600 if (pass
!= 0 || is_taskloop_ctx (ctx
))
4603 /* Even without corresponding firstprivate, if
4604 decl is Fortran allocatable, it needs outer var
4607 && lang_hooks
.decls
.omp_private_outer_ref
4608 (OMP_CLAUSE_DECL (c
)))
4609 lastprivate_firstprivate
= true;
4611 case OMP_CLAUSE_ALIGNED
:
4614 var
= OMP_CLAUSE_DECL (c
);
4615 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
4616 && !is_global_var (var
))
4618 new_var
= maybe_lookup_decl (var
, ctx
);
4619 if (new_var
== NULL_TREE
)
4620 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4621 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4622 tree alarg
= omp_clause_aligned_alignment (c
);
4623 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4624 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
4625 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4626 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4627 gimplify_and_add (x
, ilist
);
4629 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
4630 && is_global_var (var
))
4632 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
4633 new_var
= lookup_decl (var
, ctx
);
4634 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4635 t
= build_fold_addr_expr_loc (clause_loc
, t
);
4636 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4637 tree alarg
= omp_clause_aligned_alignment (c
);
4638 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4639 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
4640 t
= fold_convert_loc (clause_loc
, ptype
, t
);
4641 x
= create_tmp_var (ptype
);
4642 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
4643 gimplify_and_add (t
, ilist
);
4644 t
= build_simple_mem_ref_loc (clause_loc
, x
);
4645 SET_DECL_VALUE_EXPR (new_var
, t
);
4646 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4649 case OMP_CLAUSE__CONDTEMP_
:
4650 if (is_parallel_ctx (ctx
)
4651 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
4658 if (task_reduction_p
!= (pass
>= 2))
4661 new_var
= var
= OMP_CLAUSE_DECL (c
);
4662 if ((c_kind
== OMP_CLAUSE_REDUCTION
4663 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4664 && TREE_CODE (var
) == MEM_REF
)
4666 var
= TREE_OPERAND (var
, 0);
4667 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
4668 var
= TREE_OPERAND (var
, 0);
4669 if (TREE_CODE (var
) == INDIRECT_REF
4670 || TREE_CODE (var
) == ADDR_EXPR
)
4671 var
= TREE_OPERAND (var
, 0);
4672 if (is_variable_sized (var
))
4674 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
4675 var
= DECL_VALUE_EXPR (var
);
4676 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
4677 var
= TREE_OPERAND (var
, 0);
4678 gcc_assert (DECL_P (var
));
4682 if (c_kind
!= OMP_CLAUSE_COPYIN
)
4683 new_var
= lookup_decl (var
, ctx
);
4685 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
4690 /* C/C++ array section reductions. */
4691 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4692 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4693 && var
!= OMP_CLAUSE_DECL (c
))
4698 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
4699 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
4701 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
4703 tree b
= TREE_OPERAND (orig_var
, 1);
4704 b
= maybe_lookup_decl (b
, ctx
);
4707 b
= TREE_OPERAND (orig_var
, 1);
4708 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
4710 if (integer_zerop (bias
))
4714 bias
= fold_convert_loc (clause_loc
,
4715 TREE_TYPE (b
), bias
);
4716 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4717 TREE_TYPE (b
), b
, bias
);
4719 orig_var
= TREE_OPERAND (orig_var
, 0);
4723 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4724 if (is_global_var (out
)
4725 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
4726 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
4727 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
4732 bool by_ref
= use_pointer_for_field (var
, NULL
);
4733 x
= build_receiver_ref (var
, by_ref
, ctx
);
4734 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
4735 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
4737 x
= build_fold_addr_expr (x
);
4739 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
4740 x
= build_simple_mem_ref (x
);
4741 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
4743 if (var
== TREE_OPERAND (orig_var
, 0))
4744 x
= build_fold_addr_expr (x
);
4746 bias
= fold_convert (sizetype
, bias
);
4747 x
= fold_convert (ptr_type_node
, x
);
4748 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
4749 TREE_TYPE (x
), x
, bias
);
4750 unsigned cnt
= task_reduction_cnt
- 1;
4751 if (!task_reduction_needs_orig_p
)
4752 cnt
+= (task_reduction_cntorig_full
4753 - task_reduction_cntorig
);
4755 cnt
= task_reduction_cntorig
- 1;
4756 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4757 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4758 gimplify_assign (r
, x
, ilist
);
4762 if (TREE_CODE (orig_var
) == INDIRECT_REF
4763 || TREE_CODE (orig_var
) == ADDR_EXPR
)
4764 orig_var
= TREE_OPERAND (orig_var
, 0);
4765 tree d
= OMP_CLAUSE_DECL (c
);
4766 tree type
= TREE_TYPE (d
);
4767 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
4768 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
4769 const char *name
= get_name (orig_var
);
4772 tree xv
= create_tmp_var (ptr_type_node
);
4773 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4775 unsigned cnt
= task_reduction_cnt
- 1;
4776 if (!task_reduction_needs_orig_p
)
4777 cnt
+= (task_reduction_cntorig_full
4778 - task_reduction_cntorig
);
4780 cnt
= task_reduction_cntorig
- 1;
4781 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4782 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4784 gimple
*g
= gimple_build_assign (xv
, x
);
4785 gimple_seq_add_stmt (ilist
, g
);
4789 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4791 if (ctx
->task_reductions
[1 + idx
])
4792 off
= fold_convert (sizetype
,
4793 ctx
->task_reductions
[1 + idx
]);
4795 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4797 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
4799 gimple_seq_add_stmt (ilist
, g
);
4801 x
= fold_convert (build_pointer_type (boolean_type_node
),
4803 if (TREE_CONSTANT (v
))
4804 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
4805 TYPE_SIZE_UNIT (type
));
4808 tree t
= maybe_lookup_decl (v
, ctx
);
4812 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4813 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
4815 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4817 build_int_cst (TREE_TYPE (v
), 1));
4818 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4820 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4821 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4823 cond
= create_tmp_var (TREE_TYPE (x
));
4824 gimplify_assign (cond
, x
, ilist
);
4827 else if (TREE_CONSTANT (v
))
4829 x
= create_tmp_var_raw (type
, name
);
4830 gimple_add_tmp_var (x
);
4831 TREE_ADDRESSABLE (x
) = 1;
4832 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4837 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4838 tree t
= maybe_lookup_decl (v
, ctx
);
4842 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4843 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
4844 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4846 build_int_cst (TREE_TYPE (v
), 1));
4847 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4849 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4850 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
4851 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
4854 tree ptype
= build_pointer_type (TREE_TYPE (type
));
4855 x
= fold_convert_loc (clause_loc
, ptype
, x
);
4856 tree y
= create_tmp_var (ptype
, name
);
4857 gimplify_assign (y
, x
, ilist
);
4861 if (!integer_zerop (bias
))
4863 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4865 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4867 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
4868 pointer_sized_int_node
, yb
, bias
);
4869 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
4870 yb
= create_tmp_var (ptype
, name
);
4871 gimplify_assign (yb
, x
, ilist
);
4875 d
= TREE_OPERAND (d
, 0);
4876 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
4877 d
= TREE_OPERAND (d
, 0);
4878 if (TREE_CODE (d
) == ADDR_EXPR
)
4880 if (orig_var
!= var
)
4882 gcc_assert (is_variable_sized (orig_var
));
4883 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
4885 gimplify_assign (new_var
, x
, ilist
);
4886 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
4887 tree t
= build_fold_indirect_ref (new_var
);
4888 DECL_IGNORED_P (new_var
) = 0;
4889 TREE_THIS_NOTRAP (t
) = 1;
4890 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
4891 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
4895 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
4896 build_int_cst (ptype
, 0));
4897 SET_DECL_VALUE_EXPR (new_var
, x
);
4898 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4903 gcc_assert (orig_var
== var
);
4904 if (TREE_CODE (d
) == INDIRECT_REF
)
4906 x
= create_tmp_var (ptype
, name
);
4907 TREE_ADDRESSABLE (x
) = 1;
4908 gimplify_assign (x
, yb
, ilist
);
4909 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4911 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4912 gimplify_assign (new_var
, x
, ilist
);
4914 /* GOMP_taskgroup_reduction_register memsets the whole
4915 array to zero. If the initializer is zero, we don't
4916 need to initialize it again, just mark it as ever
4917 used unconditionally, i.e. cond = true. */
4919 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
4920 && initializer_zerop (omp_reduction_init (c
,
4923 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
4925 gimple_seq_add_stmt (ilist
, g
);
4928 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4932 if (!is_parallel_ctx (ctx
))
4934 tree condv
= create_tmp_var (boolean_type_node
);
4935 g
= gimple_build_assign (condv
,
4936 build_simple_mem_ref (cond
));
4937 gimple_seq_add_stmt (ilist
, g
);
4938 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
4939 g
= gimple_build_cond (NE_EXPR
, condv
,
4940 boolean_false_node
, end
, lab1
);
4941 gimple_seq_add_stmt (ilist
, g
);
4942 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
4944 g
= gimple_build_assign (build_simple_mem_ref (cond
),
4946 gimple_seq_add_stmt (ilist
, g
);
4949 tree y1
= create_tmp_var (ptype
);
4950 gimplify_assign (y1
, y
, ilist
);
4951 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
4952 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
4953 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
4954 if (task_reduction_needs_orig_p
)
4956 y3
= create_tmp_var (ptype
);
4958 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4959 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4960 size_int (task_reduction_cnt_full
4961 + task_reduction_cntorig
- 1),
4962 NULL_TREE
, NULL_TREE
);
4965 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4966 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
4969 gimplify_assign (y3
, ref
, ilist
);
4971 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
4975 y2
= create_tmp_var (ptype
);
4976 gimplify_assign (y2
, y
, ilist
);
4978 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4980 tree ref
= build_outer_var_ref (var
, ctx
);
4981 /* For ref build_outer_var_ref already performs this. */
4982 if (TREE_CODE (d
) == INDIRECT_REF
)
4983 gcc_assert (omp_is_reference (var
));
4984 else if (TREE_CODE (d
) == ADDR_EXPR
)
4985 ref
= build_fold_addr_expr (ref
);
4986 else if (omp_is_reference (var
))
4987 ref
= build_fold_addr_expr (ref
);
4988 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
4989 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
4990 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4992 y3
= create_tmp_var (ptype
);
4993 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
4997 y4
= create_tmp_var (ptype
);
4998 gimplify_assign (y4
, ref
, dlist
);
5002 tree i
= create_tmp_var (TREE_TYPE (v
));
5003 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
5004 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5005 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
5008 i2
= create_tmp_var (TREE_TYPE (v
));
5009 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
5010 body2
= create_artificial_label (UNKNOWN_LOCATION
);
5011 end2
= create_artificial_label (UNKNOWN_LOCATION
);
5012 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
5014 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5016 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5017 tree decl_placeholder
5018 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
5019 SET_DECL_VALUE_EXPR (decl_placeholder
,
5020 build_simple_mem_ref (y1
));
5021 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
5022 SET_DECL_VALUE_EXPR (placeholder
,
5023 y3
? build_simple_mem_ref (y3
)
5025 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5026 x
= lang_hooks
.decls
.omp_clause_default_ctor
5027 (c
, build_simple_mem_ref (y1
),
5028 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
5030 gimplify_and_add (x
, ilist
);
5031 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5033 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5034 lower_omp (&tseq
, ctx
);
5035 gimple_seq_add_seq (ilist
, tseq
);
5037 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5040 SET_DECL_VALUE_EXPR (decl_placeholder
,
5041 build_simple_mem_ref (y2
));
5042 SET_DECL_VALUE_EXPR (placeholder
,
5043 build_simple_mem_ref (y4
));
5044 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5045 lower_omp (&tseq
, ctx
);
5046 gimple_seq_add_seq (dlist
, tseq
);
5047 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5049 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5050 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
5053 x
= lang_hooks
.decls
.omp_clause_dtor
5054 (c
, build_simple_mem_ref (y2
));
5056 gimplify_and_add (x
, dlist
);
5061 x
= omp_reduction_init (c
, TREE_TYPE (type
));
5062 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5064 /* reduction(-:var) sums up the partial results, so it
5065 acts identically to reduction(+:var). */
5066 if (code
== MINUS_EXPR
)
5069 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
5072 x
= build2 (code
, TREE_TYPE (type
),
5073 build_simple_mem_ref (y4
),
5074 build_simple_mem_ref (y2
));
5075 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
5079 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
5080 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5081 gimple_seq_add_stmt (ilist
, g
);
5084 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
5085 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5086 gimple_seq_add_stmt (ilist
, g
);
5088 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
5089 build_int_cst (TREE_TYPE (i
), 1));
5090 gimple_seq_add_stmt (ilist
, g
);
5091 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
5092 gimple_seq_add_stmt (ilist
, g
);
5093 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
5096 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
5097 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5098 gimple_seq_add_stmt (dlist
, g
);
5101 g
= gimple_build_assign
5102 (y4
, POINTER_PLUS_EXPR
, y4
,
5103 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5104 gimple_seq_add_stmt (dlist
, g
);
5106 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
5107 build_int_cst (TREE_TYPE (i2
), 1));
5108 gimple_seq_add_stmt (dlist
, g
);
5109 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
5110 gimple_seq_add_stmt (dlist
, g
);
5111 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
5117 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
5121 bool by_ref
= use_pointer_for_field (var
, ctx
);
5122 x
= build_receiver_ref (var
, by_ref
, ctx
);
5124 if (!omp_is_reference (var
))
5125 x
= build_fold_addr_expr (x
);
5126 x
= fold_convert (ptr_type_node
, x
);
5127 unsigned cnt
= task_reduction_cnt
- 1;
5128 if (!task_reduction_needs_orig_p
)
5129 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
5131 cnt
= task_reduction_cntorig
- 1;
5132 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5133 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5134 gimplify_assign (r
, x
, ilist
);
5139 tree type
= TREE_TYPE (new_var
);
5140 if (!omp_is_reference (var
))
5141 type
= build_pointer_type (type
);
5142 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5144 unsigned cnt
= task_reduction_cnt
- 1;
5145 if (!task_reduction_needs_orig_p
)
5146 cnt
+= (task_reduction_cntorig_full
5147 - task_reduction_cntorig
);
5149 cnt
= task_reduction_cntorig
- 1;
5150 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5151 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5155 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5157 if (ctx
->task_reductions
[1 + idx
])
5158 off
= fold_convert (sizetype
,
5159 ctx
->task_reductions
[1 + idx
]);
5161 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5163 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
5166 x
= fold_convert (type
, x
);
5168 if (omp_is_reference (var
))
5170 gimplify_assign (new_var
, x
, ilist
);
5172 new_var
= build_simple_mem_ref (new_var
);
5176 t
= create_tmp_var (type
);
5177 gimplify_assign (t
, x
, ilist
);
5178 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
5179 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5181 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
5182 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
5183 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5184 cond
= create_tmp_var (TREE_TYPE (t
));
5185 gimplify_assign (cond
, t
, ilist
);
5187 else if (is_variable_sized (var
))
5189 /* For variable sized types, we need to allocate the
5190 actual storage here. Call alloca and store the
5191 result in the pointer decl that we created elsewhere. */
5195 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
5200 ptr
= DECL_VALUE_EXPR (new_var
);
5201 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
5202 ptr
= TREE_OPERAND (ptr
, 0);
5203 gcc_assert (DECL_P (ptr
));
5204 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
5206 /* void *tmp = __builtin_alloca */
5207 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5208 stmt
= gimple_build_call (atmp
, 2, x
,
5209 size_int (DECL_ALIGN (var
)));
5210 cfun
->calls_alloca
= 1;
5211 tmp
= create_tmp_var_raw (ptr_type_node
);
5212 gimple_add_tmp_var (tmp
);
5213 gimple_call_set_lhs (stmt
, tmp
);
5215 gimple_seq_add_stmt (ilist
, stmt
);
5217 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
5218 gimplify_assign (ptr
, x
, ilist
);
5221 else if (omp_is_reference (var
)
5222 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
5223 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
5225 /* For references that are being privatized for Fortran,
5226 allocate new backing storage for the new pointer
5227 variable. This allows us to avoid changing all the
5228 code that expects a pointer to something that expects
5229 a direct variable. */
5233 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
5234 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
5236 x
= build_receiver_ref (var
, false, ctx
);
5237 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5239 else if (TREE_CONSTANT (x
))
5241 /* For reduction in SIMD loop, defer adding the
5242 initialization of the reference, because if we decide
5243 to use SIMD array for it, the initilization could cause
5244 expansion ICE. Ditto for other privatization clauses. */
5249 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
5251 gimple_add_tmp_var (x
);
5252 TREE_ADDRESSABLE (x
) = 1;
5253 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5259 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5260 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
5261 tree al
= size_int (TYPE_ALIGN (rtype
));
5262 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
5267 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5268 gimplify_assign (new_var
, x
, ilist
);
5271 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5273 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5274 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5275 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5283 switch (OMP_CLAUSE_CODE (c
))
5285 case OMP_CLAUSE_SHARED
:
5286 /* Ignore shared directives in teams construct inside
5287 target construct. */
5288 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5289 && !is_host_teams_ctx (ctx
))
5291 /* Shared global vars are just accessed directly. */
5292 if (is_global_var (new_var
))
5294 /* For taskloop firstprivate/lastprivate, represented
5295 as firstprivate and shared clause on the task, new_var
5296 is the firstprivate var. */
5297 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5299 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5300 needs to be delayed until after fixup_child_record_type so
5301 that we get the correct type during the dereference. */
5302 by_ref
= use_pointer_for_field (var
, ctx
);
5303 x
= build_receiver_ref (var
, by_ref
, ctx
);
5304 SET_DECL_VALUE_EXPR (new_var
, x
);
5305 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5307 /* ??? If VAR is not passed by reference, and the variable
5308 hasn't been initialized yet, then we'll get a warning for
5309 the store into the omp_data_s structure. Ideally, we'd be
5310 able to notice this and not store anything at all, but
5311 we're generating code too early. Suppress the warning. */
5313 TREE_NO_WARNING (var
) = 1;
5316 case OMP_CLAUSE__CONDTEMP_
:
5317 if (is_parallel_ctx (ctx
))
5319 x
= build_receiver_ref (var
, false, ctx
);
5320 SET_DECL_VALUE_EXPR (new_var
, x
);
5321 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5323 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5325 x
= build_zero_cst (TREE_TYPE (var
));
5330 case OMP_CLAUSE_LASTPRIVATE
:
5331 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5335 case OMP_CLAUSE_PRIVATE
:
5336 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5337 x
= build_outer_var_ref (var
, ctx
);
5338 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5340 if (is_task_ctx (ctx
))
5341 x
= build_receiver_ref (var
, false, ctx
);
5343 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5351 nx
= unshare_expr (new_var
);
5353 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5354 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
5357 nx
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, nx
, x
);
5359 nx
= lang_hooks
.decls
.omp_clause_default_ctor (c
, nx
, x
);
5362 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5363 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5364 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5365 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5366 || (gimple_omp_for_index (ctx
->stmt
, 0)
5368 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5369 || omp_is_reference (var
))
5370 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5373 if (omp_is_reference (var
))
5375 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5376 tree new_vard
= TREE_OPERAND (new_var
, 0);
5377 gcc_assert (DECL_P (new_vard
));
5378 SET_DECL_VALUE_EXPR (new_vard
,
5379 build_fold_addr_expr (lvar
));
5380 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5385 tree iv
= unshare_expr (ivar
);
5387 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
,
5390 x
= lang_hooks
.decls
.omp_clause_default_ctor (c
,
5394 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
5396 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
5397 unshare_expr (ivar
), x
);
5401 gimplify_and_add (x
, &llist
[0]);
5402 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5403 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5408 gcc_assert (TREE_CODE (v
) == MEM_REF
);
5409 v
= TREE_OPERAND (v
, 0);
5410 gcc_assert (DECL_P (v
));
5412 v
= *ctx
->lastprivate_conditional_map
->get (v
);
5413 tree t
= create_tmp_var (TREE_TYPE (v
));
5414 tree z
= build_zero_cst (TREE_TYPE (v
));
5416 = build_outer_var_ref (var
, ctx
,
5417 OMP_CLAUSE_LASTPRIVATE
);
5418 gimple_seq_add_stmt (dlist
,
5419 gimple_build_assign (t
, z
));
5420 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
5421 tree civar
= DECL_VALUE_EXPR (v
);
5422 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
5423 civar
= unshare_expr (civar
);
5424 TREE_OPERAND (civar
, 1) = sctx
.idx
;
5425 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
5426 unshare_expr (civar
));
5427 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
5428 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
5429 orig_v
, unshare_expr (ivar
)));
5430 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
5432 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
5434 gimple_seq tseq
= NULL
;
5435 gimplify_and_add (x
, &tseq
);
5437 lower_omp (&tseq
, ctx
->outer
);
5438 gimple_seq_add_seq (&llist
[1], tseq
);
5440 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5441 && ctx
->for_simd_scan_phase
)
5443 x
= unshare_expr (ivar
);
5445 = build_outer_var_ref (var
, ctx
,
5446 OMP_CLAUSE_LASTPRIVATE
);
5447 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5449 gimplify_and_add (x
, &llist
[0]);
5453 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5455 gimplify_and_add (y
, &llist
[1]);
5459 if (omp_is_reference (var
))
5461 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5462 tree new_vard
= TREE_OPERAND (new_var
, 0);
5463 gcc_assert (DECL_P (new_vard
));
5464 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5465 x
= TYPE_SIZE_UNIT (type
);
5466 if (TREE_CONSTANT (x
))
5468 x
= create_tmp_var_raw (type
, get_name (var
));
5469 gimple_add_tmp_var (x
);
5470 TREE_ADDRESSABLE (x
) = 1;
5471 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5472 x
= fold_convert_loc (clause_loc
,
5473 TREE_TYPE (new_vard
), x
);
5474 gimplify_assign (new_vard
, x
, ilist
);
5479 gimplify_and_add (nx
, ilist
);
5480 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5482 && ctx
->for_simd_scan_phase
)
5484 tree orig_v
= build_outer_var_ref (var
, ctx
,
5485 OMP_CLAUSE_LASTPRIVATE
);
5486 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
5488 gimplify_and_add (x
, ilist
);
5493 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5495 gimplify_and_add (x
, dlist
);
5498 case OMP_CLAUSE_LINEAR
:
5499 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
5500 goto do_firstprivate
;
5501 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
5504 x
= build_outer_var_ref (var
, ctx
);
5507 case OMP_CLAUSE_FIRSTPRIVATE
:
5508 if (is_task_ctx (ctx
))
5510 if ((omp_is_reference (var
)
5511 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
5512 || is_variable_sized (var
))
5514 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
5516 || use_pointer_for_field (var
, NULL
))
5518 x
= build_receiver_ref (var
, false, ctx
);
5519 SET_DECL_VALUE_EXPR (new_var
, x
);
5520 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5524 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
5525 && omp_is_reference (var
))
5527 x
= build_outer_var_ref (var
, ctx
);
5528 gcc_assert (TREE_CODE (x
) == MEM_REF
5529 && integer_zerop (TREE_OPERAND (x
, 1)));
5530 x
= TREE_OPERAND (x
, 0);
5531 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5532 (c
, unshare_expr (new_var
), x
);
5533 gimplify_and_add (x
, ilist
);
5537 x
= build_outer_var_ref (var
, ctx
);
5540 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5541 && gimple_omp_for_combined_into_p (ctx
->stmt
))
5543 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5544 tree stept
= TREE_TYPE (t
);
5545 tree ct
= omp_find_clause (clauses
,
5546 OMP_CLAUSE__LOOPTEMP_
);
5548 tree l
= OMP_CLAUSE_DECL (ct
);
5549 tree n1
= fd
->loop
.n1
;
5550 tree step
= fd
->loop
.step
;
5551 tree itype
= TREE_TYPE (l
);
5552 if (POINTER_TYPE_P (itype
))
5553 itype
= signed_type_for (itype
);
5554 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
5555 if (TYPE_UNSIGNED (itype
)
5556 && fd
->loop
.cond_code
== GT_EXPR
)
5557 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
5558 fold_build1 (NEGATE_EXPR
, itype
, l
),
5559 fold_build1 (NEGATE_EXPR
,
5562 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
5563 t
= fold_build2 (MULT_EXPR
, stept
,
5564 fold_convert (stept
, l
), t
);
5566 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
5568 if (omp_is_reference (var
))
5570 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5571 tree new_vard
= TREE_OPERAND (new_var
, 0);
5572 gcc_assert (DECL_P (new_vard
));
5573 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5574 nx
= TYPE_SIZE_UNIT (type
);
5575 if (TREE_CONSTANT (nx
))
5577 nx
= create_tmp_var_raw (type
,
5579 gimple_add_tmp_var (nx
);
5580 TREE_ADDRESSABLE (nx
) = 1;
5581 nx
= build_fold_addr_expr_loc (clause_loc
,
5583 nx
= fold_convert_loc (clause_loc
,
5584 TREE_TYPE (new_vard
),
5586 gimplify_assign (new_vard
, nx
, ilist
);
5590 x
= lang_hooks
.decls
.omp_clause_linear_ctor
5592 gimplify_and_add (x
, ilist
);
5596 if (POINTER_TYPE_P (TREE_TYPE (x
)))
5597 x
= fold_build2 (POINTER_PLUS_EXPR
,
5598 TREE_TYPE (x
), x
, t
);
5600 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5603 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
5604 || TREE_ADDRESSABLE (new_var
)
5605 || omp_is_reference (var
))
5606 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5609 if (omp_is_reference (var
))
5611 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5612 tree new_vard
= TREE_OPERAND (new_var
, 0);
5613 gcc_assert (DECL_P (new_vard
));
5614 SET_DECL_VALUE_EXPR (new_vard
,
5615 build_fold_addr_expr (lvar
));
5616 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5618 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
5620 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
5621 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
5622 gimplify_and_add (x
, ilist
);
5623 gimple_stmt_iterator gsi
5624 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
5626 = gimple_build_assign (unshare_expr (lvar
), iv
);
5627 gsi_insert_before_without_update (&gsi
, g
,
5629 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5630 enum tree_code code
= PLUS_EXPR
;
5631 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
5632 code
= POINTER_PLUS_EXPR
;
5633 g
= gimple_build_assign (iv
, code
, iv
, t
);
5634 gsi_insert_before_without_update (&gsi
, g
,
5638 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5639 (c
, unshare_expr (ivar
), x
);
5640 gimplify_and_add (x
, &llist
[0]);
5641 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5643 gimplify_and_add (x
, &llist
[1]);
5646 if (omp_is_reference (var
))
5648 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5649 tree new_vard
= TREE_OPERAND (new_var
, 0);
5650 gcc_assert (DECL_P (new_vard
));
5651 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5652 nx
= TYPE_SIZE_UNIT (type
);
5653 if (TREE_CONSTANT (nx
))
5655 nx
= create_tmp_var_raw (type
, get_name (var
));
5656 gimple_add_tmp_var (nx
);
5657 TREE_ADDRESSABLE (nx
) = 1;
5658 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
5659 nx
= fold_convert_loc (clause_loc
,
5660 TREE_TYPE (new_vard
), nx
);
5661 gimplify_assign (new_vard
, nx
, ilist
);
5665 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5666 (c
, unshare_expr (new_var
), x
);
5667 gimplify_and_add (x
, ilist
);
5670 case OMP_CLAUSE__LOOPTEMP_
:
5671 case OMP_CLAUSE__REDUCTEMP_
:
5672 gcc_assert (is_taskreg_ctx (ctx
));
5673 x
= build_outer_var_ref (var
, ctx
);
5674 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5675 gimplify_and_add (x
, ilist
);
5678 case OMP_CLAUSE_COPYIN
:
5679 by_ref
= use_pointer_for_field (var
, NULL
);
5680 x
= build_receiver_ref (var
, by_ref
, ctx
);
5681 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
5682 append_to_statement_list (x
, ©in_seq
);
5683 copyin_by_ref
|= by_ref
;
5686 case OMP_CLAUSE_REDUCTION
:
5687 case OMP_CLAUSE_IN_REDUCTION
:
5688 /* OpenACC reductions are initialized using the
5689 GOACC_REDUCTION internal function. */
5690 if (is_gimple_omp_oacc (ctx
->stmt
))
5692 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5694 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5696 tree ptype
= TREE_TYPE (placeholder
);
5699 x
= error_mark_node
;
5700 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
5701 && !task_reduction_needs_orig_p
)
5703 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5705 tree pptype
= build_pointer_type (ptype
);
5706 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5707 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5708 size_int (task_reduction_cnt_full
5709 + task_reduction_cntorig
- 1),
5710 NULL_TREE
, NULL_TREE
);
5714 = *ctx
->task_reduction_map
->get (c
);
5715 x
= task_reduction_read (ilist
, tskred_temp
,
5716 pptype
, 7 + 3 * idx
);
5718 x
= fold_convert (pptype
, x
);
5719 x
= build_simple_mem_ref (x
);
5724 x
= build_outer_var_ref (var
, ctx
);
5726 if (omp_is_reference (var
)
5727 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
5728 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5730 SET_DECL_VALUE_EXPR (placeholder
, x
);
5731 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5732 tree new_vard
= new_var
;
5733 if (omp_is_reference (var
))
5735 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5736 new_vard
= TREE_OPERAND (new_var
, 0);
5737 gcc_assert (DECL_P (new_vard
));
5739 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5741 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5742 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5745 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5749 if (new_vard
== new_var
)
5751 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
5752 SET_DECL_VALUE_EXPR (new_var
, ivar
);
5756 SET_DECL_VALUE_EXPR (new_vard
,
5757 build_fold_addr_expr (ivar
));
5758 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5760 x
= lang_hooks
.decls
.omp_clause_default_ctor
5761 (c
, unshare_expr (ivar
),
5762 build_outer_var_ref (var
, ctx
));
5763 if (rvarp
&& ctx
->for_simd_scan_phase
)
5766 gimplify_and_add (x
, &llist
[0]);
5767 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5769 gimplify_and_add (x
, &llist
[1]);
5776 gimplify_and_add (x
, &llist
[0]);
5778 tree ivar2
= unshare_expr (lvar
);
5779 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5780 x
= lang_hooks
.decls
.omp_clause_default_ctor
5781 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
5782 gimplify_and_add (x
, &llist
[0]);
5786 x
= lang_hooks
.decls
.omp_clause_default_ctor
5787 (c
, unshare_expr (rvar2
),
5788 build_outer_var_ref (var
, ctx
));
5789 gimplify_and_add (x
, &llist
[0]);
5792 /* For types that need construction, add another
5793 private var which will be default constructed
5794 and optionally initialized with
5795 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5796 loop we want to assign this value instead of
5797 constructing and destructing it in each
5799 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
5800 gimple_add_tmp_var (nv
);
5801 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
5805 x
= lang_hooks
.decls
.omp_clause_default_ctor
5806 (c
, nv
, build_outer_var_ref (var
, ctx
));
5807 gimplify_and_add (x
, ilist
);
5809 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5811 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5812 x
= DECL_VALUE_EXPR (new_vard
);
5814 if (new_vard
!= new_var
)
5815 vexpr
= build_fold_addr_expr (nv
);
5816 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5817 lower_omp (&tseq
, ctx
);
5818 SET_DECL_VALUE_EXPR (new_vard
, x
);
5819 gimple_seq_add_seq (ilist
, tseq
);
5820 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5823 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5825 gimplify_and_add (x
, dlist
);
5828 tree ref
= build_outer_var_ref (var
, ctx
);
5829 x
= unshare_expr (ivar
);
5830 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5832 gimplify_and_add (x
, &llist
[0]);
5834 ref
= build_outer_var_ref (var
, ctx
);
5835 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
5837 gimplify_and_add (x
, &llist
[3]);
5839 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5840 if (new_vard
== new_var
)
5841 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5843 SET_DECL_VALUE_EXPR (new_vard
,
5844 build_fold_addr_expr (lvar
));
5846 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5848 gimplify_and_add (x
, &llist
[1]);
5850 tree ivar2
= unshare_expr (lvar
);
5851 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5852 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
5854 gimplify_and_add (x
, &llist
[1]);
5858 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
5860 gimplify_and_add (x
, &llist
[1]);
5865 gimplify_and_add (x
, &llist
[0]);
5866 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5868 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5869 lower_omp (&tseq
, ctx
);
5870 gimple_seq_add_seq (&llist
[0], tseq
);
5872 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5873 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5874 lower_omp (&tseq
, ctx
);
5875 gimple_seq_add_seq (&llist
[1], tseq
);
5876 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5877 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5878 if (new_vard
== new_var
)
5879 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5881 SET_DECL_VALUE_EXPR (new_vard
,
5882 build_fold_addr_expr (lvar
));
5883 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5885 gimplify_and_add (x
, &llist
[1]);
5888 /* If this is a reference to constant size reduction var
5889 with placeholder, we haven't emitted the initializer
5890 for it because it is undesirable if SIMD arrays are used.
5891 But if they aren't used, we need to emit the deferred
5892 initialization now. */
5893 else if (omp_is_reference (var
) && is_simd
)
5894 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5896 tree lab2
= NULL_TREE
;
5900 if (!is_parallel_ctx (ctx
))
5902 tree condv
= create_tmp_var (boolean_type_node
);
5903 tree m
= build_simple_mem_ref (cond
);
5904 g
= gimple_build_assign (condv
, m
);
5905 gimple_seq_add_stmt (ilist
, g
);
5907 = create_artificial_label (UNKNOWN_LOCATION
);
5908 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5909 g
= gimple_build_cond (NE_EXPR
, condv
,
5912 gimple_seq_add_stmt (ilist
, g
);
5913 gimple_seq_add_stmt (ilist
,
5914 gimple_build_label (lab1
));
5916 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5918 gimple_seq_add_stmt (ilist
, g
);
5920 x
= lang_hooks
.decls
.omp_clause_default_ctor
5921 (c
, unshare_expr (new_var
),
5923 : build_outer_var_ref (var
, ctx
));
5925 gimplify_and_add (x
, ilist
);
5927 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5928 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5930 if (ctx
->for_simd_scan_phase
)
5933 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
5935 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
5936 gimple_add_tmp_var (nv
);
5937 ctx
->cb
.decl_map
->put (new_vard
, nv
);
5938 x
= lang_hooks
.decls
.omp_clause_default_ctor
5939 (c
, nv
, build_outer_var_ref (var
, ctx
));
5941 gimplify_and_add (x
, ilist
);
5942 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5944 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5946 if (new_vard
!= new_var
)
5947 vexpr
= build_fold_addr_expr (nv
);
5948 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5949 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5950 lower_omp (&tseq
, ctx
);
5951 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
5952 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
5953 gimple_seq_add_seq (ilist
, tseq
);
5955 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5956 if (is_simd
&& ctx
->scan_exclusive
)
5959 = create_tmp_var_raw (TREE_TYPE (new_var
));
5960 gimple_add_tmp_var (nv2
);
5961 ctx
->cb
.decl_map
->put (nv
, nv2
);
5962 x
= lang_hooks
.decls
.omp_clause_default_ctor
5963 (c
, nv2
, build_outer_var_ref (var
, ctx
));
5964 gimplify_and_add (x
, ilist
);
5965 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5967 gimplify_and_add (x
, dlist
);
5969 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5971 gimplify_and_add (x
, dlist
);
5974 && ctx
->scan_exclusive
5975 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
5977 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
5978 gimple_add_tmp_var (nv2
);
5979 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
5980 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5982 gimplify_and_add (x
, dlist
);
5984 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5988 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5990 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5991 lower_omp (&tseq
, ctx
);
5992 gimple_seq_add_seq (ilist
, tseq
);
5994 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5997 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5998 lower_omp (&tseq
, ctx
);
5999 gimple_seq_add_seq (dlist
, tseq
);
6000 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6002 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6006 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6013 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
6014 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
6015 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6020 tree lab2
= NULL_TREE
;
6021 /* GOMP_taskgroup_reduction_register memsets the whole
6022 array to zero. If the initializer is zero, we don't
6023 need to initialize it again, just mark it as ever
6024 used unconditionally, i.e. cond = true. */
6025 if (initializer_zerop (x
))
6027 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6029 gimple_seq_add_stmt (ilist
, g
);
6034 if (!cond) { cond = true; new_var = x; } */
6035 if (!is_parallel_ctx (ctx
))
6037 tree condv
= create_tmp_var (boolean_type_node
);
6038 tree m
= build_simple_mem_ref (cond
);
6039 g
= gimple_build_assign (condv
, m
);
6040 gimple_seq_add_stmt (ilist
, g
);
6042 = create_artificial_label (UNKNOWN_LOCATION
);
6043 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6044 g
= gimple_build_cond (NE_EXPR
, condv
,
6047 gimple_seq_add_stmt (ilist
, g
);
6048 gimple_seq_add_stmt (ilist
,
6049 gimple_build_label (lab1
));
6051 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6053 gimple_seq_add_stmt (ilist
, g
);
6054 gimplify_assign (new_var
, x
, ilist
);
6056 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6060 /* reduction(-:var) sums up the partial results, so it
6061 acts identically to reduction(+:var). */
6062 if (code
== MINUS_EXPR
)
6065 tree new_vard
= new_var
;
6066 if (is_simd
&& omp_is_reference (var
))
6068 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6069 new_vard
= TREE_OPERAND (new_var
, 0);
6070 gcc_assert (DECL_P (new_vard
));
6072 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6074 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6075 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6078 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6082 if (new_vard
!= new_var
)
6084 SET_DECL_VALUE_EXPR (new_vard
,
6085 build_fold_addr_expr (lvar
));
6086 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6089 tree ref
= build_outer_var_ref (var
, ctx
);
6093 if (ctx
->for_simd_scan_phase
)
6095 gimplify_assign (ivar
, ref
, &llist
[0]);
6096 ref
= build_outer_var_ref (var
, ctx
);
6097 gimplify_assign (ref
, rvar
, &llist
[3]);
6101 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
6106 simt_lane
= create_tmp_var (unsigned_type_node
);
6107 x
= build_call_expr_internal_loc
6108 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
6109 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
6110 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
6111 gimplify_assign (ivar
, x
, &llist
[2]);
6113 x
= build2 (code
, TREE_TYPE (ref
), ref
, ivar
);
6114 ref
= build_outer_var_ref (var
, ctx
);
6115 gimplify_assign (ref
, x
, &llist
[1]);
6120 if (omp_is_reference (var
) && is_simd
)
6121 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6122 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6123 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6125 gimplify_assign (new_var
, x
, ilist
);
6128 tree ref
= build_outer_var_ref (var
, ctx
);
6130 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
6131 ref
= build_outer_var_ref (var
, ctx
);
6132 gimplify_assign (ref
, x
, dlist
);
6145 tree clobber
= build_clobber (TREE_TYPE (tskred_avar
));
6146 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
6149 if (known_eq (sctx
.max_vf
, 1U))
6151 sctx
.is_simt
= false;
6152 if (ctx
->lastprivate_conditional_map
)
6154 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
6156 /* Signal to lower_omp_1 that it should use parent context. */
6157 ctx
->combined_into_simd_safelen1
= true;
6158 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6159 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6160 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6162 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6163 omp_context
*outer
= ctx
->outer
;
6164 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
6165 outer
= outer
->outer
;
6166 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
6167 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
6168 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
6174 /* When not vectorized, treat lastprivate(conditional:) like
6175 normal lastprivate, as there will be just one simd lane
6176 writing the privatized variable. */
6177 delete ctx
->lastprivate_conditional_map
;
6178 ctx
->lastprivate_conditional_map
= NULL
;
6183 if (nonconst_simd_if
)
6185 if (sctx
.lane
== NULL_TREE
)
6187 sctx
.idx
= create_tmp_var (unsigned_type_node
);
6188 sctx
.lane
= create_tmp_var (unsigned_type_node
);
6190 /* FIXME: For now. */
6191 sctx
.is_simt
= false;
6194 if (sctx
.lane
|| sctx
.is_simt
)
6196 uid
= create_tmp_var (ptr_type_node
, "simduid");
6197 /* Don't want uninit warnings on simduid, it is always uninitialized,
6198 but we use it not for the value, but for the DECL_UID only. */
6199 TREE_NO_WARNING (uid
) = 1;
6200 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
6201 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
6202 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6203 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6205 /* Emit calls denoting privatized variables and initializing a pointer to
6206 structure that holds private variables as fields after ompdevlow pass. */
6209 sctx
.simt_eargs
[0] = uid
;
6211 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
6212 gimple_call_set_lhs (g
, uid
);
6213 gimple_seq_add_stmt (ilist
, g
);
6214 sctx
.simt_eargs
.release ();
6216 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
6217 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
6218 gimple_call_set_lhs (g
, simtrec
);
6219 gimple_seq_add_stmt (ilist
, g
);
6223 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6224 2 + (nonconst_simd_if
!= NULL
),
6225 uid
, integer_zero_node
,
6227 gimple_call_set_lhs (g
, sctx
.lane
);
6228 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
6229 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6230 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6231 build_int_cst (unsigned_type_node
, 0));
6232 gimple_seq_add_stmt (ilist
, g
);
6235 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6237 gimple_call_set_lhs (g
, sctx
.lastlane
);
6238 gimple_seq_add_stmt (dlist
, g
);
6239 gimple_seq_add_seq (dlist
, llist
[3]);
6241 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6244 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6245 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6246 gimple_call_set_lhs (g
, simt_vf
);
6247 gimple_seq_add_stmt (dlist
, g
);
6249 tree t
= build_int_cst (unsigned_type_node
, 1);
6250 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6251 gimple_seq_add_stmt (dlist
, g
);
6253 t
= build_int_cst (unsigned_type_node
, 0);
6254 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6255 gimple_seq_add_stmt (dlist
, g
);
6257 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6258 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6259 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6260 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6261 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6263 gimple_seq_add_seq (dlist
, llist
[2]);
6265 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6266 gimple_seq_add_stmt (dlist
, g
);
6268 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6269 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6270 gimple_seq_add_stmt (dlist
, g
);
6272 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
6274 for (int i
= 0; i
< 2; i
++)
6277 tree vf
= create_tmp_var (unsigned_type_node
);
6278 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
6279 gimple_call_set_lhs (g
, vf
);
6280 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
6281 gimple_seq_add_stmt (seq
, g
);
6282 tree t
= build_int_cst (unsigned_type_node
, 0);
6283 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6284 gimple_seq_add_stmt (seq
, g
);
6285 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6286 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6287 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6288 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
6289 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
6290 gimple_seq_add_seq (seq
, llist
[i
]);
6291 t
= build_int_cst (unsigned_type_node
, 1);
6292 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
6293 gimple_seq_add_stmt (seq
, g
);
6294 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
6295 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
6296 gimple_seq_add_stmt (seq
, g
);
6297 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
6302 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
6304 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
6305 gimple_seq_add_stmt (dlist
, g
);
6308 /* The copyin sequence is not to be executed by the main thread, since
6309 that would result in self-copies. Perhaps not visible to scalars,
6310 but it certainly is to C++ operator=. */
6313 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
6315 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
6316 build_int_cst (TREE_TYPE (x
), 0));
6317 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
6318 gimplify_and_add (x
, ilist
);
6321 /* If any copyin variable is passed by reference, we must ensure the
6322 master thread doesn't modify it before it is copied over in all
6323 threads. Similarly for variables in both firstprivate and
6324 lastprivate clauses we need to ensure the lastprivate copying
6325 happens after firstprivate copying in all threads. And similarly
6326 for UDRs if initializer expression refers to omp_orig. */
6327 if (copyin_by_ref
|| lastprivate_firstprivate
6328 || (reduction_omp_orig_ref
6329 && !ctx
->scan_inclusive
6330 && !ctx
->scan_exclusive
))
6332 /* Don't add any barrier for #pragma omp simd or
6333 #pragma omp distribute. */
6334 if (!is_task_ctx (ctx
)
6335 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
6336 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
6337 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
6340 /* If max_vf is non-zero, then we can use only a vectorization factor
6341 up to the max_vf we chose. So stick it into the safelen clause. */
6342 if (maybe_ne (sctx
.max_vf
, 0U))
6344 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
6345 OMP_CLAUSE_SAFELEN
);
6346 poly_uint64 safe_len
;
6348 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
6349 && maybe_gt (safe_len
, sctx
.max_vf
)))
6351 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
6352 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
6354 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6355 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6360 /* Create temporary variables for lastprivate(conditional:) implementation
6361 in context CTX with CLAUSES. */
6364 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
6366 tree iter_type
= NULL_TREE
;
6367 tree cond_ptr
= NULL_TREE
;
6368 tree iter_var
= NULL_TREE
;
6369 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6370 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
6371 tree next
= *clauses
;
6372 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6373 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6374 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6378 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
6380 if (iter_type
== NULL_TREE
)
6382 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
6383 iter_var
= create_tmp_var_raw (iter_type
);
6384 DECL_CONTEXT (iter_var
) = current_function_decl
;
6385 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6386 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6387 ctx
->block_vars
= iter_var
;
6389 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6390 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6391 OMP_CLAUSE_DECL (c3
) = iter_var
;
6392 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
6394 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6396 next
= OMP_CLAUSE_CHAIN (cc
);
6397 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6398 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
6399 ctx
->lastprivate_conditional_map
->put (o
, v
);
6402 if (iter_type
== NULL
)
6404 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
6406 struct omp_for_data fd
;
6407 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
6409 iter_type
= unsigned_type_for (fd
.iter_type
);
6411 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
6412 iter_type
= unsigned_type_node
;
6413 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
6417 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
6418 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6422 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
6423 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
6424 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
6425 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
6426 ctx
->block_vars
= cond_ptr
;
6427 c2
= build_omp_clause (UNKNOWN_LOCATION
,
6428 OMP_CLAUSE__CONDTEMP_
);
6429 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6430 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
6433 iter_var
= create_tmp_var_raw (iter_type
);
6434 DECL_CONTEXT (iter_var
) = current_function_decl
;
6435 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6436 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6437 ctx
->block_vars
= iter_var
;
6439 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6440 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6441 OMP_CLAUSE_DECL (c3
) = iter_var
;
6442 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
6443 OMP_CLAUSE_CHAIN (c2
) = c3
;
6444 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6446 tree v
= create_tmp_var_raw (iter_type
);
6447 DECL_CONTEXT (v
) = current_function_decl
;
6448 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
6449 DECL_CHAIN (v
) = ctx
->block_vars
;
6450 ctx
->block_vars
= v
;
6451 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6452 ctx
->lastprivate_conditional_map
->put (o
, v
);
6457 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6458 both parallel and workshare constructs. PREDICATE may be NULL if it's
6459 always true. BODY_P is the sequence to insert early initialization
6460 if needed, STMT_LIST is where the non-conditional lastprivate handling
6461 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6465 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
6466 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
6469 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
6470 bool par_clauses
= false;
6471 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
6472 unsigned HOST_WIDE_INT conditional_off
= 0;
6473 gimple_seq post_stmt_list
= NULL
;
6475 /* Early exit if there are no lastprivate or linear clauses. */
6476 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
6477 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
6478 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
6479 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
6481 if (clauses
== NULL
)
6483 /* If this was a workshare clause, see if it had been combined
6484 with its parallel. In that case, look for the clauses on the
6485 parallel statement itself. */
6486 if (is_parallel_ctx (ctx
))
6490 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6493 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6494 OMP_CLAUSE_LASTPRIVATE
);
6495 if (clauses
== NULL
)
6500 bool maybe_simt
= false;
6501 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6502 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
6504 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
6505 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
6507 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
6513 tree label_true
, arm1
, arm2
;
6514 enum tree_code pred_code
= TREE_CODE (predicate
);
6516 label
= create_artificial_label (UNKNOWN_LOCATION
);
6517 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
6518 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
6520 arm1
= TREE_OPERAND (predicate
, 0);
6521 arm2
= TREE_OPERAND (predicate
, 1);
6522 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6523 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6528 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6529 arm2
= boolean_false_node
;
6530 pred_code
= NE_EXPR
;
6534 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
6535 c
= fold_convert (integer_type_node
, c
);
6536 simtcond
= create_tmp_var (integer_type_node
);
6537 gimplify_assign (simtcond
, c
, stmt_list
);
6538 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
6540 c
= create_tmp_var (integer_type_node
);
6541 gimple_call_set_lhs (g
, c
);
6542 gimple_seq_add_stmt (stmt_list
, g
);
6543 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
6547 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
6548 gimple_seq_add_stmt (stmt_list
, stmt
);
6549 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
6552 tree cond_ptr
= NULL_TREE
;
6553 for (c
= clauses
; c
;)
6556 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6557 gimple_seq
*this_stmt_list
= stmt_list
;
6558 tree lab2
= NULL_TREE
;
6560 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6561 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6562 && ctx
->lastprivate_conditional_map
6563 && !ctx
->combined_into_simd_safelen1
)
6565 gcc_assert (body_p
);
6568 if (cond_ptr
== NULL_TREE
)
6570 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
6571 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
6573 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
6574 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6575 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
6576 gimplify_assign (v
, build_zero_cst (type
), body_p
);
6577 this_stmt_list
= cstmt_list
;
6579 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
6581 mem
= build2 (MEM_REF
, type
, cond_ptr
,
6582 build_int_cst (TREE_TYPE (cond_ptr
),
6584 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
6587 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
6588 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
6589 tree mem2
= copy_node (mem
);
6590 gimple_seq seq
= NULL
;
6591 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
6592 gimple_seq_add_seq (this_stmt_list
, seq
);
6593 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
6594 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6595 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
6596 gimple_seq_add_stmt (this_stmt_list
, g
);
6597 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
6598 gimplify_assign (mem2
, v
, this_stmt_list
);
6601 && ctx
->combined_into_simd_safelen1
6602 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6603 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6604 && ctx
->lastprivate_conditional_map
)
6605 this_stmt_list
= &post_stmt_list
;
6607 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6608 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6609 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
6611 var
= OMP_CLAUSE_DECL (c
);
6612 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6613 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
6614 && is_taskloop_ctx (ctx
))
6616 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
6617 new_var
= lookup_decl (var
, ctx
->outer
);
6621 new_var
= lookup_decl (var
, ctx
);
6622 /* Avoid uninitialized warnings for lastprivate and
6623 for linear iterators. */
6625 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6626 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
6627 TREE_NO_WARNING (new_var
) = 1;
6630 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
6632 tree val
= DECL_VALUE_EXPR (new_var
);
6633 if (TREE_CODE (val
) == ARRAY_REF
6634 && VAR_P (TREE_OPERAND (val
, 0))
6635 && lookup_attribute ("omp simd array",
6636 DECL_ATTRIBUTES (TREE_OPERAND (val
,
6639 if (lastlane
== NULL
)
6641 lastlane
= create_tmp_var (unsigned_type_node
);
6643 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6645 TREE_OPERAND (val
, 1));
6646 gimple_call_set_lhs (g
, lastlane
);
6647 gimple_seq_add_stmt (this_stmt_list
, g
);
6649 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
6650 TREE_OPERAND (val
, 0), lastlane
,
6651 NULL_TREE
, NULL_TREE
);
6652 TREE_THIS_NOTRAP (new_var
) = 1;
6655 else if (maybe_simt
)
6657 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
6658 ? DECL_VALUE_EXPR (new_var
)
6660 if (simtlast
== NULL
)
6662 simtlast
= create_tmp_var (unsigned_type_node
);
6663 gcall
*g
= gimple_build_call_internal
6664 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
6665 gimple_call_set_lhs (g
, simtlast
);
6666 gimple_seq_add_stmt (this_stmt_list
, g
);
6668 x
= build_call_expr_internal_loc
6669 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
6670 TREE_TYPE (val
), 2, val
, simtlast
);
6671 new_var
= unshare_expr (new_var
);
6672 gimplify_assign (new_var
, x
, this_stmt_list
);
6673 new_var
= unshare_expr (new_var
);
6676 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6677 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
6679 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
6680 gimple_seq_add_seq (this_stmt_list
,
6681 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
6682 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
6684 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6685 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
6687 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
6688 gimple_seq_add_seq (this_stmt_list
,
6689 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
6690 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
6694 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6695 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)
6696 && is_taskloop_ctx (ctx
))
6698 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
6700 if (is_global_var (ovar
))
6704 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
6705 if (omp_is_reference (var
))
6706 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6707 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
6708 gimplify_and_add (x
, this_stmt_list
);
6711 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
6715 c
= OMP_CLAUSE_CHAIN (c
);
6716 if (c
== NULL
&& !par_clauses
)
6718 /* If this was a workshare clause, see if it had been combined
6719 with its parallel. In that case, continue looking for the
6720 clauses also on the parallel statement itself. */
6721 if (is_parallel_ctx (ctx
))
6725 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6728 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6729 OMP_CLAUSE_LASTPRIVATE
);
6735 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
6736 gimple_seq_add_seq (stmt_list
, post_stmt_list
);
6739 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6740 (which might be a placeholder). INNER is true if this is an inner
6741 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6742 join markers. Generate the before-loop forking sequence in
6743 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6744 general form of these sequences is
6746 GOACC_REDUCTION_SETUP
6748 GOACC_REDUCTION_INIT
6750 GOACC_REDUCTION_FINI
6752 GOACC_REDUCTION_TEARDOWN. */
6755 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
6756 gcall
*fork
, gcall
*join
, gimple_seq
*fork_seq
,
6757 gimple_seq
*join_seq
, omp_context
*ctx
)
6759 gimple_seq before_fork
= NULL
;
6760 gimple_seq after_fork
= NULL
;
6761 gimple_seq before_join
= NULL
;
6762 gimple_seq after_join
= NULL
;
6763 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
6764 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
6765 unsigned offset
= 0;
6767 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6768 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
6770 tree orig
= OMP_CLAUSE_DECL (c
);
6771 tree var
= maybe_lookup_decl (orig
, ctx
);
6772 tree ref_to_res
= NULL_TREE
;
6773 tree incoming
, outgoing
, v1
, v2
, v3
;
6774 bool is_private
= false;
6776 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
6777 if (rcode
== MINUS_EXPR
)
6779 else if (rcode
== TRUTH_ANDIF_EXPR
)
6780 rcode
= BIT_AND_EXPR
;
6781 else if (rcode
== TRUTH_ORIF_EXPR
)
6782 rcode
= BIT_IOR_EXPR
;
6783 tree op
= build_int_cst (unsigned_type_node
, rcode
);
6788 incoming
= outgoing
= var
;
6792 /* See if an outer construct also reduces this variable. */
6793 omp_context
*outer
= ctx
;
6795 while (omp_context
*probe
= outer
->outer
)
6797 enum gimple_code type
= gimple_code (probe
->stmt
);
6802 case GIMPLE_OMP_FOR
:
6803 cls
= gimple_omp_for_clauses (probe
->stmt
);
6806 case GIMPLE_OMP_TARGET
:
6807 if ((gimple_omp_target_kind (probe
->stmt
)
6808 != GF_OMP_TARGET_KIND_OACC_PARALLEL
)
6809 && (gimple_omp_target_kind (probe
->stmt
)
6810 != GF_OMP_TARGET_KIND_OACC_SERIAL
))
6813 cls
= gimple_omp_target_clauses (probe
->stmt
);
6821 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
6822 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
6823 && orig
== OMP_CLAUSE_DECL (cls
))
6825 incoming
= outgoing
= lookup_decl (orig
, probe
);
6826 goto has_outer_reduction
;
6828 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
6829 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
6830 && orig
== OMP_CLAUSE_DECL (cls
))
6838 /* This is the outermost construct with this reduction,
6839 see if there's a mapping for it. */
6840 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
6841 && maybe_lookup_field (orig
, outer
) && !is_private
)
6843 ref_to_res
= build_receiver_ref (orig
, false, outer
);
6844 if (omp_is_reference (orig
))
6845 ref_to_res
= build_simple_mem_ref (ref_to_res
);
6847 tree type
= TREE_TYPE (var
);
6848 if (POINTER_TYPE_P (type
))
6849 type
= TREE_TYPE (type
);
6852 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
6856 /* Try to look at enclosing contexts for reduction var,
6857 use original if no mapping found. */
6859 omp_context
*c
= ctx
->outer
;
6862 t
= maybe_lookup_decl (orig
, c
);
6865 incoming
= outgoing
= (t
? t
: orig
);
6868 has_outer_reduction
:;
6872 ref_to_res
= integer_zero_node
;
6874 if (omp_is_reference (orig
))
6876 tree type
= TREE_TYPE (var
);
6877 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
6881 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
6882 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
6885 v1
= create_tmp_var (type
, id
);
6886 v2
= create_tmp_var (type
, id
);
6887 v3
= create_tmp_var (type
, id
);
6889 gimplify_assign (v1
, var
, fork_seq
);
6890 gimplify_assign (v2
, var
, fork_seq
);
6891 gimplify_assign (v3
, var
, fork_seq
);
6893 var
= build_simple_mem_ref (var
);
6894 v1
= build_simple_mem_ref (v1
);
6895 v2
= build_simple_mem_ref (v2
);
6896 v3
= build_simple_mem_ref (v3
);
6897 outgoing
= build_simple_mem_ref (outgoing
);
6899 if (!TREE_CONSTANT (incoming
))
6900 incoming
= build_simple_mem_ref (incoming
);
6905 /* Determine position in reduction buffer, which may be used
6906 by target. The parser has ensured that this is not a
6907 variable-sized type. */
6908 fixed_size_mode mode
6909 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
6910 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
6911 offset
= (offset
+ align
- 1) & ~(align
- 1);
6912 tree off
= build_int_cst (sizetype
, offset
);
6913 offset
+= GET_MODE_SIZE (mode
);
6917 init_code
= build_int_cst (integer_type_node
,
6918 IFN_GOACC_REDUCTION_INIT
);
6919 fini_code
= build_int_cst (integer_type_node
,
6920 IFN_GOACC_REDUCTION_FINI
);
6921 setup_code
= build_int_cst (integer_type_node
,
6922 IFN_GOACC_REDUCTION_SETUP
);
6923 teardown_code
= build_int_cst (integer_type_node
,
6924 IFN_GOACC_REDUCTION_TEARDOWN
);
6928 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6929 TREE_TYPE (var
), 6, setup_code
,
6930 unshare_expr (ref_to_res
),
6931 incoming
, level
, op
, off
);
6933 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6934 TREE_TYPE (var
), 6, init_code
,
6935 unshare_expr (ref_to_res
),
6936 v1
, level
, op
, off
);
6938 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6939 TREE_TYPE (var
), 6, fini_code
,
6940 unshare_expr (ref_to_res
),
6941 v2
, level
, op
, off
);
6943 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6944 TREE_TYPE (var
), 6, teardown_code
,
6945 ref_to_res
, v3
, level
, op
, off
);
6947 gimplify_assign (v1
, setup_call
, &before_fork
);
6948 gimplify_assign (v2
, init_call
, &after_fork
);
6949 gimplify_assign (v3
, fini_call
, &before_join
);
6950 gimplify_assign (outgoing
, teardown_call
, &after_join
);
6953 /* Now stitch things together. */
6954 gimple_seq_add_seq (fork_seq
, before_fork
);
6956 gimple_seq_add_stmt (fork_seq
, fork
);
6957 gimple_seq_add_seq (fork_seq
, after_fork
);
6959 gimple_seq_add_seq (join_seq
, before_join
);
6961 gimple_seq_add_stmt (join_seq
, join
);
6962 gimple_seq_add_seq (join_seq
, after_join
);
6965 /* Generate code to implement the REDUCTION clauses, append it
6966 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6967 that should be emitted also inside of the critical section,
6968 in that case clear *CLIST afterwards, otherwise leave it as is
6969 and let the caller emit it itself. */
6972 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
6973 gimple_seq
*clist
, omp_context
*ctx
)
6975 gimple_seq sub_seq
= NULL
;
6980 /* OpenACC loop reductions are handled elsewhere. */
6981 if (is_gimple_omp_oacc (ctx
->stmt
))
6984 /* SIMD reductions are handled in lower_rec_input_clauses. */
6985 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6986 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
6989 /* inscan reductions are handled elsewhere. */
6990 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
6993 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6994 update in that case, otherwise use a lock. */
6995 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
6996 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6997 && !OMP_CLAUSE_REDUCTION_TASK (c
))
6999 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
7000 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7002 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7012 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7014 tree var
, ref
, new_var
, orig_var
;
7015 enum tree_code code
;
7016 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7018 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7019 || OMP_CLAUSE_REDUCTION_TASK (c
))
7022 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
7023 orig_var
= var
= OMP_CLAUSE_DECL (c
);
7024 if (TREE_CODE (var
) == MEM_REF
)
7026 var
= TREE_OPERAND (var
, 0);
7027 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7028 var
= TREE_OPERAND (var
, 0);
7029 if (TREE_CODE (var
) == ADDR_EXPR
)
7030 var
= TREE_OPERAND (var
, 0);
7033 /* If this is a pointer or referenced based array
7034 section, the var could be private in the outer
7035 context e.g. on orphaned loop construct. Pretend this
7036 is private variable's outer reference. */
7037 ccode
= OMP_CLAUSE_PRIVATE
;
7038 if (TREE_CODE (var
) == INDIRECT_REF
)
7039 var
= TREE_OPERAND (var
, 0);
7042 if (is_variable_sized (var
))
7044 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
7045 var
= DECL_VALUE_EXPR (var
);
7046 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
7047 var
= TREE_OPERAND (var
, 0);
7048 gcc_assert (DECL_P (var
));
7051 new_var
= lookup_decl (var
, ctx
);
7052 if (var
== OMP_CLAUSE_DECL (c
) && omp_is_reference (var
))
7053 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7054 ref
= build_outer_var_ref (var
, ctx
, ccode
);
7055 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
7057 /* reduction(-:var) sums up the partial results, so it acts
7058 identically to reduction(+:var). */
7059 if (code
== MINUS_EXPR
)
7064 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
7066 addr
= save_expr (addr
);
7067 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
7068 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (ref
), ref
, new_var
);
7069 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
7070 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
7071 gimplify_and_add (x
, stmt_seqp
);
7074 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7076 tree d
= OMP_CLAUSE_DECL (c
);
7077 tree type
= TREE_TYPE (d
);
7078 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7079 tree i
= create_tmp_var (TREE_TYPE (v
));
7080 tree ptype
= build_pointer_type (TREE_TYPE (type
));
7081 tree bias
= TREE_OPERAND (d
, 1);
7082 d
= TREE_OPERAND (d
, 0);
7083 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
7085 tree b
= TREE_OPERAND (d
, 1);
7086 b
= maybe_lookup_decl (b
, ctx
);
7089 b
= TREE_OPERAND (d
, 1);
7090 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
7092 if (integer_zerop (bias
))
7096 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
7097 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
7098 TREE_TYPE (b
), b
, bias
);
7100 d
= TREE_OPERAND (d
, 0);
7102 /* For ref build_outer_var_ref already performs this, so
7103 only new_var needs a dereference. */
7104 if (TREE_CODE (d
) == INDIRECT_REF
)
7106 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7107 gcc_assert (omp_is_reference (var
) && var
== orig_var
);
7109 else if (TREE_CODE (d
) == ADDR_EXPR
)
7111 if (orig_var
== var
)
7113 new_var
= build_fold_addr_expr (new_var
);
7114 ref
= build_fold_addr_expr (ref
);
7119 gcc_assert (orig_var
== var
);
7120 if (omp_is_reference (var
))
7121 ref
= build_fold_addr_expr (ref
);
7125 tree t
= maybe_lookup_decl (v
, ctx
);
7129 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7130 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
7132 if (!integer_zerop (bias
))
7134 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
7135 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7136 TREE_TYPE (new_var
), new_var
,
7137 unshare_expr (bias
));
7138 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7139 TREE_TYPE (ref
), ref
, bias
);
7141 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
7142 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
7143 tree m
= create_tmp_var (ptype
);
7144 gimplify_assign (m
, new_var
, stmt_seqp
);
7146 m
= create_tmp_var (ptype
);
7147 gimplify_assign (m
, ref
, stmt_seqp
);
7149 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
7150 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7151 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7152 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
7153 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7154 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
7155 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7157 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7158 tree decl_placeholder
7159 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
7160 SET_DECL_VALUE_EXPR (placeholder
, out
);
7161 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7162 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
7163 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
7164 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7165 gimple_seq_add_seq (&sub_seq
,
7166 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7167 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7168 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7169 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
7173 x
= build2 (code
, TREE_TYPE (out
), out
, priv
);
7174 out
= unshare_expr (out
);
7175 gimplify_assign (out
, x
, &sub_seq
);
7177 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
7178 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7179 gimple_seq_add_stmt (&sub_seq
, g
);
7180 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
7181 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7182 gimple_seq_add_stmt (&sub_seq
, g
);
7183 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
7184 build_int_cst (TREE_TYPE (i
), 1));
7185 gimple_seq_add_stmt (&sub_seq
, g
);
7186 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
7187 gimple_seq_add_stmt (&sub_seq
, g
);
7188 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
7190 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7192 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7194 if (omp_is_reference (var
)
7195 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7197 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
7198 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7199 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7200 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7201 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7202 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7203 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7207 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
7208 ref
= build_outer_var_ref (var
, ctx
);
7209 gimplify_assign (ref
, x
, &sub_seq
);
7213 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
7215 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7217 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
7221 gimple_seq_add_seq (stmt_seqp
, *clist
);
7225 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
7227 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7231 /* Generate code to implement the COPYPRIVATE clauses. */
7234 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
7239 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7241 tree var
, new_var
, ref
, x
;
7243 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7245 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
7248 var
= OMP_CLAUSE_DECL (c
);
7249 by_ref
= use_pointer_for_field (var
, NULL
);
7251 ref
= build_sender_ref (var
, ctx
);
7252 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
7255 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
7256 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
7258 gimplify_assign (ref
, x
, slist
);
7260 ref
= build_receiver_ref (var
, false, ctx
);
7263 ref
= fold_convert_loc (clause_loc
,
7264 build_pointer_type (TREE_TYPE (new_var
)),
7266 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
7268 if (omp_is_reference (var
))
7270 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
7271 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
7272 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7274 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
7275 gimplify_and_add (x
, rlist
);
7280 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7281 and REDUCTION from the sender (aka parent) side. */
7284 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
7288 int ignored_looptemp
= 0;
7289 bool is_taskloop
= false;
7291 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7292 by GOMP_taskloop. */
7293 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
7295 ignored_looptemp
= 2;
7299 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7301 tree val
, ref
, x
, var
;
7302 bool by_ref
, do_in
= false, do_out
= false;
7303 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7305 switch (OMP_CLAUSE_CODE (c
))
7307 case OMP_CLAUSE_PRIVATE
:
7308 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7311 case OMP_CLAUSE_FIRSTPRIVATE
:
7312 case OMP_CLAUSE_COPYIN
:
7313 case OMP_CLAUSE_LASTPRIVATE
:
7314 case OMP_CLAUSE_IN_REDUCTION
:
7315 case OMP_CLAUSE__REDUCTEMP_
:
7317 case OMP_CLAUSE_REDUCTION
:
7318 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
7321 case OMP_CLAUSE_SHARED
:
7322 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7325 case OMP_CLAUSE__LOOPTEMP_
:
7326 if (ignored_looptemp
)
7336 val
= OMP_CLAUSE_DECL (c
);
7337 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7338 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
7339 && TREE_CODE (val
) == MEM_REF
)
7341 val
= TREE_OPERAND (val
, 0);
7342 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
7343 val
= TREE_OPERAND (val
, 0);
7344 if (TREE_CODE (val
) == INDIRECT_REF
7345 || TREE_CODE (val
) == ADDR_EXPR
)
7346 val
= TREE_OPERAND (val
, 0);
7347 if (is_variable_sized (val
))
7351 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7352 outer taskloop region. */
7353 omp_context
*ctx_for_o
= ctx
;
7355 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
7356 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7357 ctx_for_o
= ctx
->outer
;
7359 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
7361 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
7362 && is_global_var (var
)
7363 && (val
== OMP_CLAUSE_DECL (c
)
7364 || !is_task_ctx (ctx
)
7365 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
7366 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
7367 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
7368 != POINTER_TYPE
)))))
7371 t
= omp_member_access_dummy_var (var
);
7374 var
= DECL_VALUE_EXPR (var
);
7375 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
7377 var
= unshare_and_remap (var
, t
, o
);
7379 var
= unshare_expr (var
);
7382 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
7384 /* Handle taskloop firstprivate/lastprivate, where the
7385 lastprivate on GIMPLE_OMP_TASK is represented as
7386 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7387 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
7388 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
7389 if (use_pointer_for_field (val
, ctx
))
7390 var
= build_fold_addr_expr (var
);
7391 gimplify_assign (x
, var
, ilist
);
7392 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
7396 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7397 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
7398 || val
== OMP_CLAUSE_DECL (c
))
7399 && is_variable_sized (val
))
7401 by_ref
= use_pointer_for_field (val
, NULL
);
7403 switch (OMP_CLAUSE_CODE (c
))
7405 case OMP_CLAUSE_FIRSTPRIVATE
:
7406 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
7408 && is_task_ctx (ctx
))
7409 TREE_NO_WARNING (var
) = 1;
7413 case OMP_CLAUSE_PRIVATE
:
7414 case OMP_CLAUSE_COPYIN
:
7415 case OMP_CLAUSE__LOOPTEMP_
:
7416 case OMP_CLAUSE__REDUCTEMP_
:
7420 case OMP_CLAUSE_LASTPRIVATE
:
7421 if (by_ref
|| omp_is_reference (val
))
7423 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
7430 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
7435 case OMP_CLAUSE_REDUCTION
:
7436 case OMP_CLAUSE_IN_REDUCTION
:
7438 if (val
== OMP_CLAUSE_DECL (c
))
7440 if (is_task_ctx (ctx
))
7441 by_ref
= use_pointer_for_field (val
, ctx
);
7443 do_out
= !(by_ref
|| omp_is_reference (val
));
7446 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
7455 ref
= build_sender_ref (val
, ctx
);
7456 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
7457 gimplify_assign (ref
, x
, ilist
);
7458 if (is_task_ctx (ctx
))
7459 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
7464 ref
= build_sender_ref (val
, ctx
);
7465 gimplify_assign (var
, ref
, olist
);
7470 /* Generate code to implement SHARED from the sender (aka parent)
7471 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7472 list things that got automatically shared. */
7475 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
7477 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
7479 if (ctx
->record_type
== NULL
)
7482 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
7483 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
7485 ovar
= DECL_ABSTRACT_ORIGIN (f
);
7486 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
7489 nvar
= maybe_lookup_decl (ovar
, ctx
);
7490 if (!nvar
|| !DECL_HAS_VALUE_EXPR_P (nvar
))
7493 /* If CTX is a nested parallel directive. Find the immediately
7494 enclosing parallel or workshare construct that contains a
7495 mapping for OVAR. */
7496 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
7498 t
= omp_member_access_dummy_var (var
);
7501 var
= DECL_VALUE_EXPR (var
);
7502 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
7504 var
= unshare_and_remap (var
, t
, o
);
7506 var
= unshare_expr (var
);
7509 if (use_pointer_for_field (ovar
, ctx
))
7511 x
= build_sender_ref (ovar
, ctx
);
7512 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
7513 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
7515 gcc_assert (is_parallel_ctx (ctx
)
7516 && DECL_ARTIFICIAL (ovar
));
7517 /* _condtemp_ clause. */
7518 var
= build_constructor (TREE_TYPE (x
), NULL
);
7521 var
= build_fold_addr_expr (var
);
7522 gimplify_assign (x
, var
, ilist
);
7526 x
= build_sender_ref (ovar
, ctx
);
7527 gimplify_assign (x
, var
, ilist
);
7529 if (!TREE_READONLY (var
)
7530 /* We don't need to receive a new reference to a result
7531 or parm decl. In fact we may not store to it as we will
7532 invalidate any pending RSO and generate wrong gimple
7534 && !((TREE_CODE (var
) == RESULT_DECL
7535 || TREE_CODE (var
) == PARM_DECL
)
7536 && DECL_BY_REFERENCE (var
)))
7538 x
= build_sender_ref (ovar
, ctx
);
7539 gimplify_assign (var
, x
, olist
);
7545 /* Emit an OpenACC head marker call, encapsulating the partitioning and
7546 other information that must be processed by the target compiler.
7547 Return the maximum number of dimensions the associated loop might
7548 be partitioned over. */
7551 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
7552 gimple_seq
*seq
, omp_context
*ctx
)
7554 unsigned levels
= 0;
7556 tree gang_static
= NULL_TREE
;
7557 auto_vec
<tree
, 5> args
;
7559 args
.quick_push (build_int_cst
7560 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
7561 args
.quick_push (ddvar
);
7562 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7564 switch (OMP_CLAUSE_CODE (c
))
7566 case OMP_CLAUSE_GANG
:
7567 tag
|= OLF_DIM_GANG
;
7568 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
7569 /* static:* is represented by -1, and we can ignore it, as
7570 scheduling is always static. */
7571 if (gang_static
&& integer_minus_onep (gang_static
))
7572 gang_static
= NULL_TREE
;
7576 case OMP_CLAUSE_WORKER
:
7577 tag
|= OLF_DIM_WORKER
;
7581 case OMP_CLAUSE_VECTOR
:
7582 tag
|= OLF_DIM_VECTOR
;
7586 case OMP_CLAUSE_SEQ
:
7590 case OMP_CLAUSE_AUTO
:
7594 case OMP_CLAUSE_INDEPENDENT
:
7595 tag
|= OLF_INDEPENDENT
;
7598 case OMP_CLAUSE_TILE
:
7609 if (DECL_P (gang_static
))
7610 gang_static
= build_outer_var_ref (gang_static
, ctx
);
7611 tag
|= OLF_GANG_STATIC
;
7614 /* In a parallel region, loops are implicitly INDEPENDENT. */
7615 omp_context
*tgt
= enclosing_target_ctx (ctx
);
7616 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
7617 tag
|= OLF_INDEPENDENT
;
7620 /* Tiling could use all 3 levels. */
7624 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7625 Ensure at least one level, or 2 for possible auto
7627 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
7628 << OLF_DIM_BASE
) | OLF_SEQ
));
7630 if (levels
< 1u + maybe_auto
)
7631 levels
= 1u + maybe_auto
;
7634 args
.quick_push (build_int_cst (integer_type_node
, levels
));
7635 args
.quick_push (build_int_cst (integer_type_node
, tag
));
7637 args
.quick_push (gang_static
);
7639 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
7640 gimple_set_location (call
, loc
);
7641 gimple_set_lhs (call
, ddvar
);
7642 gimple_seq_add_stmt (seq
, call
);
7647 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
7648 partitioning level of the enclosed region. */
7651 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
7652 tree tofollow
, gimple_seq
*seq
)
7654 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
7655 : IFN_UNIQUE_OACC_TAIL_MARK
);
7656 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
7657 int nargs
= 2 + (tofollow
!= NULL_TREE
);
7658 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
7659 marker
, ddvar
, tofollow
);
7660 gimple_set_location (call
, loc
);
7661 gimple_set_lhs (call
, ddvar
);
7662 gimple_seq_add_stmt (seq
, call
);
7665 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7666 the loop clauses, from which we extract reductions. Initialize
7670 lower_oacc_head_tail (location_t loc
, tree clauses
,
7671 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
7674 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
7675 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
7677 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
7678 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
7679 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
7682 for (unsigned done
= 1; count
; count
--, done
++)
7684 gimple_seq fork_seq
= NULL
;
7685 gimple_seq join_seq
= NULL
;
7687 tree place
= build_int_cst (integer_type_node
, -1);
7688 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7689 fork_kind
, ddvar
, place
);
7690 gimple_set_location (fork
, loc
);
7691 gimple_set_lhs (fork
, ddvar
);
7693 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7694 join_kind
, ddvar
, place
);
7695 gimple_set_location (join
, loc
);
7696 gimple_set_lhs (join
, ddvar
);
7698 /* Mark the beginning of this level sequence. */
7700 lower_oacc_loop_marker (loc
, ddvar
, true,
7701 build_int_cst (integer_type_node
, count
),
7703 lower_oacc_loop_marker (loc
, ddvar
, false,
7704 build_int_cst (integer_type_node
, done
),
7707 lower_oacc_reductions (loc
, clauses
, place
, inner
,
7708 fork
, join
, &fork_seq
, &join_seq
, ctx
);
7710 /* Append this level to head. */
7711 gimple_seq_add_seq (head
, fork_seq
);
7712 /* Prepend it to tail. */
7713 gimple_seq_add_seq (&join_seq
, *tail
);
7719 /* Mark the end of the sequence. */
7720 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
7721 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
7724 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7725 catch handler and return it. This prevents programs from violating the
7726 structured block semantics with throws. */
7729 maybe_catch_exception (gimple_seq body
)
7734 if (!flag_exceptions
)
7737 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
7738 decl
= lang_hooks
.eh_protect_cleanup_actions ();
7740 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
7742 g
= gimple_build_eh_must_not_throw (decl
);
7743 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
7746 return gimple_seq_alloc_with_stmt (g
);
7750 /* Routines to lower OMP directives into OMP-GIMPLE. */
7752 /* If ctx is a worksharing context inside of a cancellable parallel
7753 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7754 and conditional branch to parallel's cancel_label to handle
7755 cancellation in the implicit barrier. */
7758 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
7761 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
7762 if (gimple_omp_return_nowait_p (omp_return
))
7764 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
7765 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
7766 && outer
->cancellable
)
7768 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
7769 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
7770 tree lhs
= create_tmp_var (c_bool_type
);
7771 gimple_omp_return_set_lhs (omp_return
, lhs
);
7772 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
7773 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
7774 fold_convert (c_bool_type
,
7775 boolean_false_node
),
7776 outer
->cancel_label
, fallthru_label
);
7777 gimple_seq_add_stmt (body
, g
);
7778 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
7780 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
7784 /* Find the first task_reduction or reduction clause or return NULL
7785 if there are none. */
7788 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
7789 enum omp_clause_code ccode
)
7793 clauses
= omp_find_clause (clauses
, ccode
);
7794 if (clauses
== NULL_TREE
)
7796 if (ccode
!= OMP_CLAUSE_REDUCTION
7797 || code
== OMP_TASKLOOP
7798 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
7800 clauses
= OMP_CLAUSE_CHAIN (clauses
);
7804 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
7805 gimple_seq
*, gimple_seq
*);
7807 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7808 CTX is the enclosing OMP context for the current statement. */
7811 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7813 tree block
, control
;
7814 gimple_stmt_iterator tgsi
;
7815 gomp_sections
*stmt
;
7817 gbind
*new_stmt
, *bind
;
7818 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
7820 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
7822 push_gimplify_context ();
7828 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
7829 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
7830 tree rtmp
= NULL_TREE
;
7833 tree type
= build_pointer_type (pointer_sized_int_node
);
7834 tree temp
= create_tmp_var (type
);
7835 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
7836 OMP_CLAUSE_DECL (c
) = temp
;
7837 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
7838 gimple_omp_sections_set_clauses (stmt
, c
);
7839 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
7840 gimple_omp_sections_clauses (stmt
),
7841 &ilist
, &tred_dlist
);
7843 rtmp
= make_ssa_name (type
);
7844 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
7847 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
7848 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
7850 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
7851 &ilist
, &dlist
, ctx
, NULL
);
7853 control
= create_tmp_var (unsigned_type_node
, ".section");
7854 gimple_omp_sections_set_control (stmt
, control
);
7856 new_body
= gimple_omp_body (stmt
);
7857 gimple_omp_set_body (stmt
, NULL
);
7858 tgsi
= gsi_start (new_body
);
7859 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
7864 sec_start
= gsi_stmt (tgsi
);
7865 sctx
= maybe_lookup_ctx (sec_start
);
7868 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
7869 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
7870 GSI_CONTINUE_LINKING
);
7871 gimple_omp_set_body (sec_start
, NULL
);
7873 if (gsi_one_before_end_p (tgsi
))
7875 gimple_seq l
= NULL
;
7876 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
7877 &ilist
, &l
, &clist
, ctx
);
7878 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
7879 gimple_omp_section_set_last (sec_start
);
7882 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
7883 GSI_CONTINUE_LINKING
);
7886 block
= make_node (BLOCK
);
7887 bind
= gimple_build_bind (NULL
, new_body
, block
);
7890 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
7894 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
7895 gcall
*g
= gimple_build_call (fndecl
, 0);
7896 gimple_seq_add_stmt (&olist
, g
);
7897 gimple_seq_add_seq (&olist
, clist
);
7898 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
7899 g
= gimple_build_call (fndecl
, 0);
7900 gimple_seq_add_stmt (&olist
, g
);
7903 block
= make_node (BLOCK
);
7904 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
7905 gsi_replace (gsi_p
, new_stmt
, true);
7907 pop_gimplify_context (new_stmt
);
7908 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
7909 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
7910 if (BLOCK_VARS (block
))
7911 TREE_USED (block
) = 1;
7914 gimple_seq_add_seq (&new_body
, ilist
);
7915 gimple_seq_add_stmt (&new_body
, stmt
);
7916 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
7917 gimple_seq_add_stmt (&new_body
, bind
);
7919 t
= gimple_build_omp_continue (control
, control
);
7920 gimple_seq_add_stmt (&new_body
, t
);
7922 gimple_seq_add_seq (&new_body
, olist
);
7923 if (ctx
->cancellable
)
7924 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
7925 gimple_seq_add_seq (&new_body
, dlist
);
7927 new_body
= maybe_catch_exception (new_body
);
7929 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
7930 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
7931 t
= gimple_build_omp_return (nowait
);
7932 gimple_seq_add_stmt (&new_body
, t
);
7933 gimple_seq_add_seq (&new_body
, tred_dlist
);
7934 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
7937 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
7939 gimple_bind_set_body (new_stmt
, new_body
);
7943 /* A subroutine of lower_omp_single. Expand the simple form of
7944 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7946 if (GOMP_single_start ())
7948 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7950 FIXME. It may be better to delay expanding the logic of this until
7951 pass_expand_omp. The expanded logic may make the job more difficult
7952 to a synchronization analysis pass. */
/* NOTE(review): fragmented extraction -- statements are split across lines
   and some source lines are missing (gaps in embedded numbering).  Tokens
   kept byte-for-byte; verify against the canonical omp-low.c.  */
7955 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
7957 location_t loc
= gimple_location (single_stmt
);
7958 tree tlabel
= create_artificial_label (loc
);
7959 tree flabel
= create_artificial_label (loc
);
7960 gimple
*call
, *cond
;
/* Call GOMP_single_start and capture its result in a temporary.  */
7963 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
7964 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
7965 call
= gimple_build_call (decl
, 0);
7966 gimple_call_set_lhs (call
, lhs
);
7967 gimple_seq_add_stmt (pre_p
, call
);
/* Branch on the result: the winning thread executes the single body
   (between TLABEL and FLABEL), the rest skip it.  */
7969 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
7970 fold_convert_loc (loc
, TREE_TYPE (lhs
),
7973 gimple_seq_add_stmt (pre_p
, cond
);
7974 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
7975 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7976 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
7980 /* A subroutine of lower_omp_single. Expand the simple form of
7981 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7983 #pragma omp single copyprivate (a, b, c)
7985 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7988 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7994 GOMP_single_copy_end (&copyout);
8005 FIXME. It may be better to delay expanding the logic of this until
8006 pass_expand_omp. The expanded logic may make the job more difficult
8007 to a synchronization analysis pass. */
/* NOTE(review): fragmented extraction -- statements are split across lines
   and some source lines are missing (gaps in embedded numbering).  Tokens
   kept byte-for-byte; verify against the canonical omp-low.c.  */
8010 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
8013 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
8014 gimple_seq copyin_seq
;
8015 location_t loc
= gimple_location (single_stmt
);
/* Sender/receiver temporaries for the copyprivate record: the executing
   thread fills ".omp_copy_o", other threads read through ".omp_copy_i".  */
8017 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
8019 ptr_type
= build_pointer_type (ctx
->record_type
);
8020 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
8022 l0
= create_artificial_label (loc
);
8023 l1
= create_artificial_label (loc
);
8024 l2
= create_artificial_label (loc
);
/* receiver = (ptr_type) GOMP_single_copy_start (); NULL means this thread
   executes the single body (branch to L0), otherwise copy in (L1).  */
8026 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
8027 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
8028 t
= fold_convert_loc (loc
, ptr_type
, t
);
8029 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
8031 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
8032 build_int_cst (ptr_type
, 0));
8033 t
= build3 (COND_EXPR
, void_type_node
, t
,
8034 build_and_jump (&l0
), build_and_jump (&l1
));
8035 gimplify_and_add (t
, pre_p
);
/* L0: single body, then publish the copyprivate record.  */
8037 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
8039 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8042 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
8045 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
8046 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
8047 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
8048 gimplify_and_add (t
, pre_p
);
8050 t
= build_and_jump (&l2
);
8051 gimplify_and_add (t
, pre_p
);
/* L1: non-executing threads run the copy-in sequence; L2: join point.  */
8053 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
8055 gimple_seq_add_seq (pre_p
, copyin_seq
);
8057 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
8061 /* Expand code for an OpenMP single directive. */
/* NOTE(review): fragmented extraction -- statements are split across lines
   and some source lines are missing (gaps in embedded numbering).  Tokens
   kept byte-for-byte; verify against the canonical omp-low.c.  */
8064 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8067 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
8069 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8071 push_gimplify_context ();
/* Replace the directive with a fresh bind that will hold the lowering.  */
8073 block
= make_node (BLOCK
);
8074 bind
= gimple_build_bind (NULL
, NULL
, block
);
8075 gsi_replace (gsi_p
, bind
, true);
8078 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
8079 &bind_body
, &dlist
, ctx
, NULL
);
8080 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
8082 gimple_seq_add_stmt (&bind_body
, single_stmt
);
/* A non-null record_type means a copyprivate clause was present; choose
   the copy-out expansion, otherwise the simple GOMP_single_start form.  */
8084 if (ctx
->record_type
)
8085 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
8087 lower_omp_single_simple (single_stmt
, &bind_body
);
8089 gimple_omp_set_body (single_stmt
, NULL
);
8091 gimple_seq_add_seq (&bind_body
, dlist
);
8093 bind_body
= maybe_catch_exception (bind_body
);
8095 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
8096 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8097 gimple
*g
= gimple_build_omp_return (nowait
);
8098 gimple_seq_add_stmt (&bind_body_tail
, g
);
8099 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
/* Clobber the copyprivate sender record after the return so its storage
   can be reused.  */
8100 if (ctx
->record_type
)
8102 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8103 tree clobber
= build_clobber (ctx
->record_type
);
8104 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8105 clobber
), GSI_SAME_STMT
);
8107 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8108 gimple_bind_set_body (bind
, bind_body
);
8110 pop_gimplify_context (bind
);
8112 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8113 BLOCK_VARS (block
) = ctx
->block_vars
;
8114 if (BLOCK_VARS (block
))
8115 TREE_USED (block
) = 1;
8119 /* Expand code for an OpenMP master directive. */
/* NOTE(review): fragmented extraction -- statements are split across lines
   and some source lines are missing (gaps in embedded numbering).  Tokens
   kept byte-for-byte; verify against the canonical omp-low.c.  */
8122 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8124 tree block
, lab
= NULL
, x
, bfn_decl
;
8125 gimple
*stmt
= gsi_stmt (*gsi_p
);
8127 location_t loc
= gimple_location (stmt
);
8130 push_gimplify_context ();
8132 block
= make_node (BLOCK
);
8133 bind
= gimple_build_bind (NULL
, NULL
, block
);
8134 gsi_replace (gsi_p
, bind
, true);
8135 gimple_bind_add_stmt (bind
, stmt
);
/* Guard the body with "if (omp_get_thread_num () != 0) goto LAB;" so
   only the master thread (thread 0) executes it.  */
8137 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8138 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
8139 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
8140 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
8142 gimplify_and_add (x
, &tseq
);
8143 gimple_bind_add_seq (bind
, tseq
);
8145 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8146 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8147 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8148 gimple_omp_set_body (stmt
, NULL
);
8150 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
8152 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8154 pop_gimplify_context (bind
);
8156 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8157 BLOCK_VARS (block
) = ctx
->block_vars
;
8160 /* Helper function for lower_omp_task_reductions. For a specific PASS
8161 find out the current clause it should be processed, or return false
8162 if all have been processed already. */
/* NOTE(review): fragmented extraction -- statements are split across lines
   and some source lines are missing (gaps in embedded numbering).  Tokens
   kept byte-for-byte; verify against the canonical omp-low.c.
   On success the current clause is left in *C and *DECL/*TYPE/*NEXT are
   filled from it; clauses whose size constness does not match PASS are
   skipped (pass 0 appears to take constant-size types, pass 1 the rest --
   TODO confirm against the full source, the early-out lines are missing).  */
8165 omp_task_reduction_iterate (int pass
, enum tree_code code
,
8166 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
8167 tree
*type
, tree
*next
)
8169 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
/* Plain (non-task) reductions are only handled here for taskloops.  */
8171 if (ccode
== OMP_CLAUSE_REDUCTION
8172 && code
!= OMP_TASKLOOP
8173 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
8175 *decl
= OMP_CLAUSE_DECL (*c
);
8176 *type
= TREE_TYPE (*decl
);
8177 if (TREE_CODE (*decl
) == MEM_REF
)
/* For references the reduced object is behind the pointer.  */
8184 if (omp_is_reference (*decl
))
8185 *type
= TREE_TYPE (*type
);
8186 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
8189 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
8198 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8199 OMP_TASKGROUP only with task modifier). Register mapping of those in
8200 START sequence and reducing them and unregister them in the END sequence. */
/* NOTE(review): this body is a fragmented extraction -- each original source
   line is split across several physical lines and many intermediate source
   lines are missing (large gaps in the embedded line numbering, e.g.
   8290->8295, 8475->8477).  All code tokens are kept byte-for-byte;
   verify every detail against the canonical omp-low.c before editing.  */
8203 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
8204 gimple_seq
*start
, gimple_seq
*end
)
8206 enum omp_clause_code ccode
8207 = (code
== OMP_TASKGROUP
8208 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
8209 tree cancellable
= NULL_TREE
;
8210 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
8211 if (clauses
== NULL_TREE
)
/* For worksharing constructs inside a cancellable parallel, remember the
   fact so the reduction merges can be skipped after cancellation.  */
8213 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8215 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8216 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8217 && outer
->cancellable
)
8219 cancellable
= error_mark_node
;
8222 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
/* Build the per-thread record type holding one field (plus a "seen" bool
   field) per reduction; layout documented in libgomp/task.c.  */
8225 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
8226 tree
*last
= &TYPE_FIELDS (record_type
);
8230 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8232 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8235 DECL_CHAIN (field
) = ifield
;
8236 last
= &DECL_CHAIN (ifield
);
8237 DECL_CONTEXT (field
) = record_type
;
8238 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8239 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8240 DECL_CONTEXT (ifield
) = record_type
;
8241 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
8242 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
/* Two passes over the clauses: see omp_task_reduction_iterate for how
   clauses are split between the passes by size constness.  */
8244 for (int pass
= 0; pass
< 2; pass
++)
8246 tree decl
, type
, next
;
8247 for (tree c
= clauses
;
8248 omp_task_reduction_iterate (pass
, code
, ccode
,
8249 &c
, &decl
, &type
, &next
); c
= next
)
8252 tree new_type
= type
;
8254 new_type
= remap_type (type
, &ctx
->outer
->cb
);
8256 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
8257 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
8259 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
8261 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
8262 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
8263 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
8266 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
8267 DECL_CONTEXT (field
) = record_type
;
8268 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8269 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8271 last
= &DECL_CHAIN (field
);
8273 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
8275 DECL_CONTEXT (bfield
) = record_type
;
8276 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
8277 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
8279 last
= &DECL_CHAIN (bfield
);
8283 layout_type (record_type
);
8285 /* Build up an array which registers with the runtime all the reductions
8286 and deregisters them at the end. Format documented in libgomp/task.c. */
8287 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
8288 tree avar
= create_tmp_var_raw (atype
);
8289 gimple_add_tmp_var (avar
);
8290 TREE_ADDRESSABLE (avar
) = 1;
/* avar[0] = number of reductions.  */
8291 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
8292 NULL_TREE
, NULL_TREE
);
8293 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
8294 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8295 gimple_seq seq
= NULL
;
/* avar[1] = per-thread chunk size, rounded up to the cache-line size.  */
8296 tree sz
= fold_convert (pointer_sized_int_node
,
8297 TYPE_SIZE_UNIT (record_type
));
8299 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
8300 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
8301 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
8302 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
8303 ctx
->task_reductions
.create (1 + cnt
);
8304 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
8305 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
8307 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
8308 gimple_seq_add_seq (start
, seq
);
8309 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
8310 NULL_TREE
, NULL_TREE
);
8311 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
/* avar[2] = alignment; avar[3] = -1; avar[4] = 0 (runtime-owned slots).  */
8312 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8313 NULL_TREE
, NULL_TREE
);
8314 t
= build_int_cst (pointer_sized_int_node
,
8315 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
8316 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8317 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
8318 NULL_TREE
, NULL_TREE
);
8319 t
= build_int_cst (pointer_sized_int_node
, -1);
8320 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8321 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
8322 NULL_TREE
, NULL_TREE
);
8323 t
= build_int_cst (pointer_sized_int_node
, 0);
8324 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8326 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8327 and for each task reduction checks a bool right after the private variable
8328 within that thread's chunk; if the bool is clear, it hasn't been
8329 initialized and thus isn't going to be reduced nor destructed, otherwise
8330 reduce and destruct it. */
8331 tree idx
= create_tmp_var (size_type_node
);
8332 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
8333 tree num_thr_sz
= create_tmp_var (size_type_node
);
8334 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
8335 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
8336 tree lab3
= NULL_TREE
;
8338 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8340 /* For worksharing constructs, only perform it in the master thread,
8341 with the exception of cancelled implicit barriers - then only handle
8342 the current thread. */
8343 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8344 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8345 tree thr_num
= create_tmp_var (integer_type_node
);
8346 g
= gimple_build_call (t
, 0);
8347 gimple_call_set_lhs (g
, thr_num
);
8348 gimple_seq_add_stmt (end
, g
);
8352 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8353 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8354 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8355 if (code
== OMP_FOR
)
8356 c
= gimple_omp_for_clauses (ctx
->stmt
);
8357 else /* if (code == OMP_SECTIONS) */
8358 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8359 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
8361 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
8363 gimple_seq_add_stmt (end
, g
);
8364 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
/* Cancelled case: only this thread's chunk is processed.  */
8365 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
8366 gimple_seq_add_stmt (end
, g
);
8367 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
8368 build_one_cst (TREE_TYPE (idx
)));
8369 gimple_seq_add_stmt (end
, g
);
8370 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
8371 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8373 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
8374 gimple_seq_add_stmt (end
, g
);
8375 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8377 if (code
!= OMP_PARALLEL
)
8379 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
8380 tree num_thr
= create_tmp_var (integer_type_node
);
8381 g
= gimple_build_call (t
, 0);
8382 gimple_call_set_lhs (g
, num_thr
);
8383 gimple_seq_add_stmt (end
, g
);
8384 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
8385 gimple_seq_add_stmt (end
, g
);
8387 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
8391 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
8392 OMP_CLAUSE__REDUCTEMP_
);
8393 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
8394 t
= fold_convert (size_type_node
, t
);
8395 gimplify_assign (num_thr_sz
, t
, end
);
8397 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8398 NULL_TREE
, NULL_TREE
);
8399 tree data
= create_tmp_var (pointer_sized_int_node
);
8400 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
/* LAB1 heads the per-thread loop of the END sequence.  */
8401 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
8403 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
8404 ptr
= create_tmp_var (build_pointer_type (record_type
));
8406 ptr
= create_tmp_var (ptr_type_node
);
8407 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
8409 tree field
= TYPE_FIELDS (record_type
);
8412 field
= DECL_CHAIN (DECL_CHAIN (field
));
/* Second two-pass walk: emit registration entries into START and the
   per-clause merge/destruct code into END.  */
8413 for (int pass
= 0; pass
< 2; pass
++)
8415 tree decl
, type
, next
;
8416 for (tree c
= clauses
;
8417 omp_task_reduction_iterate (pass
, code
, ccode
,
8418 &c
, &decl
, &type
, &next
); c
= next
)
8420 tree var
= decl
, ref
;
/* Peel MEM_REF / POINTER_PLUS / ADDR / INDIRECT wrappers to reach the
   underlying variable of an array-section reduction.  */
8421 if (TREE_CODE (decl
) == MEM_REF
)
8423 var
= TREE_OPERAND (var
, 0);
8424 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
8425 var
= TREE_OPERAND (var
, 0);
8427 if (TREE_CODE (var
) == ADDR_EXPR
)
8428 var
= TREE_OPERAND (var
, 0);
8429 else if (TREE_CODE (var
) == INDIRECT_REF
)
8430 var
= TREE_OPERAND (var
, 0);
8431 tree orig_var
= var
;
8432 if (is_variable_sized (var
))
8434 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
8435 var
= DECL_VALUE_EXPR (var
);
8436 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
8437 var
= TREE_OPERAND (var
, 0);
8438 gcc_assert (DECL_P (var
));
8440 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8441 if (orig_var
!= var
)
8442 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
8443 else if (TREE_CODE (v
) == ADDR_EXPR
)
8444 t
= build_fold_addr_expr (t
);
8445 else if (TREE_CODE (v
) == INDIRECT_REF
)
8446 t
= build_fold_indirect_ref (t
);
8447 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
8449 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
8450 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
8451 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
8453 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
8454 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
8455 fold_convert (size_type_node
,
8456 TREE_OPERAND (decl
, 1)));
8460 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8461 if (!omp_is_reference (decl
))
8462 t
= build_fold_addr_expr (t
);
8464 t
= fold_convert (pointer_sized_int_node
, t
);
8466 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8467 gimple_seq_add_seq (start
, seq
);
/* avar[7 + cnt*3] = address of the original variable;
   avar[7 + cnt*3 + 1] = field offset inside the record.  */
8468 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8469 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8470 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8471 t
= unshare_expr (byte_position (field
));
8472 t
= fold_convert (pointer_sized_int_node
, t
);
8473 ctx
->task_reduction_map
->put (c
, cnt
);
8474 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
8477 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8478 gimple_seq_add_seq (start
, seq
);
8479 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8480 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
8481 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8483 tree bfield
= DECL_CHAIN (field
);
8485 if (code
== OMP_PARALLEL
|| code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8486 /* In parallel or worksharing all threads unconditionally
8487 initialize all their task reduction private variables. */
8488 cond
= boolean_true_node
;
8489 else if (TREE_TYPE (ptr
) == ptr_type_node
)
8491 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8492 unshare_expr (byte_position (bfield
)));
8494 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
8495 gimple_seq_add_seq (end
, seq
);
8496 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
8497 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
8498 build_int_cst (pbool
, 0));
8501 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
8502 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
8503 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8504 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8505 tree condv
= create_tmp_var (boolean_type_node
);
8506 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
8507 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
8509 gimple_seq_add_stmt (end
, g
);
8510 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
8511 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
8513 /* If this reduction doesn't need destruction and parallel
8514 has been cancelled, there is nothing to do for this
8515 reduction, so jump around the merge operation. */
8516 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8517 g
= gimple_build_cond (NE_EXPR
, cancellable
,
8518 build_zero_cst (TREE_TYPE (cancellable
)),
8520 gimple_seq_add_stmt (end
, g
);
8521 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
/* Compute NEW_VAR = this thread's private copy inside the record.  */
8525 if (TREE_TYPE (ptr
) == ptr_type_node
)
8527 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8528 unshare_expr (byte_position (field
)));
8530 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
8531 gimple_seq_add_seq (end
, seq
);
8532 tree pbool
= build_pointer_type (TREE_TYPE (field
));
8533 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
8534 build_int_cst (pbool
, 0));
8537 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
8538 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
8540 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
8541 if (TREE_CODE (decl
) != MEM_REF
&& omp_is_reference (decl
))
8542 ref
= build_simple_mem_ref (ref
);
8543 /* reduction(-:var) sums up the partial results, so it acts
8544 identically to reduction(+:var). */
8545 if (rcode
== MINUS_EXPR
)
/* Array-section (MEM_REF) reductions merge element by element in a loop
   from index 0 through the section's maximum index V.  */
8547 if (TREE_CODE (decl
) == MEM_REF
)
8549 tree type
= TREE_TYPE (new_var
);
8550 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8551 tree i
= create_tmp_var (TREE_TYPE (v
));
8552 tree ptype
= build_pointer_type (TREE_TYPE (type
));
8555 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
8556 tree vv
= create_tmp_var (TREE_TYPE (v
));
8557 gimplify_assign (vv
, v
, start
);
8560 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8561 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8562 new_var
= build_fold_addr_expr (new_var
);
8563 new_var
= fold_convert (ptype
, new_var
);
8564 ref
= fold_convert (ptype
, ref
);
8565 tree m
= create_tmp_var (ptype
);
8566 gimplify_assign (m
, new_var
, end
);
8568 m
= create_tmp_var (ptype
);
8569 gimplify_assign (m
, ref
, end
);
8571 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
8572 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
8573 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
8574 gimple_seq_add_stmt (end
, gimple_build_label (body
));
8575 tree priv
= build_simple_mem_ref (new_var
);
8576 tree out
= build_simple_mem_ref (ref
);
8577 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8579 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8580 tree decl_placeholder
8581 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
8582 tree lab6
= NULL_TREE
;
8585 /* If this reduction needs destruction and parallel
8586 has been cancelled, jump around the merge operation
8587 to the destruction. */
8588 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8589 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8590 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8591 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8593 gimple_seq_add_stmt (end
, g
);
8594 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8596 SET_DECL_VALUE_EXPR (placeholder
, out
);
8597 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8598 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
8599 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
8600 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8601 gimple_seq_add_seq (end
,
8602 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8603 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8604 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8606 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8607 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
8610 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8611 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
8614 gimple_seq tseq
= NULL
;
8615 gimplify_stmt (&x
, &tseq
);
8616 gimple_seq_add_seq (end
, tseq
);
8621 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
8622 out
= unshare_expr (out
);
8623 gimplify_assign (out
, x
, end
);
/* Advance the element pointers and loop counter, loop while i <= v.  */
8626 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
8627 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8628 gimple_seq_add_stmt (end
, g
);
8629 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
8630 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8631 gimple_seq_add_stmt (end
, g
);
8632 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
8633 build_int_cst (TREE_TYPE (i
), 1));
8634 gimple_seq_add_stmt (end
, g
);
8635 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
8636 gimple_seq_add_stmt (end
, g
);
8637 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
/* Scalar reduction with a user-defined combiner (placeholder case).  */
8639 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8641 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8642 tree oldv
= NULL_TREE
;
8643 tree lab6
= NULL_TREE
;
8646 /* If this reduction needs destruction and parallel
8647 has been cancelled, jump around the merge operation
8648 to the destruction. */
8649 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8650 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8651 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8652 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8654 gimple_seq_add_stmt (end
, g
);
8655 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8657 if (omp_is_reference (decl
)
8658 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
8660 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8661 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8662 tree refv
= create_tmp_var (TREE_TYPE (ref
));
8663 gimplify_assign (refv
, ref
, end
);
8664 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
8665 SET_DECL_VALUE_EXPR (placeholder
, ref
);
8666 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8667 tree d
= maybe_lookup_decl (decl
, ctx
);
8669 if (DECL_HAS_VALUE_EXPR_P (d
))
8670 oldv
= DECL_VALUE_EXPR (d
);
8671 if (omp_is_reference (var
))
8673 tree v
= fold_convert (TREE_TYPE (d
),
8674 build_fold_addr_expr (new_var
));
8675 SET_DECL_VALUE_EXPR (d
, v
);
8678 SET_DECL_VALUE_EXPR (d
, new_var
);
8679 DECL_HAS_VALUE_EXPR_P (d
) = 1;
8680 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
/* Restore (or clear) the saved DECL_VALUE_EXPR after lowering the merge.  */
8682 SET_DECL_VALUE_EXPR (d
, oldv
);
8685 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
8686 DECL_HAS_VALUE_EXPR_P (d
) = 0;
8688 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8689 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8690 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8691 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8693 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8694 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
8697 gimple_seq tseq
= NULL
;
8698 gimplify_stmt (&x
, &tseq
);
8699 gimple_seq_add_seq (end
, tseq
);
/* Plain scalar reduction: ref = ref <rcode> new_var.  */
8704 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
8705 ref
= unshare_expr (ref
);
8706 gimplify_assign (ref
, x
, end
);
8708 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8710 field
= DECL_CHAIN (bfield
);
/* Register the array with the runtime and publish its address through the
   _REDUCTEMP_ clause of the enclosing construct.  */
8714 if (code
== OMP_TASKGROUP
)
8716 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
8717 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8718 gimple_seq_add_stmt (start
, g
);
8723 if (code
== OMP_FOR
)
8724 c
= gimple_omp_for_clauses (ctx
->stmt
);
8725 else if (code
== OMP_SECTIONS
)
8726 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8728 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
8729 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
8730 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
8731 build_fold_addr_expr (avar
));
8732 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
/* Close the per-thread loop and unregister with the runtime.  */
8735 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
8736 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
8738 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
8739 gimple_seq_add_stmt (end
, g
);
8740 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
8741 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8743 enum built_in_function bfn
8744 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
8745 t
= builtin_decl_explicit (bfn
);
8746 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
8750 arg
= create_tmp_var (c_bool_type
);
8751 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
8755 arg
= build_int_cst (c_bool_type
, 0);
8756 g
= gimple_build_call (t
, 1, arg
);
8760 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
8761 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8763 gimple_seq_add_stmt (end
, g
);
/* Finally clobber the array (volatile constructor store) so its storage
   is known dead past this point.  */
8764 t
= build_constructor (atype
, NULL
);
8765 TREE_THIS_VOLATILE (t
) = 1;
8766 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
8769 /* Expand code for an OpenMP taskgroup directive. */
/* NOTE(review): fragmented extraction -- statements are split across lines
   and some source lines are missing (gaps in embedded numbering).  Tokens
   kept byte-for-byte; verify against the canonical omp-low.c.  */
8772 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8774 gimple
*stmt
= gsi_stmt (*gsi_p
);
8777 gimple_seq dseq
= NULL
;
8778 tree block
= make_node (BLOCK
);
8780 bind
= gimple_build_bind (NULL
, NULL
, block
);
8781 gsi_replace (gsi_p
, bind
, true);
8782 gimple_bind_add_stmt (bind
, stmt
);
8784 push_gimplify_context ();
/* Emit the GOMP_taskgroup_start call ahead of the body.  */
8786 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
8788 gimple_bind_add_stmt (bind
, x
);
/* Register task reductions into the bind body; their teardown goes into
   DSEQ and is appended after the OMP return below.  */
8790 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
8791 gimple_omp_taskgroup_clauses (stmt
),
8792 gimple_bind_body_ptr (bind
), &dseq
);
8794 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8795 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8796 gimple_omp_set_body (stmt
, NULL
);
8798 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8799 gimple_bind_add_seq (bind
, dseq
);
8801 pop_gimplify_context (bind
);
8803 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8804 BLOCK_VARS (block
) = ctx
->block_vars
;
8808 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in ORD_STMT if
   possible.  Adjacent "#pragma omp ordered depend(sink:...)" statements
   are merged into one, and their dependence vectors are folded into a
   single canonical sink vector (GCD of the first elements, min/max of
   the remaining ones).  Bails out early when the enclosing statement
   is not a GIMPLE_OMP_FOR.  */
8811 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
8814 struct omp_for_data fd
;
8815 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
8818 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
8819 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
8820 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
8824 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
8825 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
8826 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
8827 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
8829 /* Merge depend clauses from multiple adjacent
8830 #pragma omp ordered depend(sink:...) constructs
8831 into one #pragma omp ordered depend(sink:...), so that
8832 we can optimize them together. */
/* Scan forward from *GSI_P; each following GIMPLE_OMP_ORDERED whose
   clauses are all depend(sink:...) has its clauses spliced onto this
   statement's clause chain and is then removed.  Debug stmts and
   GIMPLE_NOPs are skipped; anything else terminates the scan.  */
8833 gimple_stmt_iterator gsi
= *gsi_p
;
8835 while (!gsi_end_p (gsi
))
8837 gimple
*stmt
= gsi_stmt (gsi
);
8838 if (is_gimple_debug (stmt
)
8839 || gimple_code (stmt
) == GIMPLE_NOP
)
8844 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
8846 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
8847 c
= gimple_omp_ordered_clauses (ord_stmt2
);
8849 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
8850 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8853 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
8855 gsi_remove (&gsi
, true);
8859 /* Canonicalize sink dependence clauses into one folded clause if
8862 The basic algorithm is to create a sink vector whose first
8863 element is the GCD of all the first elements, and whose remaining
8864 elements are the minimum of the subsequent columns.
8866 We ignore dependence vectors whose first element is zero because
8867 such dependencies are known to be executed by the same thread.
8869 We take into account the direction of the loop, so a minimum
8870 becomes a maximum if the loop is iterating forwards.  We also
8871 ignore sink clauses where the loop direction is unknown, or where
8872 the offsets are clearly invalid because they are not a multiple
8873 of the loop increment.
8877 #pragma omp for ordered(2)
8878 for (i=0; i < N; ++i)
8879 for (j=0; j < M; ++j)
8881 #pragma omp ordered \
8882 depend(sink:i-8,j-2) \
8883 depend(sink:i,j-1) \ // Completely ignored because i+0.
8884 depend(sink:i-4,j-3) \
8885 depend(sink:i-6,j-4)
8886 #pragma omp ordered depend(source)
8891 depend(sink:-gcd(8,4,6),-min(2,3,4))
8896 /* FIXME: Computing GCD's where the first element is zero is
8897 non-trivial in the presence of collapsed loops.  Do this later. */
8898 if (fd
.collapse
> 1)
/* FOLDED_DEPS holds the folded vector in [0, len) plus scratch copies
   of the current clause's trailing elements in [len, 2*len-1).  */
8901 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
8903 /* wide_int is not a POD so it must be default-constructed. */
8904 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
8905 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
8907 tree folded_dep
= NULL_TREE
;
8908 /* TRUE if the first dimension's offset is negative. */
8909 bool neg_offset_p
= false;
8911 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
/* Fold every depend(sink:...) clause into FOLDED_DEP/FOLDED_DEPS,
   removing all clauses but the one kept as the folded result.  */
8913 while ((c
= *list_p
) != NULL
)
8915 bool remove
= false;
8917 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
8918 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8919 goto next_ordered_clause
;
8922 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
8923 vec
&& TREE_CODE (vec
) == TREE_LIST
;
8924 vec
= TREE_CHAIN (vec
), ++i
)
8926 gcc_assert (i
< len
);
8928 /* omp_extract_for_data has canonicalized the condition. */
8929 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
8930 || fd
.loops
[i
].cond_code
== GT_EXPR
);
8931 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
8932 bool maybe_lexically_later
= true;
8934 /* While the committee makes up its mind, bail if we have any
8935 non-constant steps. */
8936 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
8937 goto lower_omp_ordered_ret
;
8939 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
8940 if (POINTER_TYPE_P (itype
))
8942 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
8943 TYPE_PRECISION (itype
),
8946 /* Ignore invalid offsets that are not multiples of the step. */
8947 if (!wi::multiple_of_p (wi::abs (offset
),
8948 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
8951 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
8952 "ignoring sink clause with offset that is not "
8953 "a multiple of the loop step");
8955 goto next_ordered_clause
;
8958 /* Calculate the first dimension.  The first dimension of
8959 the folded dependency vector is the GCD of the first
8960 elements, while ignoring any first elements whose offset
8964 /* Ignore dependence vectors whose first dimension is 0. */
8968 goto next_ordered_clause
;
/* An offset in the direction of the loop iteration for the
   outermost dimension would name a lexically-later iteration,
   which is not a valid sink — report and give up.  */
8972 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
8974 error_at (OMP_CLAUSE_LOCATION (c
),
8975 "first offset must be in opposite direction "
8976 "of loop iterations");
8977 goto lower_omp_ordered_ret
;
8981 neg_offset_p
= forward
;
8982 /* Initialize the first time around. */
8983 if (folded_dep
== NULL_TREE
)
8986 folded_deps
[0] = offset
;
8989 folded_deps
[0] = wi::gcd (folded_deps
[0],
8993 /* Calculate minimum for the remaining dimensions. */
8996 folded_deps
[len
+ i
- 1] = offset
;
8997 if (folded_dep
== c
)
8998 folded_deps
[i
] = offset
;
8999 else if (maybe_lexically_later
9000 && !wi::eq_p (folded_deps
[i
], offset
))
9002 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
9006 for (j
= 1; j
<= i
; j
++)
9007 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
9010 maybe_lexically_later
= false;
9014 gcc_assert (i
== len
);
9018 next_ordered_clause
:
9020 *list_p
= OMP_CLAUSE_CHAIN (c
);
9022 list_p
= &OMP_CLAUSE_CHAIN (c
);
/* NOTE(review): sign of the folded first element is restored here
   based on NEG_OFFSET_P — verify against the folding above.  */
9028 folded_deps
[0] = -folded_deps
[0];
9030 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
9031 if (POINTER_TYPE_P (itype
))
9034 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
9035 = wide_int_to_tree (itype
, folded_deps
[0]);
/* Re-link the single folded clause as the statement's clause list.  */
9036 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
9037 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
9040 lower_omp_ordered_ret
:
9042 /* Ordered without clauses is "#pragma omp ordered threads", while we want
9043 a nop instead if we remove all clauses. */
9044 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
9045 gsi_replace (gsi_p
, gimple_build_nop (), true);
9049 /* Expand code for an OpenMP ordered directive.  Wraps the region in a
   GIMPLE_BIND with GOMP_ordered_start/GOMP_ordered_end calls — or the
   IFN_GOMP_SIMD_ORDERED_START/END internal functions for simd — and,
   on the SIMT path, a per-lane serialization loop (see below).  */
9052 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9055 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
9056 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
/* Detect the simd and threads clauses on this ordered directive.  */
9059 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9061 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9064 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
9065 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9066 OMP_CLAUSE_THREADS
);
9068 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9071 /* FIXME: This needs to be moved to the expansion to verify various
9072 conditions only testable on cfg with dominators computed, and also
9073 all the depend clauses to be merged still might need to be available
9074 for the runtime checks. */
9076 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
9080 push_gimplify_context ();
9082 block
= make_node (BLOCK
);
9083 bind
= gimple_build_bind (NULL
, NULL
, block
);
9084 gsi_replace (gsi_p
, bind
, true);
9085 gimple_bind_add_stmt (bind
, stmt
);
/* simd path: the start is the IFN_GOMP_SIMD_ORDERED_START internal fn
   (THREADS passed as its argument); plain path below calls
   GOMP_ordered_start.  */
9089 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
9090 build_int_cst (NULL_TREE
, threads
));
9091 cfun
->has_simduid_loops
= true;
9094 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
9096 gimple_bind_add_stmt (bind
, x
);
/* SIMT: serialize lanes.  COUNTER holds this lane's index
   (IFN_GOMP_SIMT_LANE); each trip of the BODY..TEST loop lets the lane
   whose turn it is (IFN_GOMP_SIMT_ORDERED_PRED == 0) execute the
   region, decrements COUNTER, and IFN_GOMP_SIMT_VOTE_ANY loops back
   while any lane still has a non-negative counter.  */
9098 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
9101 counter
= create_tmp_var (integer_type_node
);
9102 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
9103 gimple_call_set_lhs (g
, counter
);
9104 gimple_bind_add_stmt (bind
, g
);
9106 body
= create_artificial_label (UNKNOWN_LOCATION
);
9107 test
= create_artificial_label (UNKNOWN_LOCATION
);
9108 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
9110 tree simt_pred
= create_tmp_var (integer_type_node
);
9111 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
9112 gimple_call_set_lhs (g
, simt_pred
);
9113 gimple_bind_add_stmt (bind
, g
);
9115 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
9116 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
9117 gimple_bind_add_stmt (bind
, g
);
9119 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
9121 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9122 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9123 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9124 gimple_omp_set_body (stmt
, NULL
);
9128 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
9129 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
9130 gimple_bind_add_stmt (bind
, g
);
9132 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
9133 tree nonneg
= create_tmp_var (integer_type_node
);
9134 gimple_seq tseq
= NULL
;
9135 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
9136 gimple_bind_add_seq (bind
, tseq
);
9138 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
9139 gimple_call_set_lhs (g
, nonneg
);
9140 gimple_bind_add_stmt (bind
, g
);
9142 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
9143 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
9144 gimple_bind_add_stmt (bind
, g
);
9146 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
/* Matching end call: IFN_GOMP_SIMD_ORDERED_END for simd, otherwise
   GOMP_ordered_end.  */
9149 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
9150 build_int_cst (NULL_TREE
, threads
));
9152 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
9154 gimple_bind_add_stmt (bind
, x
);
9156 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9158 pop_gimplify_context (bind
);
9160 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9161 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
9165 /* Expand code for an OpenMP scan directive and the structured block
9166 before the scan directive.  Emits the per-phase reduction code
   (input phase vs. scan phase) into BEFORE and splices it around the
   scan statement's body.  CTX is the scan's context, CTX->outer the
   enclosing worksharing/simd loop.  */
9169 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9171 gimple
*stmt
= gsi_stmt (*gsi_p
);
9173 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
9174 tree lane
= NULL_TREE
;
9175 gimple_seq before
= NULL
;
9176 omp_context
*octx
= ctx
->outer
;
9178 if (octx
->scan_exclusive
&& !has_clauses
)
9180 gimple_stmt_iterator gsi2
= *gsi_p
;
9182 gimple
*stmt2
= gsi_stmt (gsi2
);
9183 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9184 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9185 the one with exclusive clause(s), comes first. */
9187 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
9188 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
9190 gsi_remove (gsi_p
, false);
9191 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
9192 ctx
= maybe_lookup_ctx (stmt2
);
/* Recurse: lower the clause-bearing scan that is now first.  */
9194 lower_omp_scan (gsi_p
, ctx
);
/* Classify the enclosing loop: plain simd, plain worksharing for, or
   a combined for-simd; INPUT_PHASE says whether this scan separator
   marks the end of the input phase.  */
9199 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
9200 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
9201 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
9202 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
9203 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
9204 && !gimple_omp_for_combined_p (octx
->stmt
));
9205 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
9206 if (is_for_simd
&& octx
->for_simd_scan_phase
)
/* simd: obtain the lane number via IFN_GOMP_SIMD_LANE keyed off the
   loop's _simduid_; the second argument encodes the scan kind.  */
9209 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
9210 OMP_CLAUSE__SIMDUID_
))
9212 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
9213 lane
= create_tmp_var (unsigned_type_node
);
9214 tree t
= build_int_cst (integer_type_node
,
9216 : octx
->scan_inclusive
? 2 : 3);
9218 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
9219 gimple_call_set_lhs (g
, lane
);
9220 gimple_seq_add_stmt (&before
, g
);
9223 if (is_simd
|| is_for
)
/* Process every inscan reduction clause on the enclosing loop.  */
9225 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
9226 c
; c
= OMP_CLAUSE_CHAIN (c
))
9227 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9228 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
9230 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9231 tree var
= OMP_CLAUSE_DECL (c
);
9232 tree new_var
= lookup_decl (var
, octx
);
9234 tree var2
= NULL_TREE
;
9235 tree var3
= NULL_TREE
;
9236 tree var4
= NULL_TREE
;
9237 tree lane0
= NULL_TREE
;
9238 tree new_vard
= new_var
;
9239 if (omp_is_reference (var
))
9241 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
9244 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
9246 val
= DECL_VALUE_EXPR (new_vard
);
9247 if (new_vard
!= new_var
)
9249 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
9250 val
= TREE_OPERAND (val
, 0);
/* When the privatized var lives in an "omp simd array", redirect
   its index to LANE and remember the original index in LANE0.  */
9252 if (TREE_CODE (val
) == ARRAY_REF
9253 && VAR_P (TREE_OPERAND (val
, 0)))
9255 tree v
= TREE_OPERAND (val
, 0);
9256 if (lookup_attribute ("omp simd array",
9257 DECL_ATTRIBUTES (v
)))
9259 val
= unshare_expr (val
);
9260 lane0
= TREE_OPERAND (val
, 1);
9261 TREE_OPERAND (val
, 1) = lane
;
9262 var2
= lookup_decl (v
, octx
);
9263 if (octx
->scan_exclusive
)
9264 var4
= lookup_decl (var2
, octx
);
9266 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9267 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
9270 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
9271 var2
, lane
, NULL_TREE
, NULL_TREE
);
9272 TREE_THIS_NOTRAP (var2
) = 1;
9273 if (octx
->scan_exclusive
)
9275 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
9276 var4
, lane
, NULL_TREE
,
9278 TREE_THIS_NOTRAP (var4
) = 1;
9289 var2
= build_outer_var_ref (var
, octx
);
9290 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9292 var3
= maybe_lookup_decl (new_vard
, octx
);
9293 if (var3
== new_vard
|| var3
== NULL_TREE
)
9295 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
9297 var4
= maybe_lookup_decl (var3
, octx
);
9298 if (var4
== var3
|| var4
== NULL_TREE
)
9300 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
9311 && octx
->scan_exclusive
9313 && var4
== NULL_TREE
)
9314 var4
= create_tmp_var (TREE_TYPE (val
));
/* User-defined reductions: rewrite the clause's recorded init or
   merge sequence with the placeholder/var value-exprs temporarily
   redirected, then lower and append it to BEFORE.  */
9316 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9318 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9323 /* If we've added a separate identity element
9324 variable, copy it over into val. */
9325 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9327 gimplify_and_add (x
, &before
);
9329 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
9331 /* Otherwise, assign to it the identity element. */
9332 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9334 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9335 tree ref
= build_outer_var_ref (var
, octx
);
9336 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9337 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9340 if (new_vard
!= new_var
)
9341 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9342 SET_DECL_VALUE_EXPR (new_vard
, val
);
9344 SET_DECL_VALUE_EXPR (placeholder
, ref
);
9345 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9346 lower_omp (&tseq
, octx
);
9348 SET_DECL_VALUE_EXPR (new_vard
, x
);
9349 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9350 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9351 gimple_seq_add_seq (&before
, tseq
);
9353 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9359 if (octx
->scan_exclusive
)
/* Exclusive scan: save the previous prefix into VAR4 before
   merging the new value into VAR2.  */
9361 tree v4
= unshare_expr (var4
);
9362 tree v2
= unshare_expr (var2
);
9363 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
9364 gimplify_and_add (x
, &before
);
9366 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9367 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9368 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9370 if (x
&& new_vard
!= new_var
)
9371 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
9373 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
9374 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9375 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9376 lower_omp (&tseq
, octx
);
9377 gimple_seq_add_seq (&before
, tseq
);
9378 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9380 SET_DECL_VALUE_EXPR (new_vard
, x
);
9381 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9382 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9383 if (octx
->scan_inclusive
)
9385 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9387 gimplify_and_add (x
, &before
);
9389 else if (lane0
== NULL_TREE
)
9391 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9393 gimplify_and_add (x
, &before
);
9401 /* input phase.  Set val to initializer before
9403 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
9404 gimplify_assign (val
, x
, &before
);
/* Built-in reduction operator: fold VAL into the running prefix
   VAR2.  NOTE(review): the (elided) body of the MINUS_EXPR check
   presumably canonicalizes subtraction — confirm upstream.  */
9409 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
9410 if (code
== MINUS_EXPR
)
9413 tree x
= build2 (code
, TREE_TYPE (var2
),
9414 unshare_expr (var2
), unshare_expr (val
));
9415 if (octx
->scan_inclusive
)
9417 gimplify_assign (unshare_expr (var2
), x
, &before
);
9418 gimplify_assign (val
, var2
, &before
);
9422 gimplify_assign (unshare_expr (var4
),
9423 unshare_expr (var2
), &before
);
9424 gimplify_assign (var2
, x
, &before
);
9425 if (lane0
== NULL_TREE
)
9426 gimplify_assign (val
, var4
, &before
);
9430 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
9432 tree vexpr
= unshare_expr (var4
);
9433 TREE_OPERAND (vexpr
, 1) = lane0
;
9434 if (new_vard
!= new_var
)
9435 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
9436 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
9440 if (is_simd
&& !is_for_simd
)
/* Plain simd: splice the scan body and BEFORE after the statement
   and turn the scan itself into a nop.  */
9442 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
9443 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
9444 gsi_replace (gsi_p
, gimple_build_nop (), true);
9447 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
9450 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (stmt
));
9451 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
9456 /* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
9457 substitution of a couple of function calls.  But in the NAMED case,
9458 requires that languages coordinate a symbol name.  It is therefore
9459 best put here in common code. */
/* Map from the IDENTIFIER of a named critical section to the VAR_DECL
   of its lazily-created mutex symbol; GTY keeps it GC-rooted across
   invocations of lower_omp_critical.  */
9461 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
/* Lower a GIMPLE_OMP_CRITICAL at *GSI_P: wrap the region body in a
   GIMPLE_BIND bracketed by GOMP_critical_(name_)start/end calls.
   CTX is the region's lowering context.  */
9464 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9467 tree name
, lock
, unlock
;
9468 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
9470 location_t loc
= gimple_location (stmt
);
9473 name
= gimple_omp_critical_name (stmt
);
/* Lazily create the shared name -> mutex-decl map.  */
9478 if (!critical_name_mutexes
)
9479 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
9481 tree
*n
= critical_name_mutexes
->get (name
);
/* First use of this NAME: build a public, common, pointer-sized
   symbol ".gomp_critical_user_<name>" so every translation unit
   using the same name shares a single lock.  */
9486 decl
= create_tmp_var_raw (ptr_type_node
);
9488 new_str
= ACONCAT ((".gomp_critical_user_",
9489 IDENTIFIER_POINTER (name
), NULL
));
9490 DECL_NAME (decl
) = get_identifier (new_str
);
9491 TREE_PUBLIC (decl
) = 1;
9492 TREE_STATIC (decl
) = 1;
9493 DECL_COMMON (decl
) = 1;
9494 DECL_ARTIFICIAL (decl
) = 1;
9495 DECL_IGNORED_P (decl
) = 1;
9497 varpool_node::finalize_decl (decl
);
9499 critical_name_mutexes
->put (name
, decl
);
9504 /* If '#pragma omp critical' is inside offloaded region or
9505 inside function marked as offloadable, the symbol must be
9506 marked as offloadable too. */
9508 if (cgraph_node::get (current_function_decl
)->offloadable
)
9509 varpool_node::get_create (decl
)->offloadable
= 1;
9511 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
9512 if (is_gimple_omp_offloaded (octx
->stmt
))
9514 varpool_node::get_create (decl
)->offloadable
= 1;
/* Named critical: lock/unlock take the mutex symbol's address.  */
9518 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
9519 lock
= build_call_expr_loc (loc
, lock
, 1,
9520 build_fold_addr_expr_loc (loc
, decl
));
9522 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
9523 unlock
= build_call_expr_loc (loc
, unlock
, 1,
9524 build_fold_addr_expr_loc (loc
, decl
));
/* Unnamed critical: use the global GOMP_critical_start/end pair.  */
9528 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
9529 lock
= build_call_expr_loc (loc
, lock
, 0);
9531 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
9532 unlock
= build_call_expr_loc (loc
, unlock
, 0);
9535 push_gimplify_context ();
9537 block
= make_node (BLOCK
);
9538 bind
= gimple_build_bind (NULL
, NULL
, block
);
9539 gsi_replace (gsi_p
, bind
, true);
9540 gimple_bind_add_stmt (bind
, stmt
);
/* Emit the lock call ahead of the region body...  */
9542 tbody
= gimple_bind_body (bind
);
9543 gimplify_and_add (lock
, &tbody
);
9544 gimple_bind_set_body (bind
, tbody
);
9546 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9547 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9548 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9549 gimple_omp_set_body (stmt
, NULL
);
/* ...and the unlock call after it.  */
9551 tbody
= gimple_bind_body (bind
);
9552 gimplify_and_add (unlock
, &tbody
);
9553 gimple_bind_set_body (bind
, tbody
);
9555 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9557 pop_gimplify_context (bind
);
9558 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9559 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
9562 /* A subroutine of lower_omp_for.  Generate code to emit the predicate
9563 for a lastprivate clause.  Given a loop control predicate of (V
9564 cond N2), we gate the clause on (!(V cond N2)).  The lowered form
9565 is appended to *DLIST, iterator initialization is appended to
9566 *BODY_P.  *CLIST is for lastprivate(conditional:) code that needs
9567 to be emitted in a critical section. */
9570 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
9571 gimple_seq
*dlist
, gimple_seq
*clist
,
9572 struct omp_context
*ctx
)
9574 tree clauses
, cond
, vinit
;
9575 enum tree_code cond_code
;
/* Invert the loop's (canonicalized LT/GT) condition for the gate.  */
9578 cond_code
= fd
->loop
.cond_code
;
9579 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
9581 /* When possible, use a strict equality expression.  This can let VRP
9582 type optimizations deduce the value and remove a copy. */
9583 if (tree_fits_shwi_p (fd
->loop
.step
))
9585 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
9586 if (step
== 1 || step
== -1)
9587 cond_code
= EQ_EXPR
;
/* For a combined collapsed loop with a non-constant end bound, the
   real N2 lives in a _looptemp_ clause of the enclosing
   parallel/task/distribute region — locate that region first.  */
9590 tree n2
= fd
->loop
.n2
;
9591 if (fd
->collapse
> 1
9592 && TREE_CODE (n2
) != INTEGER_CST
9593 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
9595 struct omp_context
*taskreg_ctx
= NULL
;
9596 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
9598 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
9599 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
9600 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
9602 if (gimple_omp_for_combined_into_p (gfor
))
9604 gcc_assert (ctx
->outer
->outer
9605 && is_parallel_ctx (ctx
->outer
->outer
));
9606 taskreg_ctx
= ctx
->outer
->outer
;
/* Outer loop not combined further: recompute its bound directly.  */
9610 struct omp_for_data outer_fd
;
9611 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
9612 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
9615 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
9616 taskreg_ctx
= ctx
->outer
->outer
;
9618 else if (is_taskreg_ctx (ctx
->outer
))
9619 taskreg_ctx
= ctx
->outer
;
/* Walk the region's _looptemp_ clauses to the one holding N2.  */
9623 tree taskreg_clauses
9624 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
9625 tree innerc
= omp_find_clause (taskreg_clauses
,
9626 OMP_CLAUSE__LOOPTEMP_
);
9627 gcc_assert (innerc
);
9628 int count
= fd
->collapse
;
9630 && fd
->last_nonrect
== fd
->first_nonrect
+ 1)
9631 if (tree v
= gimple_omp_for_index (fd
->for_stmt
, fd
->last_nonrect
))
9632 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
9634 for (i
= 0; i
< count
; i
++)
9636 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
9637 OMP_CLAUSE__LOOPTEMP_
);
9638 gcc_assert (innerc
);
9640 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
9641 OMP_CLAUSE__LOOPTEMP_
);
9643 n2
= fold_convert (TREE_TYPE (n2
),
9644 lookup_decl (OMP_CLAUSE_DECL (innerc
),
9648 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
9650 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
9652 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
9653 if (!gimple_seq_empty_p (stmts
))
9655 gimple_seq_add_seq (&stmts
, *dlist
);
9658 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9659 vinit
= fd
->loop
.n1
;
9660 if (cond_code
== EQ_EXPR
9661 && tree_fits_shwi_p (fd
->loop
.n2
)
9662 && ! integer_zerop (fd
->loop
.n2
))
9663 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
9665 vinit
= unshare_expr (vinit
);
9667 /* Initialize the iterator variable, so that threads that don't execute
9668 any iterations don't execute the lastprivate clauses by accident. */
9669 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
9673 /* Callback for walk_gimple_seq.  Find #pragma omp scan statement.
   On a GIMPLE_OMP_SCAN, stores the iterator into WI->info and returns
   a non-NULL tree, which stops the walk.  */
9676 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
9677 struct walk_stmt_info
*wi
)
9679 gimple
*stmt
= gsi_stmt (*gsi_p
);
9681 *handled_ops_p
= true;
9682 switch (gimple_code (stmt
))
/* Descend into an inner combined simd loop so a scan nested in it
   is still found; other statements are not walked into.  */
9686 case GIMPLE_OMP_FOR
:
9687 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
9688 && gimple_omp_for_combined_into_p (stmt
))
9689 *handled_ops_p
= false;
9692 case GIMPLE_OMP_SCAN
:
/* Record where the scan was found and terminate the walk.  */
9693 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
9694 return integer_zero_node
;
9701 /* Helper function for lower_omp_for, add transformations for a worksharing
9702 loop with scan directives inside of it.
9703 For worksharing loop not combined with simd, transform:
9704 #pragma omp for reduction(inscan,+:r) private(i)
9705 for (i = 0; i < n; i = i + 1)
9710 #pragma omp scan inclusive(r)
9716 into two worksharing loops + code to merge results:
9718 num_threads = omp_get_num_threads ();
9719 thread_num = omp_get_thread_num ();
9720 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9725 // For UDRs this is UDR init, or if ctors are needed, copy from
9726 // var3 that has been constructed to contain the neutral element.
9730 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9731 // a shared array with num_threads elements and rprivb to a local array
9732 // number of elements equal to the number of (contiguous) iterations the
9733 // current thread will perform. controlb and controlp variables are
9734 // temporaries to handle deallocation of rprivb at the end of second
9736 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9737 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9738 for (i = 0; i < n; i = i + 1)
9741 // For UDRs this is UDR init or copy from var3.
9743 // This is the input phase from user code.
9747 // For UDRs this is UDR merge.
9749 // Rather than handing it over to the user, save to local thread's
9751 rprivb[ivar] = var2;
9752 // For exclusive scan, the above two statements are swapped.
9756 // And remember the final value from this thread's into the shared
9758 rpriva[(sizetype) thread_num] = var2;
9759 // If more than one thread, compute using Work-Efficient prefix sum
9760 // the inclusive parallel scan of the rpriva array.
9761 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9766 num_threadsu = (unsigned int) num_threads;
9767 thread_numup1 = (unsigned int) thread_num + 1;
9770 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9774 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9779 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9780 mul = REALPART_EXPR <cplx>;
9781 ovf = IMAGPART_EXPR <cplx>;
9782 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9785 andvm1 = andv + 4294967295;
9787 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9789 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9790 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9791 rpriva[l] = rpriva[l - k] + rpriva[l];
9793 if (down == 0) goto <D.2121>; else goto <D.2122>;
9801 if (k != 0) goto <D.2108>; else goto <D.2103>;
9803 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9805 // For UDRs this is UDR init or copy from var3.
9809 var2 = rpriva[thread_num - 1];
9812 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9813 reduction(inscan,+:r) private(i)
9814 for (i = 0; i < n; i = i + 1)
9817 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9818 r = var2 + rprivb[ivar];
9821 // This is the scan phase from user code.
9823 // Plus a bump of the iterator.
9829 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
9830 struct omp_for_data
*fd
, omp_context
*ctx
)
9832 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
9833 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
9835 gimple_seq body
= gimple_omp_body (stmt
);
9836 gimple_stmt_iterator input1_gsi
= gsi_none ();
9837 struct walk_stmt_info wi
;
9838 memset (&wi
, 0, sizeof (wi
));
9840 wi
.info
= (void *) &input1_gsi
;
9841 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
9842 gcc_assert (!gsi_end_p (input1_gsi
));
9844 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
9845 gimple_stmt_iterator gsi
= input1_gsi
;
9847 gimple_stmt_iterator scan1_gsi
= gsi
;
9848 gimple
*scan_stmt1
= gsi_stmt (gsi
);
9849 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
9851 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
9852 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
9853 gimple_omp_set_body (input_stmt1
, NULL
);
9854 gimple_omp_set_body (scan_stmt1
, NULL
);
9855 gimple_omp_set_body (stmt
, NULL
);
9857 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
9858 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
9859 gimple_omp_set_body (stmt
, body
);
9860 gimple_omp_set_body (input_stmt1
, input_body
);
9862 gimple_stmt_iterator input2_gsi
= gsi_none ();
9863 memset (&wi
, 0, sizeof (wi
));
9865 wi
.info
= (void *) &input2_gsi
;
9866 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
9867 gcc_assert (!gsi_end_p (input2_gsi
));
9869 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
9872 gimple_stmt_iterator scan2_gsi
= gsi
;
9873 gimple
*scan_stmt2
= gsi_stmt (gsi
);
9874 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
9875 gimple_omp_set_body (scan_stmt2
, scan_body
);
9877 gimple_stmt_iterator input3_gsi
= gsi_none ();
9878 gimple_stmt_iterator scan3_gsi
= gsi_none ();
9879 gimple_stmt_iterator input4_gsi
= gsi_none ();
9880 gimple_stmt_iterator scan4_gsi
= gsi_none ();
9881 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
9882 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
9883 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
9886 memset (&wi
, 0, sizeof (wi
));
9888 wi
.info
= (void *) &input3_gsi
;
9889 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
9890 gcc_assert (!gsi_end_p (input3_gsi
));
9892 input_stmt3
= gsi_stmt (input3_gsi
);
9896 scan_stmt3
= gsi_stmt (gsi
);
9897 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
9899 memset (&wi
, 0, sizeof (wi
));
9901 wi
.info
= (void *) &input4_gsi
;
9902 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
9903 gcc_assert (!gsi_end_p (input4_gsi
));
9905 input_stmt4
= gsi_stmt (input4_gsi
);
9909 scan_stmt4
= gsi_stmt (gsi
);
9910 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
9912 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
9913 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
9916 tree num_threads
= create_tmp_var (integer_type_node
);
9917 tree thread_num
= create_tmp_var (integer_type_node
);
9918 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
9919 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9920 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
9921 gimple_call_set_lhs (g
, num_threads
);
9922 gimple_seq_add_stmt (body_p
, g
);
9923 g
= gimple_build_call (threadnum_decl
, 0);
9924 gimple_call_set_lhs (g
, thread_num
);
9925 gimple_seq_add_stmt (body_p
, g
);
9927 tree ivar
= create_tmp_var (sizetype
);
9928 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
9929 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
9930 tree k
= create_tmp_var (unsigned_type_node
);
9931 tree l
= create_tmp_var (unsigned_type_node
);
9933 gimple_seq clist
= NULL
, mdlist
= NULL
;
9934 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
9935 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
9936 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
9937 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
9938 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
9939 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9940 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
9942 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9943 tree var
= OMP_CLAUSE_DECL (c
);
9944 tree new_var
= lookup_decl (var
, ctx
);
9945 tree var3
= NULL_TREE
;
9946 tree new_vard
= new_var
;
9947 if (omp_is_reference (var
))
9948 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
9949 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9951 var3
= maybe_lookup_decl (new_vard
, ctx
);
9952 if (var3
== new_vard
)
9956 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
9957 tree rpriva
= create_tmp_var (ptype
);
9958 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
9959 OMP_CLAUSE_DECL (nc
) = rpriva
;
9961 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9963 tree rprivb
= create_tmp_var (ptype
);
9964 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
9965 OMP_CLAUSE_DECL (nc
) = rprivb
;
9966 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
9968 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9970 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
9971 if (new_vard
!= new_var
)
9972 TREE_ADDRESSABLE (var2
) = 1;
9973 gimple_add_tmp_var (var2
);
9975 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
9976 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9977 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9978 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9979 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9981 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
9982 thread_num
, integer_minus_one_node
);
9983 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
9984 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9985 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9986 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9987 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9989 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
9990 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9991 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9992 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9993 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9995 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
9996 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
9997 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9998 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9999 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10000 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10002 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
10003 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10004 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
10005 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10007 tree var4
= is_for_simd
? new_var
: var2
;
10008 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
10011 var5
= lookup_decl (var
, input_simd_ctx
);
10012 var6
= lookup_decl (var
, scan_simd_ctx
);
10013 if (new_vard
!= new_var
)
10015 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
10016 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
10019 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10021 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
10024 x
= lang_hooks
.decls
.omp_clause_default_ctor
10025 (c
, var2
, build_outer_var_ref (var
, ctx
));
10027 gimplify_and_add (x
, &clist
);
10029 x
= build_outer_var_ref (var
, ctx
);
10030 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
10032 gimplify_and_add (x
, &thr01_list
);
10034 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10035 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10038 x
= unshare_expr (var4
);
10039 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
10040 gimplify_and_add (x
, &thrn1_list
);
10041 x
= unshare_expr (var4
);
10042 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
10043 gimplify_and_add (x
, &thr02_list
);
10045 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
10047 /* Otherwise, assign to it the identity element. */
10048 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
10049 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10052 if (new_vard
!= new_var
)
10053 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10054 SET_DECL_VALUE_EXPR (new_vard
, val
);
10055 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
10057 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
10058 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10059 lower_omp (&tseq
, ctx
);
10060 gimple_seq_add_seq (&thrn1_list
, tseq
);
10061 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
10062 lower_omp (&tseq
, ctx
);
10063 gimple_seq_add_seq (&thr02_list
, tseq
);
10064 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10065 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10066 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
10068 SET_DECL_VALUE_EXPR (new_vard
, y
);
10071 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
10072 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
10076 x
= unshare_expr (var4
);
10077 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
10078 gimplify_and_add (x
, &thrn2_list
);
10082 x
= unshare_expr (rprivb_ref
);
10083 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
10084 gimplify_and_add (x
, &scan1_list
);
10088 if (ctx
->scan_exclusive
)
10090 x
= unshare_expr (rprivb_ref
);
10091 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
10092 gimplify_and_add (x
, &scan1_list
);
10095 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10096 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10097 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10098 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10099 lower_omp (&tseq
, ctx
);
10100 gimple_seq_add_seq (&scan1_list
, tseq
);
10102 if (ctx
->scan_inclusive
)
10104 x
= unshare_expr (rprivb_ref
);
10105 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
10106 gimplify_and_add (x
, &scan1_list
);
10110 x
= unshare_expr (rpriva_ref
);
10111 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
10112 unshare_expr (var4
));
10113 gimplify_and_add (x
, &mdlist
);
10115 x
= unshare_expr (is_for_simd
? var6
: new_var
);
10116 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
10117 gimplify_and_add (x
, &input2_list
);
10120 if (new_vard
!= new_var
)
10121 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10123 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10124 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10125 SET_DECL_VALUE_EXPR (new_vard
, val
);
10126 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
10129 SET_DECL_VALUE_EXPR (placeholder
, var6
);
10130 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10133 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10134 lower_omp (&tseq
, ctx
);
10136 SET_DECL_VALUE_EXPR (new_vard
, y
);
10139 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
10140 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
10144 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
10145 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10146 lower_omp (&tseq
, ctx
);
10148 gimple_seq_add_seq (&input2_list
, tseq
);
10150 x
= build_outer_var_ref (var
, ctx
);
10151 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
10152 gimplify_and_add (x
, &last_list
);
10154 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
10155 gimplify_and_add (x
, &reduc_list
);
10156 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10157 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10159 if (new_vard
!= new_var
)
10160 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10161 SET_DECL_VALUE_EXPR (new_vard
, val
);
10162 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
10163 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10164 lower_omp (&tseq
, ctx
);
10165 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
10166 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10167 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10169 SET_DECL_VALUE_EXPR (new_vard
, y
);
10172 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
10173 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
10175 gimple_seq_add_seq (&reduc_list
, tseq
);
10176 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
10177 gimplify_and_add (x
, &reduc_list
);
10179 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
10181 gimplify_and_add (x
, dlist
);
10185 x
= build_outer_var_ref (var
, ctx
);
10186 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
10188 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
10189 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
10191 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
10193 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
10195 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
10196 if (code
== MINUS_EXPR
)
10200 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
10203 if (ctx
->scan_exclusive
)
10204 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
10206 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
10207 gimplify_assign (var2
, x
, &scan1_list
);
10208 if (ctx
->scan_inclusive
)
10209 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
10213 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
10216 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
10217 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
10219 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
10222 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
10223 unshare_expr (rprival_ref
));
10224 gimplify_assign (rprival_ref
, x
, &reduc_list
);
10228 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10229 gimple_seq_add_stmt (&scan1_list
, g
);
10230 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10231 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10232 ? scan_stmt4
: scan_stmt2
), g
);
10234 tree controlb
= create_tmp_var (boolean_type_node
);
10235 tree controlp
= create_tmp_var (ptr_type_node
);
10236 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10237 OMP_CLAUSE_DECL (nc
) = controlb
;
10238 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10240 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10241 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10242 OMP_CLAUSE_DECL (nc
) = controlp
;
10243 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10245 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10246 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10247 OMP_CLAUSE_DECL (nc
) = controlb
;
10248 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10250 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10251 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10252 OMP_CLAUSE_DECL (nc
) = controlp
;
10253 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10255 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10257 *cp1
= gimple_omp_for_clauses (stmt
);
10258 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
10259 *cp2
= gimple_omp_for_clauses (new_stmt
);
10260 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
10264 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
10265 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
10267 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
10269 gsi_remove (&input3_gsi
, true);
10270 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
10272 gsi_remove (&scan3_gsi
, true);
10273 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
10275 gsi_remove (&input4_gsi
, true);
10276 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
10278 gsi_remove (&scan4_gsi
, true);
10282 gimple_omp_set_body (scan_stmt1
, scan1_list
);
10283 gimple_omp_set_body (input_stmt2
, input2_list
);
10286 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
10288 gsi_remove (&input1_gsi
, true);
10289 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
10291 gsi_remove (&scan1_gsi
, true);
10292 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
10294 gsi_remove (&input2_gsi
, true);
10295 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
10297 gsi_remove (&scan2_gsi
, true);
10299 gimple_seq_add_seq (body_p
, clist
);
10301 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10302 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10303 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10304 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
10305 gimple_seq_add_stmt (body_p
, g
);
10306 g
= gimple_build_label (lab1
);
10307 gimple_seq_add_stmt (body_p
, g
);
10308 gimple_seq_add_seq (body_p
, thr01_list
);
10309 g
= gimple_build_goto (lab3
);
10310 gimple_seq_add_stmt (body_p
, g
);
10311 g
= gimple_build_label (lab2
);
10312 gimple_seq_add_stmt (body_p
, g
);
10313 gimple_seq_add_seq (body_p
, thrn1_list
);
10314 g
= gimple_build_label (lab3
);
10315 gimple_seq_add_stmt (body_p
, g
);
10317 g
= gimple_build_assign (ivar
, size_zero_node
);
10318 gimple_seq_add_stmt (body_p
, g
);
10320 gimple_seq_add_stmt (body_p
, stmt
);
10321 gimple_seq_add_seq (body_p
, body
);
10322 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
10325 g
= gimple_build_omp_return (true);
10326 gimple_seq_add_stmt (body_p
, g
);
10327 gimple_seq_add_seq (body_p
, mdlist
);
10329 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10330 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10331 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
10332 gimple_seq_add_stmt (body_p
, g
);
10333 g
= gimple_build_label (lab1
);
10334 gimple_seq_add_stmt (body_p
, g
);
10336 g
= omp_build_barrier (NULL
);
10337 gimple_seq_add_stmt (body_p
, g
);
10339 tree down
= create_tmp_var (unsigned_type_node
);
10340 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
10341 gimple_seq_add_stmt (body_p
, g
);
10343 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
10344 gimple_seq_add_stmt (body_p
, g
);
10346 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
10347 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
10348 gimple_seq_add_stmt (body_p
, g
);
10350 tree thread_numu
= create_tmp_var (unsigned_type_node
);
10351 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
10352 gimple_seq_add_stmt (body_p
, g
);
10354 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
10355 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
10356 build_int_cst (unsigned_type_node
, 1));
10357 gimple_seq_add_stmt (body_p
, g
);
10359 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10360 g
= gimple_build_label (lab3
);
10361 gimple_seq_add_stmt (body_p
, g
);
10363 tree twok
= create_tmp_var (unsigned_type_node
);
10364 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
10365 gimple_seq_add_stmt (body_p
, g
);
10367 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
10368 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
10369 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
10370 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
10371 gimple_seq_add_stmt (body_p
, g
);
10372 g
= gimple_build_label (lab4
);
10373 gimple_seq_add_stmt (body_p
, g
);
10374 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
10375 gimple_seq_add_stmt (body_p
, g
);
10376 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10377 gimple_seq_add_stmt (body_p
, g
);
10379 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
10380 gimple_seq_add_stmt (body_p
, g
);
10381 g
= gimple_build_label (lab6
);
10382 gimple_seq_add_stmt (body_p
, g
);
10384 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10385 gimple_seq_add_stmt (body_p
, g
);
10387 g
= gimple_build_label (lab5
);
10388 gimple_seq_add_stmt (body_p
, g
);
10390 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
10391 gimple_seq_add_stmt (body_p
, g
);
10393 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
10394 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
10395 gimple_call_set_lhs (g
, cplx
);
10396 gimple_seq_add_stmt (body_p
, g
);
10397 tree mul
= create_tmp_var (unsigned_type_node
);
10398 g
= gimple_build_assign (mul
, REALPART_EXPR
,
10399 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
10400 gimple_seq_add_stmt (body_p
, g
);
10401 tree ovf
= create_tmp_var (unsigned_type_node
);
10402 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
10403 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
10404 gimple_seq_add_stmt (body_p
, g
);
10406 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
10407 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
10408 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
10410 gimple_seq_add_stmt (body_p
, g
);
10411 g
= gimple_build_label (lab7
);
10412 gimple_seq_add_stmt (body_p
, g
);
10414 tree andv
= create_tmp_var (unsigned_type_node
);
10415 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
10416 gimple_seq_add_stmt (body_p
, g
);
10417 tree andvm1
= create_tmp_var (unsigned_type_node
);
10418 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
10419 build_minus_one_cst (unsigned_type_node
));
10420 gimple_seq_add_stmt (body_p
, g
);
10422 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
10423 gimple_seq_add_stmt (body_p
, g
);
10425 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
10426 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
10427 gimple_seq_add_stmt (body_p
, g
);
10428 g
= gimple_build_label (lab9
);
10429 gimple_seq_add_stmt (body_p
, g
);
10430 gimple_seq_add_seq (body_p
, reduc_list
);
10431 g
= gimple_build_label (lab8
);
10432 gimple_seq_add_stmt (body_p
, g
);
10434 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
10435 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
10436 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
10437 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
10439 gimple_seq_add_stmt (body_p
, g
);
10440 g
= gimple_build_label (lab10
);
10441 gimple_seq_add_stmt (body_p
, g
);
10442 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
10443 gimple_seq_add_stmt (body_p
, g
);
10444 g
= gimple_build_goto (lab12
);
10445 gimple_seq_add_stmt (body_p
, g
);
10446 g
= gimple_build_label (lab11
);
10447 gimple_seq_add_stmt (body_p
, g
);
10448 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10449 gimple_seq_add_stmt (body_p
, g
);
10450 g
= gimple_build_label (lab12
);
10451 gimple_seq_add_stmt (body_p
, g
);
10453 g
= omp_build_barrier (NULL
);
10454 gimple_seq_add_stmt (body_p
, g
);
10456 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
10458 gimple_seq_add_stmt (body_p
, g
);
10460 g
= gimple_build_label (lab2
);
10461 gimple_seq_add_stmt (body_p
, g
);
10463 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10464 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10465 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10466 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
10467 gimple_seq_add_stmt (body_p
, g
);
10468 g
= gimple_build_label (lab1
);
10469 gimple_seq_add_stmt (body_p
, g
);
10470 gimple_seq_add_seq (body_p
, thr02_list
);
10471 g
= gimple_build_goto (lab3
);
10472 gimple_seq_add_stmt (body_p
, g
);
10473 g
= gimple_build_label (lab2
);
10474 gimple_seq_add_stmt (body_p
, g
);
10475 gimple_seq_add_seq (body_p
, thrn2_list
);
10476 g
= gimple_build_label (lab3
);
10477 gimple_seq_add_stmt (body_p
, g
);
10479 g
= gimple_build_assign (ivar
, size_zero_node
);
10480 gimple_seq_add_stmt (body_p
, g
);
10481 gimple_seq_add_stmt (body_p
, new_stmt
);
10482 gimple_seq_add_seq (body_p
, new_body
);
10484 gimple_seq new_dlist
= NULL
;
10485 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10486 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10487 tree num_threadsm1
= create_tmp_var (integer_type_node
);
10488 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
10489 integer_minus_one_node
);
10490 gimple_seq_add_stmt (&new_dlist
, g
);
10491 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
10492 gimple_seq_add_stmt (&new_dlist
, g
);
10493 g
= gimple_build_label (lab1
);
10494 gimple_seq_add_stmt (&new_dlist
, g
);
10495 gimple_seq_add_seq (&new_dlist
, last_list
);
10496 g
= gimple_build_label (lab2
);
10497 gimple_seq_add_stmt (&new_dlist
, g
);
10498 gimple_seq_add_seq (&new_dlist
, *dlist
);
10499 *dlist
= new_dlist
;
10502 /* Lower code for an OMP loop directive. */
10505 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10507 tree
*rhs_p
, block
;
10508 struct omp_for_data fd
, *fdp
= NULL
;
10509 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
10511 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
10512 gimple_seq cnt_list
= NULL
, clist
= NULL
;
10513 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
10516 push_gimplify_context ();
10518 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
10520 block
= make_node (BLOCK
);
10521 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
10522 /* Replace at gsi right away, so that 'stmt' is no member
10523 of a sequence anymore as we're going to add to a different
10525 gsi_replace (gsi_p
, new_stmt
, true);
10527 /* Move declaration of temporaries in the loop body before we make
10529 omp_for_body
= gimple_omp_body (stmt
);
10530 if (!gimple_seq_empty_p (omp_for_body
)
10531 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
10534 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
10535 tree vars
= gimple_bind_vars (inner_bind
);
10536 gimple_bind_append_vars (new_stmt
, vars
);
10537 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10538 keep them on the inner_bind and it's block. */
10539 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
10540 if (gimple_bind_block (inner_bind
))
10541 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
10544 if (gimple_omp_for_combined_into_p (stmt
))
10546 omp_extract_for_data (stmt
, &fd
, NULL
);
10549 /* We need two temporaries with fd.loop.v type (istart/iend)
10550 and then (fd.collapse - 1) temporaries with the same
10551 type for count2 ... countN-1 vars if not constant. */
10553 tree type
= fd
.iter_type
;
10554 if (fd
.collapse
> 1
10555 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
10556 count
+= fd
.collapse
- 1;
10558 tree type2
= NULL_TREE
;
10560 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
10561 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
10562 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
10564 tree clauses
= *pc
;
10565 if (fd
.collapse
> 1
10567 && fd
.last_nonrect
== fd
.first_nonrect
+ 1
10568 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
10569 if (tree v
= gimple_omp_for_index (stmt
, fd
.last_nonrect
))
10570 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
10572 v
= gimple_omp_for_index (stmt
, fd
.first_nonrect
);
10573 type2
= TREE_TYPE (v
);
10579 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
10580 OMP_CLAUSE__LOOPTEMP_
);
10581 if (ctx
->simt_stmt
)
10582 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
10583 OMP_CLAUSE__LOOPTEMP_
);
10584 for (i
= 0; i
< count
+ count2
; i
++)
10589 gcc_assert (outerc
);
10590 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
10591 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
10592 OMP_CLAUSE__LOOPTEMP_
);
10596 /* If there are 2 adjacent SIMD stmts, one with _simt_
10597 clause, another without, make sure they have the same
10598 decls in _looptemp_ clauses, because the outer stmt
10599 they are combined into will look up just one inner_stmt. */
10600 if (ctx
->simt_stmt
)
10601 temp
= OMP_CLAUSE_DECL (simtc
);
10603 temp
= create_tmp_var (i
>= count
? type2
: type
);
10604 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
10606 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
10607 OMP_CLAUSE_DECL (*pc
) = temp
;
10608 pc
= &OMP_CLAUSE_CHAIN (*pc
);
10609 if (ctx
->simt_stmt
)
10610 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
10611 OMP_CLAUSE__LOOPTEMP_
);
10616 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10620 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
10621 OMP_CLAUSE_REDUCTION
);
10622 tree rtmp
= NULL_TREE
;
10625 tree type
= build_pointer_type (pointer_sized_int_node
);
10626 tree temp
= create_tmp_var (type
);
10627 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
10628 OMP_CLAUSE_DECL (c
) = temp
;
10629 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
10630 gimple_omp_for_set_clauses (stmt
, c
);
10631 lower_omp_task_reductions (ctx
, OMP_FOR
,
10632 gimple_omp_for_clauses (stmt
),
10633 &tred_ilist
, &tred_dlist
);
10635 rtmp
= make_ssa_name (type
);
10636 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
10639 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
10642 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
10644 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
10645 gimple_omp_for_pre_body (stmt
));
10647 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10649 /* Lower the header expressions. At this point, we can assume that
10650 the header is of the form:
10652 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10654 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10655 using the .omp_data_s mapping, if needed. */
10656 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
10658 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
10659 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
10661 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
10662 TREE_VEC_ELT (*rhs_p
, 1)
10663 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
10664 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
10665 TREE_VEC_ELT (*rhs_p
, 2)
10666 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
10668 else if (!is_gimple_min_invariant (*rhs_p
))
10669 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10670 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10671 recompute_tree_invariant_for_addr_expr (*rhs_p
);
10673 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
10674 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
10676 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
10677 TREE_VEC_ELT (*rhs_p
, 1)
10678 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
10679 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
10680 TREE_VEC_ELT (*rhs_p
, 2)
10681 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
10683 else if (!is_gimple_min_invariant (*rhs_p
))
10684 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10685 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10686 recompute_tree_invariant_for_addr_expr (*rhs_p
);
10688 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
10689 if (!is_gimple_min_invariant (*rhs_p
))
10690 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10693 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
10695 gimple_seq_add_seq (&body
, cnt_list
);
10697 /* Once lowered, extract the bounds and clauses. */
10698 omp_extract_for_data (stmt
, &fd
, NULL
);
10700 if (is_gimple_omp_oacc (ctx
->stmt
)
10701 && !ctx_in_oacc_kernels_region (ctx
))
10702 lower_oacc_head_tail (gimple_location (stmt
),
10703 gimple_omp_for_clauses (stmt
),
10704 &oacc_head
, &oacc_tail
, ctx
);
10706 /* Add OpenACC partitioning and reduction markers just before the loop. */
10708 gimple_seq_add_seq (&body
, oacc_head
);
10710 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
10712 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10713 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10714 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10715 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10717 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
10718 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
10719 OMP_CLAUSE_LINEAR_STEP (c
)
10720 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
10724 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
10725 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10726 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
10729 gimple_seq_add_stmt (&body
, stmt
);
10730 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
10733 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
10736 /* After the loop, add exit clauses. */
10737 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
10741 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
10742 gcall
*g
= gimple_build_call (fndecl
, 0);
10743 gimple_seq_add_stmt (&body
, g
);
10744 gimple_seq_add_seq (&body
, clist
);
10745 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
10746 g
= gimple_build_call (fndecl
, 0);
10747 gimple_seq_add_stmt (&body
, g
);
10750 if (ctx
->cancellable
)
10751 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
10753 gimple_seq_add_seq (&body
, dlist
);
10757 gimple_seq_add_seq (&tred_ilist
, body
);
10761 body
= maybe_catch_exception (body
);
10763 /* Region exit marker goes at the end of the loop body. */
10764 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
10765 gimple_seq_add_stmt (&body
, g
);
10767 gimple_seq_add_seq (&body
, tred_dlist
);
10769 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
10772 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
10774 /* Add OpenACC joining and reduction markers just after the loop. */
10776 gimple_seq_add_seq (&body
, oacc_tail
);
10778 pop_gimplify_context (new_stmt
);
10780 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
10781 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
10782 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
10783 if (BLOCK_VARS (block
))
10784 TREE_USED (block
) = 1;
10786 gimple_bind_set_body (new_stmt
, body
);
10787 gimple_omp_set_body (stmt
, NULL
);
10788 gimple_omp_for_set_pre_body (stmt
, NULL
);
10791 /* Callback for walk_stmts. Check if the current statement only contains
10792 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10795 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
10796 bool *handled_ops_p
,
10797 struct walk_stmt_info
*wi
)
10799 int *info
= (int *) wi
->info
;
10800 gimple
*stmt
= gsi_stmt (*gsi_p
);
10802 *handled_ops_p
= true;
10803 switch (gimple_code (stmt
))
10809 case GIMPLE_OMP_FOR
:
10810 case GIMPLE_OMP_SECTIONS
:
10811 *info
= *info
== 0 ? 1 : -1;
10820 struct omp_taskcopy_context
10822 /* This field must be at the beginning, as we do "inheritance": Some
10823 callback functions for tree-inline.c (e.g., omp_copy_decl)
10824 receive a copy_body_data pointer that is up-casted to an
10825 omp_context pointer. */
10831 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
10833 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
10835 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
10836 return create_tmp_var (TREE_TYPE (var
));
/* Build a new RECORD_TYPE mirroring ORIG_TYPE with every field's type,
   size and offsets remapped through TCCTX->cb, and record the old->new
   field mapping in TCCTX->cb.decl_map.  NOTE(review): opening brace,
   loop braces and the final return are missing from this extraction;
   the surviving code text is byte-identical.  */
10842 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
10844 tree name
, new_fields
= NULL
, type
, f
;
10846 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
10847 name
= DECL_NAME (TYPE_NAME (orig_type
));
10848 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
10849 TYPE_DECL
, name
, type
);
10850 TYPE_NAME (type
) = name
;
/* Copy each field, remapping its (possibly variably-modified) type,
   size and offset into the destination function.  */
10852 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
10854 tree new_f
= copy_node (f
);
10855 DECL_CONTEXT (new_f
) = type
;
10856 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
10857 TREE_CHAIN (new_f
) = new_fields
;
10858 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10859 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10860 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
10862 new_fields
= new_f
;
10863 tcctx
->cb
.decl_map
->put (f
, new_f
);
/* Fields were chained in reverse order above; restore the original
   order and lay out the new record.  */
10865 TYPE_FIELDS (type
) = nreverse (new_fields
);
10866 layout_type (type
);
10870 /* Create task copyfn. */
/* Build the body of the task copy function of TASK_STMT: the function
   that copies shared-variable pointers and firstprivate data from the
   sender record (reached through SARG) into the task's own record
   (reached through ARG).  NOTE(review): this is a lossy extraction --
   braces, `break's, `else' branches and several statements are missing;
   the surviving code text is kept byte-identical, comments only added.  */
10873 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
10875 struct function
*child_cfun
;
10876 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
10877 tree record_type
, srecord_type
, bind
, list
;
10878 bool record_needs_remap
= false, srecord_needs_remap
= false;
10880 struct omp_taskcopy_context tcctx
;
10881 location_t loc
= gimple_location (task_stmt
);
10882 size_t looptempno
= 0;
10884 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
10885 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
10886 gcc_assert (child_cfun
->cfg
== NULL
);
10887 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
10889 /* Reset DECL_CONTEXT on function arguments. */
10890 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
10891 DECL_CONTEXT (t
) = child_fn
;
10893 /* Populate the function. */
10894 push_gimplify_context ();
10895 push_cfun (child_cfun
);
10897 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
10898 TREE_SIDE_EFFECTS (bind
) = 1;
10900 DECL_SAVED_TREE (child_fn
) = bind
;
10901 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
10903 /* Remap src and dst argument types if needed. */
10904 record_type
= ctx
->record_type
;
10905 srecord_type
= ctx
->srecord_type
;
10906 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
10907 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10909 record_needs_remap
= true;
10912 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
10913 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10915 srecord_needs_remap
= true;
/* Record types referring to VLA sizes must be remapped into the child
   function before they can be used there.  */
10919 if (record_needs_remap
|| srecord_needs_remap
)
10921 memset (&tcctx
, '\0', sizeof (tcctx
));
10922 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
10923 tcctx
.cb
.dst_fn
= child_fn
;
10924 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
10925 gcc_checking_assert (tcctx
.cb
.src_node
);
10926 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
10927 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
10928 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
10929 tcctx
.cb
.eh_lp_nr
= 0;
10930 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
10931 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
10934 if (record_needs_remap
)
10935 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
10936 if (srecord_needs_remap
)
10937 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
10940 tcctx
.cb
.decl_map
= NULL
;
10942 arg
= DECL_ARGUMENTS (child_fn
);
10943 TREE_TYPE (arg
) = build_pointer_type (record_type
);
10944 sarg
= DECL_CHAIN (arg
);
10945 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
10947 /* First pass: initialize temporaries used in record_type and srecord_type
10948 sizes and field offsets. */
10949 if (tcctx
.cb
.decl_map
)
10950 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10951 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10955 decl
= OMP_CLAUSE_DECL (c
);
10956 p
= tcctx
.cb
.decl_map
->get (decl
);
10959 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10960 sf
= (tree
) n
->value
;
10961 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10962 src
= build_simple_mem_ref_loc (loc
, sarg
);
10963 src
= omp_build_component_ref (src
, sf
);
10964 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
10965 append_to_statement_list (t
, &list
);
10968 /* Second pass: copy shared var pointers and copy construct non-VLA
10969 firstprivate vars. */
10970 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10971 switch (OMP_CLAUSE_CODE (c
))
10973 splay_tree_key key
;
10974 case OMP_CLAUSE_SHARED
:
10975 decl
= OMP_CLAUSE_DECL (c
);
10976 key
= (splay_tree_key
) decl
;
10977 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
10978 key
= (splay_tree_key
) &DECL_UID (decl
);
10979 n
= splay_tree_lookup (ctx
->field_map
, key
);
10982 f
= (tree
) n
->value
;
10983 if (tcctx
.cb
.decl_map
)
10984 f
= *tcctx
.cb
.decl_map
->get (f
);
10985 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
10986 sf
= (tree
) n
->value
;
10987 if (tcctx
.cb
.decl_map
)
10988 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10989 src
= build_simple_mem_ref_loc (loc
, sarg
);
10990 src
= omp_build_component_ref (src
, sf
);
10991 dst
= build_simple_mem_ref_loc (loc
, arg
);
10992 dst
= omp_build_component_ref (dst
, f
);
10993 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10994 append_to_statement_list (t
, &list
);
10996 case OMP_CLAUSE_REDUCTION
:
10997 case OMP_CLAUSE_IN_REDUCTION
:
10998 decl
= OMP_CLAUSE_DECL (c
);
/* Strip a MEM_REF (and any address arithmetic under it) so KEY is the
   underlying decl.  */
10999 if (TREE_CODE (decl
) == MEM_REF
)
11001 decl
= TREE_OPERAND (decl
, 0);
11002 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
11003 decl
= TREE_OPERAND (decl
, 0);
11004 if (TREE_CODE (decl
) == INDIRECT_REF
11005 || TREE_CODE (decl
) == ADDR_EXPR
)
11006 decl
= TREE_OPERAND (decl
, 0);
11008 key
= (splay_tree_key
) decl
;
11009 n
= splay_tree_lookup (ctx
->field_map
, key
);
11012 f
= (tree
) n
->value
;
11013 if (tcctx
.cb
.decl_map
)
11014 f
= *tcctx
.cb
.decl_map
->get (f
);
11015 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
11016 sf
= (tree
) n
->value
;
11017 if (tcctx
.cb
.decl_map
)
11018 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11019 src
= build_simple_mem_ref_loc (loc
, sarg
);
11020 src
= omp_build_component_ref (src
, sf
);
11021 if (decl
!= OMP_CLAUSE_DECL (c
)
11022 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
11023 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
11024 src
= build_simple_mem_ref_loc (loc
, src
);
11025 dst
= build_simple_mem_ref_loc (loc
, arg
);
11026 dst
= omp_build_component_ref (dst
, f
);
11027 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
11028 append_to_statement_list (t
, &list
);
11030 case OMP_CLAUSE__LOOPTEMP_
:
11031 /* Fields for first two _looptemp_ clauses are initialized by
11032 GOMP_taskloop*, the rest are handled like firstprivate. */
11033 if (looptempno
< 2)
11039 case OMP_CLAUSE__REDUCTEMP_
:
11040 case OMP_CLAUSE_FIRSTPRIVATE
:
11041 decl
= OMP_CLAUSE_DECL (c
);
/* VLA firstprivates are handled in the last pass below.  */
11042 if (is_variable_sized (decl
))
11044 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
11047 f
= (tree
) n
->value
;
11048 if (tcctx
.cb
.decl_map
)
11049 f
= *tcctx
.cb
.decl_map
->get (f
);
11050 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
11053 sf
= (tree
) n
->value
;
11054 if (tcctx
.cb
.decl_map
)
11055 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11056 src
= build_simple_mem_ref_loc (loc
, sarg
);
11057 src
= omp_build_component_ref (src
, sf
);
11058 if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
11059 src
= build_simple_mem_ref_loc (loc
, src
);
11063 dst
= build_simple_mem_ref_loc (loc
, arg
);
11064 dst
= omp_build_component_ref (dst
, f
);
/* Plain assignment for the temp clauses; firstprivate uses the
   language's copy constructor hook.  */
11065 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
11066 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
11068 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
11069 append_to_statement_list (t
, &list
);
11071 case OMP_CLAUSE_PRIVATE
:
11072 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
11074 decl
= OMP_CLAUSE_DECL (c
);
11075 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
11076 f
= (tree
) n
->value
;
11077 if (tcctx
.cb
.decl_map
)
11078 f
= *tcctx
.cb
.decl_map
->get (f
);
11079 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
11082 sf
= (tree
) n
->value
;
11083 if (tcctx
.cb
.decl_map
)
11084 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11085 src
= build_simple_mem_ref_loc (loc
, sarg
);
11086 src
= omp_build_component_ref (src
, sf
);
11087 if (use_pointer_for_field (decl
, NULL
))
11088 src
= build_simple_mem_ref_loc (loc
, src
);
11092 dst
= build_simple_mem_ref_loc (loc
, arg
);
11093 dst
= omp_build_component_ref (dst
, f
);
11094 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
11095 append_to_statement_list (t
, &list
);
11101 /* Last pass: handle VLA firstprivates. */
11102 if (tcctx
.cb
.decl_map
)
11103 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11104 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11108 decl
= OMP_CLAUSE_DECL (c
);
11109 if (!is_variable_sized (decl
))
11111 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
11114 f
= (tree
) n
->value
;
11115 f
= *tcctx
.cb
.decl_map
->get (f
);
/* A VLA firstprivate has a DECL_VALUE_EXPR of the form *ptr; copy via
   that pointer and then store the copied object's address back.  */
11116 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
11117 ind
= DECL_VALUE_EXPR (decl
);
11118 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
11119 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
11120 n
= splay_tree_lookup (ctx
->sfield_map
,
11121 (splay_tree_key
) TREE_OPERAND (ind
, 0));
11122 sf
= (tree
) n
->value
;
11123 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11124 src
= build_simple_mem_ref_loc (loc
, sarg
);
11125 src
= omp_build_component_ref (src
, sf
);
11126 src
= build_simple_mem_ref_loc (loc
, src
);
11127 dst
= build_simple_mem_ref_loc (loc
, arg
);
11128 dst
= omp_build_component_ref (dst
, f
);
11129 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
11130 append_to_statement_list (t
, &list
);
11131 n
= splay_tree_lookup (ctx
->field_map
,
11132 (splay_tree_key
) TREE_OPERAND (ind
, 0));
11133 df
= (tree
) n
->value
;
11134 df
= *tcctx
.cb
.decl_map
->get (df
);
11135 ptr
= build_simple_mem_ref_loc (loc
, arg
);
11136 ptr
= omp_build_component_ref (ptr
, df
);
11137 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
11138 build_fold_addr_expr_loc (loc
, dst
));
11139 append_to_statement_list (t
, &list
);
/* Finish the copyfn: append the return, release the decl map and
   install the statement list as the BIND_EXPR body.  */
11142 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
11143 append_to_statement_list (t
, &list
);
11145 if (tcctx
.cb
.decl_map
)
11146 delete tcctx
.cb
.decl_map
;
11147 pop_gimplify_context (NULL
);
11148 BIND_EXPR_BODY (bind
) = list
;
/* Lower OMP_CLAUSE_DEPEND clauses in *PCLAUSES into a runtime dependence
   array: leading count slots followed by the gimplified addresses of the
   dependences, grouped by depend kind.  The array's initialization is
   appended to *ISEQ, a final clobber of it to *OSEQ, and *PCLAUSES is
   replaced by a single OMP_CLAUSE_DEPEND_LAST clause holding the array's
   address.  NOTE(review): lossy extraction -- braces, counter increments
   and several statements are missing; surviving text kept byte-identical.  */
11153 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
11157 size_t cnt
[4] = { 0, 0, 0, 0 }, idx
= 2, i
;
11159 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
11160 gcc_assert (clauses
);
/* First count the dependences of each kind (increments elided in this
   extraction).  */
11161 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11162 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
11163 switch (OMP_CLAUSE_DEPEND_KIND (c
))
11165 case OMP_CLAUSE_DEPEND_LAST
:
11166 /* Lowering already done at gimplification. */
11168 case OMP_CLAUSE_DEPEND_IN
:
11171 case OMP_CLAUSE_DEPEND_OUT
:
11172 case OMP_CLAUSE_DEPEND_INOUT
:
11175 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
11178 case OMP_CLAUSE_DEPEND_DEPOBJ
:
11181 case OMP_CLAUSE_DEPEND_SOURCE
:
11182 case OMP_CLAUSE_DEPEND_SINK
:
11185 gcc_unreachable ();
11187 if (cnt
[1] || cnt
[3])
11189 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3];
11190 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
);
11191 tree array
= create_tmp_var (type
);
11192 TREE_ADDRESSABLE (array
) = 1;
11193 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
/* Fill the leading slots with the total and per-kind counts.  */
11197 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
11198 gimple_seq_add_stmt (iseq
, g
);
11199 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
11202 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
11203 gimple_seq_add_stmt (iseq
, g
);
11204 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
11206 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
11207 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
11208 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
11209 gimple_seq_add_stmt (iseq
, g
);
/* Now store, grouped by kind, the address of every dependence.  */
11211 for (i
= 0; i
< 4; i
++)
11215 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11216 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
11220 switch (OMP_CLAUSE_DEPEND_KIND (c
))
11222 case OMP_CLAUSE_DEPEND_IN
:
11226 case OMP_CLAUSE_DEPEND_OUT
:
11227 case OMP_CLAUSE_DEPEND_INOUT
:
11231 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
11235 case OMP_CLAUSE_DEPEND_DEPOBJ
:
11240 gcc_unreachable ();
11242 tree t
= OMP_CLAUSE_DECL (c
);
11243 t
= fold_convert (ptr_type_node
, t
);
11244 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
11245 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
11246 NULL_TREE
, NULL_TREE
);
11247 g
= gimple_build_assign (r
, t
);
11248 gimple_seq_add_stmt (iseq
, g
);
/* Replace the depend clauses by one DEPEND_LAST clause pointing at the
   array, and clobber the array after the construct.  */
11251 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
11252 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
11253 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
11254 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
11256 tree clobber
= build_clobber (type
);
11257 g
= gimple_build_assign (array
, clobber
);
11258 gimple_seq_add_stmt (oseq
, g
);
11261 /* Lower the OpenMP parallel or task directive in the current statement
11262 in GSI_P. CTX holds context information for the directive. */
/* NOTE(review): lossy extraction -- braces, `else' branches and several
   statements of this function are missing; surviving code text kept
   byte-identical, comments only added.  */
11265 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11269 gimple
*stmt
= gsi_stmt (*gsi_p
);
11270 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
11271 gimple_seq par_body
;
11272 location_t loc
= gimple_location (stmt
);
11274 clauses
= gimple_omp_taskreg_clauses (stmt
);
11275 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11276 && gimple_omp_task_taskwait_p (stmt
))
11284 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
11285 par_body
= gimple_bind_body (par_bind
);
11287 child_fn
= ctx
->cb
.dst_fn
;
/* Detect parallel regions whose body is exactly one workshare construct,
   so the expander can use the combined GOMP entry points.  */
11288 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
11289 && !gimple_omp_parallel_combined_p (stmt
))
11291 struct walk_stmt_info wi
;
11294 memset (&wi
, 0, sizeof (wi
));
11296 wi
.val_only
= true;
11297 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
11299 gimple_omp_parallel_set_combined_p (stmt
, true);
/* Tasks with depend clauses get an extra wrapping bind holding the
   lowered dependence array setup/teardown.  */
11301 gimple_seq dep_ilist
= NULL
;
11302 gimple_seq dep_olist
= NULL
;
11303 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11304 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11306 push_gimplify_context ();
11307 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11308 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
11309 &dep_ilist
, &dep_olist
);
/* A stand-alone taskwait-with-depend needs no body lowering; wrap it
   and return early (closing statements elided in this extraction).  */
11312 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11313 && gimple_omp_task_taskwait_p (stmt
))
11317 gsi_replace (gsi_p
, dep_bind
, true);
11318 gimple_bind_add_seq (dep_bind
, dep_ilist
);
11319 gimple_bind_add_stmt (dep_bind
, stmt
);
11320 gimple_bind_add_seq (dep_bind
, dep_olist
);
11321 pop_gimplify_context (dep_bind
);
11326 if (ctx
->srecord_type
)
11327 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
11329 gimple_seq tskred_ilist
= NULL
;
11330 gimple_seq tskred_olist
= NULL
;
11331 if ((is_task_ctx (ctx
)
11332 && gimple_omp_task_taskloop_p (ctx
->stmt
)
11333 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
11334 OMP_CLAUSE_REDUCTION
))
11335 || (is_parallel_ctx (ctx
)
11336 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
11337 OMP_CLAUSE__REDUCTEMP_
)))
11339 if (dep_bind
== NULL
)
11341 push_gimplify_context ();
11342 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11344 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
11346 gimple_omp_taskreg_clauses (ctx
->stmt
),
11347 &tskred_ilist
, &tskred_olist
);
/* Lower the construct's body and its data-sharing clauses.  */
11350 push_gimplify_context ();
11352 gimple_seq par_olist
= NULL
;
11353 gimple_seq par_ilist
= NULL
;
11354 gimple_seq par_rlist
= NULL
;
11355 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
11356 lower_omp (&par_body
, ctx
);
11357 if (gimple_code (stmt
) != GIMPLE_OMP_TASK
)
11358 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
11360 /* Declare all the variables created by mapping and the variables
11361 declared in the scope of the parallel body. */
11362 record_vars_into (ctx
->block_vars
, child_fn
);
11363 maybe_remove_omp_member_access_dummy_vars (par_bind
);
11364 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
11366 if (ctx
->record_type
)
11369 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
11370 : ctx
->record_type
, ".omp_data_o");
11371 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11372 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11373 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
11376 gimple_seq olist
= NULL
;
11377 gimple_seq ilist
= NULL
;
11378 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
11379 lower_send_shared_vars (&ilist
, &olist
, ctx
);
11381 if (ctx
->record_type
)
11383 tree clobber
= build_clobber (TREE_TYPE (ctx
->sender_decl
));
11384 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
11388 /* Once all the expansions are done, sequence all the different
11389 fragments inside gimple_omp_body. */
11391 gimple_seq new_body
= NULL
;
11393 if (ctx
->record_type
)
11395 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
11396 /* fixup_child_record_type might have changed receiver_decl's type. */
11397 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
11398 gimple_seq_add_stmt (&new_body
,
11399 gimple_build_assign (ctx
->receiver_decl
, t
));
11402 gimple_seq_add_seq (&new_body
, par_ilist
);
11403 gimple_seq_add_seq (&new_body
, par_body
);
11404 gimple_seq_add_seq (&new_body
, par_rlist
);
11405 if (ctx
->cancellable
)
11406 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
11407 gimple_seq_add_seq (&new_body
, par_olist
);
11408 new_body
= maybe_catch_exception (new_body
);
11409 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
11410 gimple_seq_add_stmt (&new_body
,
11411 gimple_build_omp_continue (integer_zero_node
,
11412 integer_zero_node
));
11413 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
11414 gimple_omp_set_body (stmt
, new_body
);
11416 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
11417 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11419 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
11420 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
11421 gimple_bind_add_seq (bind
, ilist
);
11422 gimple_bind_add_stmt (bind
, stmt
);
11423 gimple_bind_add_seq (bind
, olist
);
11425 pop_gimplify_context (NULL
);
/* If a dependence/reduction wrapper bind exists, nest everything
   inside it.  */
11429 gimple_bind_add_seq (dep_bind
, dep_ilist
);
11430 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
11431 gimple_bind_add_stmt (dep_bind
, bind
);
11432 gimple_bind_add_seq (dep_bind
, tskred_olist
);
11433 gimple_bind_add_seq (dep_bind
, dep_olist
);
11434 pop_gimplify_context (dep_bind
);
11438 /* Lower the GIMPLE_OMP_TARGET in the current statement
11439 in GSI_P. CTX holds context information for the directive. */
11442 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11445 tree child_fn
, t
, c
;
11446 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
11447 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
11448 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
11449 location_t loc
= gimple_location (stmt
);
11450 bool offloaded
, data_region
;
11451 unsigned int map_cnt
= 0;
11453 offloaded
= is_gimple_omp_offloaded (stmt
);
11454 switch (gimple_omp_target_kind (stmt
))
11456 case GF_OMP_TARGET_KIND_REGION
:
11457 case GF_OMP_TARGET_KIND_UPDATE
:
11458 case GF_OMP_TARGET_KIND_ENTER_DATA
:
11459 case GF_OMP_TARGET_KIND_EXIT_DATA
:
11460 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
11461 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
11462 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
11463 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
11464 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
11465 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
11466 data_region
= false;
11468 case GF_OMP_TARGET_KIND_DATA
:
11469 case GF_OMP_TARGET_KIND_OACC_DATA
:
11470 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
11471 data_region
= true;
11474 gcc_unreachable ();
11477 clauses
= gimple_omp_target_clauses (stmt
);
11479 gimple_seq dep_ilist
= NULL
;
11480 gimple_seq dep_olist
= NULL
;
11481 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11483 push_gimplify_context ();
11484 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11485 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
11486 &dep_ilist
, &dep_olist
);
11493 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
11494 tgt_body
= gimple_bind_body (tgt_bind
);
11496 else if (data_region
)
11497 tgt_body
= gimple_omp_body (stmt
);
11498 child_fn
= ctx
->cb
.dst_fn
;
11500 push_gimplify_context ();
11503 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11504 switch (OMP_CLAUSE_CODE (c
))
11510 case OMP_CLAUSE_MAP
:
11512 /* First check what we're prepared to handle in the following. */
11513 switch (OMP_CLAUSE_MAP_KIND (c
))
11515 case GOMP_MAP_ALLOC
:
11517 case GOMP_MAP_FROM
:
11518 case GOMP_MAP_TOFROM
:
11519 case GOMP_MAP_POINTER
:
11520 case GOMP_MAP_TO_PSET
:
11521 case GOMP_MAP_DELETE
:
11522 case GOMP_MAP_RELEASE
:
11523 case GOMP_MAP_ALWAYS_TO
:
11524 case GOMP_MAP_ALWAYS_FROM
:
11525 case GOMP_MAP_ALWAYS_TOFROM
:
11526 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
11527 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
11528 case GOMP_MAP_STRUCT
:
11529 case GOMP_MAP_ALWAYS_POINTER
:
11530 case GOMP_MAP_ATTACH
:
11531 case GOMP_MAP_DETACH
:
11533 case GOMP_MAP_IF_PRESENT
:
11534 case GOMP_MAP_FORCE_ALLOC
:
11535 case GOMP_MAP_FORCE_TO
:
11536 case GOMP_MAP_FORCE_FROM
:
11537 case GOMP_MAP_FORCE_TOFROM
:
11538 case GOMP_MAP_FORCE_PRESENT
:
11539 case GOMP_MAP_FORCE_DEVICEPTR
:
11540 case GOMP_MAP_DEVICE_RESIDENT
:
11541 case GOMP_MAP_LINK
:
11542 case GOMP_MAP_FORCE_DETACH
:
11543 gcc_assert (is_gimple_omp_oacc (stmt
));
11546 gcc_unreachable ();
11550 case OMP_CLAUSE_TO
:
11551 case OMP_CLAUSE_FROM
:
11553 var
= OMP_CLAUSE_DECL (c
);
11556 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
11557 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11558 && (OMP_CLAUSE_MAP_KIND (c
)
11559 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
11564 if (DECL_SIZE (var
)
11565 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
11567 tree var2
= DECL_VALUE_EXPR (var
);
11568 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
11569 var2
= TREE_OPERAND (var2
, 0);
11570 gcc_assert (DECL_P (var2
));
11575 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11576 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11577 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
11579 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11581 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
11582 && varpool_node::get_create (var
)->offloadable
)
11585 tree type
= build_pointer_type (TREE_TYPE (var
));
11586 tree new_var
= lookup_decl (var
, ctx
);
11587 x
= create_tmp_var_raw (type
, get_name (new_var
));
11588 gimple_add_tmp_var (x
);
11589 x
= build_simple_mem_ref (x
);
11590 SET_DECL_VALUE_EXPR (new_var
, x
);
11591 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11596 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11597 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
11598 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
11599 && is_omp_target (stmt
))
11601 gcc_assert (maybe_lookup_field (c
, ctx
));
11606 if (!maybe_lookup_field (var
, ctx
))
11609 /* Don't remap compute constructs' reduction variables, because the
11610 intermediate result must be local to each gang. */
11611 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11612 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
11614 x
= build_receiver_ref (var
, true, ctx
);
11615 tree new_var
= lookup_decl (var
, ctx
);
11617 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11618 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11619 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11620 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11621 x
= build_simple_mem_ref (x
);
11622 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11624 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11625 if (omp_is_reference (new_var
)
11626 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
11627 || DECL_BY_REFERENCE (var
)))
11629 /* Create a local object to hold the instance
11631 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
11632 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
11633 tree inst
= create_tmp_var (type
, id
);
11634 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
11635 x
= build_fold_addr_expr (inst
);
11637 gimplify_assign (new_var
, x
, &fplist
);
11639 else if (DECL_P (new_var
))
11641 SET_DECL_VALUE_EXPR (new_var
, x
);
11642 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11645 gcc_unreachable ();
11650 case OMP_CLAUSE_FIRSTPRIVATE
:
11651 if (is_oacc_parallel_or_serial (ctx
))
11652 goto oacc_firstprivate
;
11654 var
= OMP_CLAUSE_DECL (c
);
11655 if (!omp_is_reference (var
)
11656 && !is_gimple_reg_type (TREE_TYPE (var
)))
11658 tree new_var
= lookup_decl (var
, ctx
);
11659 if (is_variable_sized (var
))
11661 tree pvar
= DECL_VALUE_EXPR (var
);
11662 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11663 pvar
= TREE_OPERAND (pvar
, 0);
11664 gcc_assert (DECL_P (pvar
));
11665 tree new_pvar
= lookup_decl (pvar
, ctx
);
11666 x
= build_fold_indirect_ref (new_pvar
);
11667 TREE_THIS_NOTRAP (x
) = 1;
11670 x
= build_receiver_ref (var
, true, ctx
);
11671 SET_DECL_VALUE_EXPR (new_var
, x
);
11672 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11676 case OMP_CLAUSE_PRIVATE
:
11677 if (is_gimple_omp_oacc (ctx
->stmt
))
11679 var
= OMP_CLAUSE_DECL (c
);
11680 if (is_variable_sized (var
))
11682 tree new_var
= lookup_decl (var
, ctx
);
11683 tree pvar
= DECL_VALUE_EXPR (var
);
11684 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11685 pvar
= TREE_OPERAND (pvar
, 0);
11686 gcc_assert (DECL_P (pvar
));
11687 tree new_pvar
= lookup_decl (pvar
, ctx
);
11688 x
= build_fold_indirect_ref (new_pvar
);
11689 TREE_THIS_NOTRAP (x
) = 1;
11690 SET_DECL_VALUE_EXPR (new_var
, x
);
11691 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11695 case OMP_CLAUSE_USE_DEVICE_PTR
:
11696 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11697 case OMP_CLAUSE_IS_DEVICE_PTR
:
11698 var
= OMP_CLAUSE_DECL (c
);
11700 if (is_variable_sized (var
))
11702 tree new_var
= lookup_decl (var
, ctx
);
11703 tree pvar
= DECL_VALUE_EXPR (var
);
11704 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11705 pvar
= TREE_OPERAND (pvar
, 0);
11706 gcc_assert (DECL_P (pvar
));
11707 tree new_pvar
= lookup_decl (pvar
, ctx
);
11708 x
= build_fold_indirect_ref (new_pvar
);
11709 TREE_THIS_NOTRAP (x
) = 1;
11710 SET_DECL_VALUE_EXPR (new_var
, x
);
11711 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11713 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
11714 && !omp_is_reference (var
)
11715 && !omp_is_allocatable_or_ptr (var
)
11716 && !lang_hooks
.decls
.omp_array_data (var
, true))
11717 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11719 tree new_var
= lookup_decl (var
, ctx
);
11720 tree type
= build_pointer_type (TREE_TYPE (var
));
11721 x
= create_tmp_var_raw (type
, get_name (new_var
));
11722 gimple_add_tmp_var (x
);
11723 x
= build_simple_mem_ref (x
);
11724 SET_DECL_VALUE_EXPR (new_var
, x
);
11725 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11729 tree new_var
= lookup_decl (var
, ctx
);
11730 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
11731 gimple_add_tmp_var (x
);
11732 SET_DECL_VALUE_EXPR (new_var
, x
);
11733 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11740 target_nesting_level
++;
11741 lower_omp (&tgt_body
, ctx
);
11742 target_nesting_level
--;
11744 else if (data_region
)
11745 lower_omp (&tgt_body
, ctx
);
11749 /* Declare all the variables created by mapping and the variables
11750 declared in the scope of the target body. */
11751 record_vars_into (ctx
->block_vars
, child_fn
);
11752 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
11753 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
11758 if (ctx
->record_type
)
11761 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
11762 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11763 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11764 t
= make_tree_vec (3);
11765 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
11766 TREE_VEC_ELT (t
, 1)
11767 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
11768 ".omp_data_sizes");
11769 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
11770 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
11771 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
11772 tree tkind_type
= short_unsigned_type_node
;
11773 int talign_shift
= 8;
11774 TREE_VEC_ELT (t
, 2)
11775 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
11776 ".omp_data_kinds");
11777 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
11778 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
11779 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
11780 gimple_omp_target_set_data_arg (stmt
, t
);
11782 vec
<constructor_elt
, va_gc
> *vsize
;
11783 vec
<constructor_elt
, va_gc
> *vkind
;
11784 vec_alloc (vsize
, map_cnt
);
11785 vec_alloc (vkind
, map_cnt
);
11786 unsigned int map_idx
= 0;
11788 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11789 switch (OMP_CLAUSE_CODE (c
))
11791 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
11792 unsigned int talign
;
11797 case OMP_CLAUSE_MAP
:
11798 case OMP_CLAUSE_TO
:
11799 case OMP_CLAUSE_FROM
:
11800 oacc_firstprivate_map
:
11802 ovar
= OMP_CLAUSE_DECL (c
);
11803 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11804 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11805 || (OMP_CLAUSE_MAP_KIND (c
)
11806 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
11808 if (!DECL_P (ovar
))
11810 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11811 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
11813 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
11814 == get_base_address (ovar
));
11815 nc
= OMP_CLAUSE_CHAIN (c
);
11816 ovar
= OMP_CLAUSE_DECL (nc
);
11820 tree x
= build_sender_ref (ovar
, ctx
);
11822 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
11823 gimplify_assign (x
, v
, &ilist
);
11829 if (DECL_SIZE (ovar
)
11830 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
11832 tree ovar2
= DECL_VALUE_EXPR (ovar
);
11833 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
11834 ovar2
= TREE_OPERAND (ovar2
, 0);
11835 gcc_assert (DECL_P (ovar2
));
11838 if (!maybe_lookup_field (ovar
, ctx
)
11839 && !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11840 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
11841 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)))
11845 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
11846 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
11847 talign
= DECL_ALIGN_UNIT (ovar
);
11850 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11851 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
11852 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
11853 && is_omp_target (stmt
))
11855 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11856 x
= build_sender_ref (c
, ctx
);
11857 gimplify_assign (x
, build_fold_addr_expr (var
), &ilist
);
11861 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11862 x
= build_sender_ref (ovar
, ctx
);
11864 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11865 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11866 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11867 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
11869 gcc_assert (offloaded
);
11871 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
11872 mark_addressable (avar
);
11873 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
11874 talign
= DECL_ALIGN_UNIT (avar
);
11875 avar
= build_fold_addr_expr (avar
);
11876 gimplify_assign (x
, avar
, &ilist
);
11878 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11880 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11881 if (!omp_is_reference (var
))
11883 if (is_gimple_reg (var
)
11884 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11885 TREE_NO_WARNING (var
) = 1;
11886 var
= build_fold_addr_expr (var
);
11889 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11890 gimplify_assign (x
, var
, &ilist
);
11892 else if (is_gimple_reg (var
))
11894 gcc_assert (offloaded
);
11895 tree avar
= create_tmp_var (TREE_TYPE (var
));
11896 mark_addressable (avar
);
11897 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
11898 if (GOMP_MAP_COPY_TO_P (map_kind
)
11899 || map_kind
== GOMP_MAP_POINTER
11900 || map_kind
== GOMP_MAP_TO_PSET
11901 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11903 /* If we need to initialize a temporary
11904 with VAR because it is not addressable, and
11905 the variable hasn't been initialized yet, then
11906 we'll get a warning for the store to avar.
11907 Don't warn in that case, the mapping might
11909 TREE_NO_WARNING (var
) = 1;
11910 gimplify_assign (avar
, var
, &ilist
);
11912 avar
= build_fold_addr_expr (avar
);
11913 gimplify_assign (x
, avar
, &ilist
);
11914 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
11915 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11916 && !TYPE_READONLY (TREE_TYPE (var
)))
11918 x
= unshare_expr (x
);
11919 x
= build_simple_mem_ref (x
);
11920 gimplify_assign (var
, x
, &olist
);
11925 /* While MAP is handled explicitly by the FE,
11926 for 'target update', only the identified is passed. */
11927 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
11928 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
11929 && (omp_is_allocatable_or_ptr (var
)
11930 && omp_check_optional_argument (var
, false)))
11931 var
= build_fold_indirect_ref (var
);
11932 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
11933 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
11934 || (!omp_is_allocatable_or_ptr (var
)
11935 && !omp_check_optional_argument (var
, false)))
11936 var
= build_fold_addr_expr (var
);
11937 gimplify_assign (x
, var
, &ilist
);
11941 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11943 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11944 s
= TREE_TYPE (ovar
);
11945 if (TREE_CODE (s
) == REFERENCE_TYPE
11946 || omp_check_optional_argument (ovar
, false))
11948 s
= TYPE_SIZE_UNIT (s
);
11951 s
= OMP_CLAUSE_SIZE (c
);
11952 if (s
== NULL_TREE
)
11953 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
11954 s
= fold_convert (size_type_node
, s
);
11955 purpose
= size_int (map_idx
++);
11956 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11957 if (TREE_CODE (s
) != INTEGER_CST
)
11958 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
11960 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
11961 switch (OMP_CLAUSE_CODE (c
))
11963 case OMP_CLAUSE_MAP
:
11964 tkind
= OMP_CLAUSE_MAP_KIND (c
);
11965 tkind_zero
= tkind
;
11966 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
11969 case GOMP_MAP_ALLOC
:
11970 case GOMP_MAP_IF_PRESENT
:
11972 case GOMP_MAP_FROM
:
11973 case GOMP_MAP_TOFROM
:
11974 case GOMP_MAP_ALWAYS_TO
:
11975 case GOMP_MAP_ALWAYS_FROM
:
11976 case GOMP_MAP_ALWAYS_TOFROM
:
11977 case GOMP_MAP_RELEASE
:
11978 case GOMP_MAP_FORCE_TO
:
11979 case GOMP_MAP_FORCE_FROM
:
11980 case GOMP_MAP_FORCE_TOFROM
:
11981 case GOMP_MAP_FORCE_PRESENT
:
11982 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
11984 case GOMP_MAP_DELETE
:
11985 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
11989 if (tkind_zero
!= tkind
)
11991 if (integer_zerop (s
))
11992 tkind
= tkind_zero
;
11993 else if (integer_nonzerop (s
))
11994 tkind_zero
= tkind
;
11997 case OMP_CLAUSE_FIRSTPRIVATE
:
11998 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11999 tkind
= GOMP_MAP_TO
;
12000 tkind_zero
= tkind
;
12002 case OMP_CLAUSE_TO
:
12003 tkind
= GOMP_MAP_TO
;
12004 tkind_zero
= tkind
;
12006 case OMP_CLAUSE_FROM
:
12007 tkind
= GOMP_MAP_FROM
;
12008 tkind_zero
= tkind
;
12011 gcc_unreachable ();
12013 gcc_checking_assert (tkind
12014 < (HOST_WIDE_INT_C (1U) << talign_shift
));
12015 gcc_checking_assert (tkind_zero
12016 < (HOST_WIDE_INT_C (1U) << talign_shift
));
12017 talign
= ceil_log2 (talign
);
12018 tkind
|= talign
<< talign_shift
;
12019 tkind_zero
|= talign
<< talign_shift
;
12020 gcc_checking_assert (tkind
12021 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
12022 gcc_checking_assert (tkind_zero
12023 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
12024 if (tkind
== tkind_zero
)
12025 x
= build_int_cstu (tkind_type
, tkind
);
12028 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
12029 x
= build3 (COND_EXPR
, tkind_type
,
12030 fold_build2 (EQ_EXPR
, boolean_type_node
,
12031 unshare_expr (s
), size_zero_node
),
12032 build_int_cstu (tkind_type
, tkind_zero
),
12033 build_int_cstu (tkind_type
, tkind
));
12035 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
12040 case OMP_CLAUSE_FIRSTPRIVATE
:
12041 if (is_oacc_parallel_or_serial (ctx
))
12042 goto oacc_firstprivate_map
;
12043 ovar
= OMP_CLAUSE_DECL (c
);
12044 if (omp_is_reference (ovar
))
12045 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
12047 talign
= DECL_ALIGN_UNIT (ovar
);
12048 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
12049 x
= build_sender_ref (ovar
, ctx
);
12050 tkind
= GOMP_MAP_FIRSTPRIVATE
;
12051 type
= TREE_TYPE (ovar
);
12052 if (omp_is_reference (ovar
))
12053 type
= TREE_TYPE (type
);
12054 if ((INTEGRAL_TYPE_P (type
)
12055 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
12056 || TREE_CODE (type
) == POINTER_TYPE
)
12058 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
12060 if (omp_is_reference (var
))
12061 t
= build_simple_mem_ref (var
);
12062 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
12063 TREE_NO_WARNING (var
) = 1;
12064 if (TREE_CODE (type
) != POINTER_TYPE
)
12065 t
= fold_convert (pointer_sized_int_node
, t
);
12066 t
= fold_convert (TREE_TYPE (x
), t
);
12067 gimplify_assign (x
, t
, &ilist
);
12069 else if (omp_is_reference (var
))
12070 gimplify_assign (x
, var
, &ilist
);
12071 else if (is_gimple_reg (var
))
12073 tree avar
= create_tmp_var (TREE_TYPE (var
));
12074 mark_addressable (avar
);
12075 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
12076 TREE_NO_WARNING (var
) = 1;
12077 gimplify_assign (avar
, var
, &ilist
);
12078 avar
= build_fold_addr_expr (avar
);
12079 gimplify_assign (x
, avar
, &ilist
);
12083 var
= build_fold_addr_expr (var
);
12084 gimplify_assign (x
, var
, &ilist
);
12086 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
12088 else if (omp_is_reference (ovar
))
12089 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
12091 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
12092 s
= fold_convert (size_type_node
, s
);
12093 purpose
= size_int (map_idx
++);
12094 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
12095 if (TREE_CODE (s
) != INTEGER_CST
)
12096 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
12098 gcc_checking_assert (tkind
12099 < (HOST_WIDE_INT_C (1U) << talign_shift
));
12100 talign
= ceil_log2 (talign
);
12101 tkind
|= talign
<< talign_shift
;
12102 gcc_checking_assert (tkind
12103 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
12104 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
12105 build_int_cstu (tkind_type
, tkind
));
12108 case OMP_CLAUSE_USE_DEVICE_PTR
:
12109 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12110 case OMP_CLAUSE_IS_DEVICE_PTR
:
12111 ovar
= OMP_CLAUSE_DECL (c
);
12112 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
12114 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
12116 tkind
= (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
12117 ? GOMP_MAP_USE_DEVICE_PTR
: GOMP_MAP_FIRSTPRIVATE_INT
);
12118 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
12120 else if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
12122 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
12123 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
12127 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
12128 x
= build_sender_ref (ovar
, ctx
);
12131 if (is_gimple_omp_oacc (ctx
->stmt
))
12133 gcc_assert (tkind
== GOMP_MAP_USE_DEVICE_PTR
);
12135 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
))
12136 tkind
= GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT
;
12139 type
= TREE_TYPE (ovar
);
12140 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
12141 var
= lang_hooks
.decls
.omp_array_data (ovar
, false);
12142 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12143 && !omp_is_reference (ovar
)
12144 && !omp_is_allocatable_or_ptr (ovar
))
12145 || TREE_CODE (type
) == ARRAY_TYPE
)
12146 var
= build_fold_addr_expr (var
);
12149 if (omp_is_reference (ovar
)
12150 || omp_check_optional_argument (ovar
, false)
12151 || omp_is_allocatable_or_ptr (ovar
))
12153 type
= TREE_TYPE (type
);
12154 if (TREE_CODE (type
) != ARRAY_TYPE
12155 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
12156 && !omp_is_allocatable_or_ptr (ovar
))
12157 || (omp_is_reference (ovar
)
12158 && omp_is_allocatable_or_ptr (ovar
))))
12159 var
= build_simple_mem_ref (var
);
12160 var
= fold_convert (TREE_TYPE (x
), var
);
12164 present
= omp_check_optional_argument (ovar
, true);
12167 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
12168 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
12169 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
12170 tree new_x
= unshare_expr (x
);
12171 gimplify_expr (&present
, &ilist
, NULL
, is_gimple_val
,
12173 gcond
*cond
= gimple_build_cond_from_tree (present
,
12176 gimple_seq_add_stmt (&ilist
, cond
);
12177 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_label
));
12178 gimplify_assign (new_x
, null_pointer_node
, &ilist
);
12179 gimple_seq_add_stmt (&ilist
, gimple_build_goto (opt_arg_label
));
12180 gimple_seq_add_stmt (&ilist
,
12181 gimple_build_label (notnull_label
));
12182 gimplify_assign (x
, var
, &ilist
);
12183 gimple_seq_add_stmt (&ilist
,
12184 gimple_build_label (opt_arg_label
));
12187 gimplify_assign (x
, var
, &ilist
);
12189 purpose
= size_int (map_idx
++);
12190 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
12191 gcc_checking_assert (tkind
12192 < (HOST_WIDE_INT_C (1U) << talign_shift
));
12193 gcc_checking_assert (tkind
12194 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
12195 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
12196 build_int_cstu (tkind_type
, tkind
));
12200 gcc_assert (map_idx
== map_cnt
);
12202 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
12203 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
12204 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
12205 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
12206 for (int i
= 1; i
<= 2; i
++)
12207 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
12209 gimple_seq initlist
= NULL
;
12210 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
12211 TREE_VEC_ELT (t
, i
)),
12212 &initlist
, true, NULL_TREE
);
12213 gimple_seq_add_seq (&ilist
, initlist
);
12215 tree clobber
= build_clobber (TREE_TYPE (TREE_VEC_ELT (t
, i
)));
12216 gimple_seq_add_stmt (&olist
,
12217 gimple_build_assign (TREE_VEC_ELT (t
, i
),
12221 tree clobber
= build_clobber (ctx
->record_type
);
12222 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
12226 /* Once all the expansions are done, sequence all the different
12227 fragments inside gimple_omp_body. */
12232 && ctx
->record_type
)
12234 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
12235 /* fixup_child_record_type might have changed receiver_decl's type. */
12236 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
12237 gimple_seq_add_stmt (&new_body
,
12238 gimple_build_assign (ctx
->receiver_decl
, t
));
12240 gimple_seq_add_seq (&new_body
, fplist
);
12242 if (offloaded
|| data_region
)
12244 tree prev
= NULL_TREE
;
12245 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12246 switch (OMP_CLAUSE_CODE (c
))
12251 case OMP_CLAUSE_FIRSTPRIVATE
:
12252 if (is_gimple_omp_oacc (ctx
->stmt
))
12254 var
= OMP_CLAUSE_DECL (c
);
12255 if (omp_is_reference (var
)
12256 || is_gimple_reg_type (TREE_TYPE (var
)))
12258 tree new_var
= lookup_decl (var
, ctx
);
12260 type
= TREE_TYPE (var
);
12261 if (omp_is_reference (var
))
12262 type
= TREE_TYPE (type
);
12263 if ((INTEGRAL_TYPE_P (type
)
12264 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
12265 || TREE_CODE (type
) == POINTER_TYPE
)
12267 x
= build_receiver_ref (var
, false, ctx
);
12268 if (TREE_CODE (type
) != POINTER_TYPE
)
12269 x
= fold_convert (pointer_sized_int_node
, x
);
12270 x
= fold_convert (type
, x
);
12271 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12273 if (omp_is_reference (var
))
12275 tree v
= create_tmp_var_raw (type
, get_name (var
));
12276 gimple_add_tmp_var (v
);
12277 TREE_ADDRESSABLE (v
) = 1;
12278 gimple_seq_add_stmt (&new_body
,
12279 gimple_build_assign (v
, x
));
12280 x
= build_fold_addr_expr (v
);
12282 gimple_seq_add_stmt (&new_body
,
12283 gimple_build_assign (new_var
, x
));
12287 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
12288 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12290 gimple_seq_add_stmt (&new_body
,
12291 gimple_build_assign (new_var
, x
));
12294 else if (is_variable_sized (var
))
12296 tree pvar
= DECL_VALUE_EXPR (var
);
12297 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12298 pvar
= TREE_OPERAND (pvar
, 0);
12299 gcc_assert (DECL_P (pvar
));
12300 tree new_var
= lookup_decl (pvar
, ctx
);
12301 x
= build_receiver_ref (var
, false, ctx
);
12302 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12303 gimple_seq_add_stmt (&new_body
,
12304 gimple_build_assign (new_var
, x
));
12307 case OMP_CLAUSE_PRIVATE
:
12308 if (is_gimple_omp_oacc (ctx
->stmt
))
12310 var
= OMP_CLAUSE_DECL (c
);
12311 if (omp_is_reference (var
))
12313 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12314 tree new_var
= lookup_decl (var
, ctx
);
12315 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
12316 if (TREE_CONSTANT (x
))
12318 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
12320 gimple_add_tmp_var (x
);
12321 TREE_ADDRESSABLE (x
) = 1;
12322 x
= build_fold_addr_expr_loc (clause_loc
, x
);
12327 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12328 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12329 gimple_seq_add_stmt (&new_body
,
12330 gimple_build_assign (new_var
, x
));
12333 case OMP_CLAUSE_USE_DEVICE_PTR
:
12334 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12335 case OMP_CLAUSE_IS_DEVICE_PTR
:
12337 gimple_seq assign_body
;
12338 bool is_array_data
;
12339 bool do_optional_check
;
12340 assign_body
= NULL
;
12341 do_optional_check
= false;
12342 var
= OMP_CLAUSE_DECL (c
);
12343 is_array_data
= lang_hooks
.decls
.omp_array_data (var
, true) != NULL
;
12345 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
12346 x
= build_sender_ref (is_array_data
12347 ? (splay_tree_key
) &DECL_NAME (var
)
12348 : (splay_tree_key
) &DECL_UID (var
), ctx
);
12350 x
= build_receiver_ref (var
, false, ctx
);
12354 bool is_ref
= omp_is_reference (var
);
12355 do_optional_check
= true;
12356 /* First, we copy the descriptor data from the host; then
12357 we update its data to point to the target address. */
12358 new_var
= lookup_decl (var
, ctx
);
12359 new_var
= DECL_VALUE_EXPR (new_var
);
12364 var
= build_fold_indirect_ref (var
);
12365 gimplify_expr (&var
, &assign_body
, NULL
, is_gimple_val
,
12367 v
= create_tmp_var_raw (TREE_TYPE (var
), get_name (var
));
12368 gimple_add_tmp_var (v
);
12369 TREE_ADDRESSABLE (v
) = 1;
12370 gimple_seq_add_stmt (&assign_body
,
12371 gimple_build_assign (v
, var
));
12372 tree rhs
= build_fold_addr_expr (v
);
12373 gimple_seq_add_stmt (&assign_body
,
12374 gimple_build_assign (new_var
, rhs
));
12377 gimple_seq_add_stmt (&assign_body
,
12378 gimple_build_assign (new_var
, var
));
12380 tree v2
= lang_hooks
.decls
.omp_array_data (unshare_expr (v
), false);
12382 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12383 gimple_seq_add_stmt (&assign_body
,
12384 gimple_build_assign (v2
, x
));
12386 else if (is_variable_sized (var
))
12388 tree pvar
= DECL_VALUE_EXPR (var
);
12389 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12390 pvar
= TREE_OPERAND (pvar
, 0);
12391 gcc_assert (DECL_P (pvar
));
12392 new_var
= lookup_decl (pvar
, ctx
);
12393 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12394 gimple_seq_add_stmt (&assign_body
,
12395 gimple_build_assign (new_var
, x
));
12397 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12398 && !omp_is_reference (var
)
12399 && !omp_is_allocatable_or_ptr (var
))
12400 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12402 new_var
= lookup_decl (var
, ctx
);
12403 new_var
= DECL_VALUE_EXPR (new_var
);
12404 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
12405 new_var
= TREE_OPERAND (new_var
, 0);
12406 gcc_assert (DECL_P (new_var
));
12407 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12408 gimple_seq_add_stmt (&assign_body
,
12409 gimple_build_assign (new_var
, x
));
12413 tree type
= TREE_TYPE (var
);
12414 new_var
= lookup_decl (var
, ctx
);
12415 if (omp_is_reference (var
))
12417 type
= TREE_TYPE (type
);
12418 if (TREE_CODE (type
) != ARRAY_TYPE
12419 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
12420 || (omp_is_reference (var
)
12421 && omp_is_allocatable_or_ptr (var
))))
12423 tree v
= create_tmp_var_raw (type
, get_name (var
));
12424 gimple_add_tmp_var (v
);
12425 TREE_ADDRESSABLE (v
) = 1;
12426 x
= fold_convert (type
, x
);
12427 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
,
12429 gimple_seq_add_stmt (&assign_body
,
12430 gimple_build_assign (v
, x
));
12431 x
= build_fold_addr_expr (v
);
12432 do_optional_check
= true;
12435 new_var
= DECL_VALUE_EXPR (new_var
);
12436 x
= fold_convert (TREE_TYPE (new_var
), x
);
12437 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12438 gimple_seq_add_stmt (&assign_body
,
12439 gimple_build_assign (new_var
, x
));
12442 present
= (do_optional_check
12443 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c
), true)
12447 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
12448 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
12449 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
12450 glabel
*null_glabel
= gimple_build_label (null_label
);
12451 glabel
*notnull_glabel
= gimple_build_label (notnull_label
);
12452 ggoto
*opt_arg_ggoto
= gimple_build_goto (opt_arg_label
);
12453 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12455 gimplify_expr (&present
, &new_body
, NULL
, is_gimple_val
,
12457 gcond
*cond
= gimple_build_cond_from_tree (present
,
12460 gimple_seq_add_stmt (&new_body
, cond
);
12461 gimple_seq_add_stmt (&new_body
, null_glabel
);
12462 gimplify_assign (new_var
, null_pointer_node
, &new_body
);
12463 gimple_seq_add_stmt (&new_body
, opt_arg_ggoto
);
12464 gimple_seq_add_stmt (&new_body
, notnull_glabel
);
12465 gimple_seq_add_seq (&new_body
, assign_body
);
12466 gimple_seq_add_stmt (&new_body
,
12467 gimple_build_label (opt_arg_label
));
12470 gimple_seq_add_seq (&new_body
, assign_body
);
12473 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12474 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12475 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12476 or references to VLAs. */
12477 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12478 switch (OMP_CLAUSE_CODE (c
))
12483 case OMP_CLAUSE_MAP
:
12484 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12485 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
12487 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12488 poly_int64 offset
= 0;
12490 var
= OMP_CLAUSE_DECL (c
);
12492 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
12493 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
12495 && varpool_node::get_create (var
)->offloadable
)
12497 if (TREE_CODE (var
) == INDIRECT_REF
12498 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
12499 var
= TREE_OPERAND (var
, 0);
12500 if (TREE_CODE (var
) == COMPONENT_REF
)
12502 var
= get_addr_base_and_unit_offset (var
, &offset
);
12503 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
12505 else if (DECL_SIZE (var
)
12506 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12508 tree var2
= DECL_VALUE_EXPR (var
);
12509 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12510 var2
= TREE_OPERAND (var2
, 0);
12511 gcc_assert (DECL_P (var2
));
12514 tree new_var
= lookup_decl (var
, ctx
), x
;
12515 tree type
= TREE_TYPE (new_var
);
12517 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
12518 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
12521 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
12523 new_var
= build2 (MEM_REF
, type
,
12524 build_fold_addr_expr (new_var
),
12525 build_int_cst (build_pointer_type (type
),
12528 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
12530 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
12531 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
12532 new_var
= build2 (MEM_REF
, type
,
12533 build_fold_addr_expr (new_var
),
12534 build_int_cst (build_pointer_type (type
),
12538 is_ref
= omp_is_reference (var
);
12539 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
12541 bool ref_to_array
= false;
12544 type
= TREE_TYPE (type
);
12545 if (TREE_CODE (type
) == ARRAY_TYPE
)
12547 type
= build_pointer_type (type
);
12548 ref_to_array
= true;
12551 else if (TREE_CODE (type
) == ARRAY_TYPE
)
12553 tree decl2
= DECL_VALUE_EXPR (new_var
);
12554 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
12555 decl2
= TREE_OPERAND (decl2
, 0);
12556 gcc_assert (DECL_P (decl2
));
12558 type
= TREE_TYPE (new_var
);
12560 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
12561 x
= fold_convert_loc (clause_loc
, type
, x
);
12562 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
12564 tree bias
= OMP_CLAUSE_SIZE (c
);
12566 bias
= lookup_decl (bias
, ctx
);
12567 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
12568 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
12570 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
12571 TREE_TYPE (x
), x
, bias
);
12574 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12575 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12576 if (is_ref
&& !ref_to_array
)
12578 tree t
= create_tmp_var_raw (type
, get_name (var
));
12579 gimple_add_tmp_var (t
);
12580 TREE_ADDRESSABLE (t
) = 1;
12581 gimple_seq_add_stmt (&new_body
,
12582 gimple_build_assign (t
, x
));
12583 x
= build_fold_addr_expr_loc (clause_loc
, t
);
12585 gimple_seq_add_stmt (&new_body
,
12586 gimple_build_assign (new_var
, x
));
12589 else if (OMP_CLAUSE_CHAIN (c
)
12590 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
12592 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
12593 == GOMP_MAP_FIRSTPRIVATE_POINTER
12594 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
12595 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
12598 case OMP_CLAUSE_PRIVATE
:
12599 var
= OMP_CLAUSE_DECL (c
);
12600 if (is_variable_sized (var
))
12602 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12603 tree new_var
= lookup_decl (var
, ctx
);
12604 tree pvar
= DECL_VALUE_EXPR (var
);
12605 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12606 pvar
= TREE_OPERAND (pvar
, 0);
12607 gcc_assert (DECL_P (pvar
));
12608 tree new_pvar
= lookup_decl (pvar
, ctx
);
12609 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
12610 tree al
= size_int (DECL_ALIGN (var
));
12611 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
12612 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
12613 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
12614 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12615 gimple_seq_add_stmt (&new_body
,
12616 gimple_build_assign (new_pvar
, x
));
12618 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
12620 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12621 tree new_var
= lookup_decl (var
, ctx
);
12622 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
12623 if (TREE_CONSTANT (x
))
12628 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
12629 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
12630 tree al
= size_int (TYPE_ALIGN (rtype
));
12631 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
12634 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12635 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12636 gimple_seq_add_stmt (&new_body
,
12637 gimple_build_assign (new_var
, x
));
12642 gimple_seq fork_seq
= NULL
;
12643 gimple_seq join_seq
= NULL
;
12645 if (is_oacc_parallel_or_serial (ctx
))
12647 /* If there are reductions on the offloaded region itself, treat
12648 them as a dummy GANG loop. */
12649 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
12651 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
12652 false, NULL
, NULL
, &fork_seq
, &join_seq
, ctx
);
12655 gimple_seq_add_seq (&new_body
, fork_seq
);
12656 gimple_seq_add_seq (&new_body
, tgt_body
);
12657 gimple_seq_add_seq (&new_body
, join_seq
);
12660 new_body
= maybe_catch_exception (new_body
);
12662 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12663 gimple_omp_set_body (stmt
, new_body
);
12666 bind
= gimple_build_bind (NULL
, NULL
,
12667 tgt_bind
? gimple_bind_block (tgt_bind
)
12669 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12670 gimple_bind_add_seq (bind
, ilist
);
12671 gimple_bind_add_stmt (bind
, stmt
);
12672 gimple_bind_add_seq (bind
, olist
);
12674 pop_gimplify_context (NULL
);
12678 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12679 gimple_bind_add_stmt (dep_bind
, bind
);
12680 gimple_bind_add_seq (dep_bind
, dep_olist
);
12681 pop_gimplify_context (dep_bind
);
/* Expand code for an OpenMP teams directive.  Replaces the
   GIMPLE_OMP_TEAMS statement at *GSI_P with a GIMPLE_BIND containing
   the gimplified num_teams/thread_limit expressions, the lowered
   data-sharing clauses, a call to the GOMP_teams runtime entry point,
   the lowered construct body, and a closing GIMPLE_OMP_RETURN.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  /* Build the replacement GIMPLE_BIND; lowered setup code accumulates
     in BIND_BODY, copy-out code in OLIST, destructor code in DLIST.  */
  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* An absent num_teams clause is lowered to the constant 0; otherwise
     the clause expression is converted to unsigned and gimplified into
     BIND_BODY so it is a valid call argument.  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise for thread_limit: absent means 0.  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  /* Lower the data-sharing clauses, the construct body, and the
     reduction clauses of the teams statement.  */
  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  /* Emit the GOMP_teams (num_teams, thread_limit) runtime call,
     carrying the teams statement's location.  */
  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
  gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
  gimple_set_location (call, loc);
  gimple_seq_add_stmt (&bind_body, call);

  /* Move the lowered body out of the teams statement, append the
     copy-out (OLIST) and destructor (DLIST) sequences, and terminate
     the region.  */
  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  /* Hand the context's block vars to the new bind/block.  */
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  /* Variables recorded in task_shared_vars also need regimplifying.  */
  if (task_shared_vars
      && VAR_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  /* Types and decls have no interesting subtrees to walk.  */
  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* Context of the statement being regimplified; used to look up the
     real decl behind an omp_member_access_dummy_var.  */
  omp_context *ctx;
  /* Stack of (saved DECL_VALUE_EXPR, decl) pairs pushed by the walk
     callback so the caller can restore them afterwards.  */
  vec<tree> *decls;
};
/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      /* DATA is the walk_stmt_info whose info field carries the
	 lower_omp_regimplify_operands_data set up by the caller.  */
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  /* Save the original DECL_VALUE_EXPR and the decl so that
	     the caller can restore them after regimplification, then
	     install a copy remapped from T to O.  */
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  /* Types and decls have no interesting subtrees to walk.  */
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      /* Temporarily remap DECL_VALUE_EXPRs of member-access dummy vars;
	 the callback pushes (old value, decl) pairs onto DECLS.  */
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  /* Restore the DECL_VALUE_EXPRs saved by the walk above.  Pairs were
     pushed value-then-decl, so they pop decl-then-value.  */
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
12842 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12844 gimple
*stmt
= gsi_stmt (*gsi_p
);
12845 struct walk_stmt_info wi
;
12848 if (gimple_has_location (stmt
))
12849 input_location
= gimple_location (stmt
);
12851 if (task_shared_vars
)
12852 memset (&wi
, '\0', sizeof (wi
));
12854 /* If we have issued syntax errors, avoid doing any heavy lifting.
12855 Just replace the OMP directives with a NOP to avoid
12856 confusing RTL expansion. */
12857 if (seen_error () && is_gimple_omp (stmt
))
12859 gsi_replace (gsi_p
, gimple_build_nop (), true);
12863 switch (gimple_code (stmt
))
12867 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
12868 if ((ctx
|| task_shared_vars
)
12869 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
12870 lower_omp_regimplify_p
,
12871 ctx
? NULL
: &wi
, NULL
)
12872 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
12873 lower_omp_regimplify_p
,
12874 ctx
? NULL
: &wi
, NULL
)))
12875 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
12879 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
12881 case GIMPLE_EH_FILTER
:
12882 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
12885 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
12886 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
12888 case GIMPLE_TRANSACTION
:
12889 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
12893 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
12894 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
12896 case GIMPLE_OMP_PARALLEL
:
12897 case GIMPLE_OMP_TASK
:
12898 ctx
= maybe_lookup_ctx (stmt
);
12900 if (ctx
->cancellable
)
12901 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12902 lower_omp_taskreg (gsi_p
, ctx
);
12904 case GIMPLE_OMP_FOR
:
12905 ctx
= maybe_lookup_ctx (stmt
);
12907 if (ctx
->cancellable
)
12908 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12909 lower_omp_for (gsi_p
, ctx
);
12911 case GIMPLE_OMP_SECTIONS
:
12912 ctx
= maybe_lookup_ctx (stmt
);
12914 if (ctx
->cancellable
)
12915 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12916 lower_omp_sections (gsi_p
, ctx
);
12918 case GIMPLE_OMP_SINGLE
:
12919 ctx
= maybe_lookup_ctx (stmt
);
12921 lower_omp_single (gsi_p
, ctx
);
12923 case GIMPLE_OMP_MASTER
:
12924 ctx
= maybe_lookup_ctx (stmt
);
12926 lower_omp_master (gsi_p
, ctx
);
12928 case GIMPLE_OMP_TASKGROUP
:
12929 ctx
= maybe_lookup_ctx (stmt
);
12931 lower_omp_taskgroup (gsi_p
, ctx
);
12933 case GIMPLE_OMP_ORDERED
:
12934 ctx
= maybe_lookup_ctx (stmt
);
12936 lower_omp_ordered (gsi_p
, ctx
);
12938 case GIMPLE_OMP_SCAN
:
12939 ctx
= maybe_lookup_ctx (stmt
);
12941 lower_omp_scan (gsi_p
, ctx
);
12943 case GIMPLE_OMP_CRITICAL
:
12944 ctx
= maybe_lookup_ctx (stmt
);
12946 lower_omp_critical (gsi_p
, ctx
);
12948 case GIMPLE_OMP_ATOMIC_LOAD
:
12949 if ((ctx
|| task_shared_vars
)
12950 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12951 as_a
<gomp_atomic_load
*> (stmt
)),
12952 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
12953 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
12955 case GIMPLE_OMP_TARGET
:
12956 ctx
= maybe_lookup_ctx (stmt
);
12958 lower_omp_target (gsi_p
, ctx
);
12960 case GIMPLE_OMP_TEAMS
:
12961 ctx
= maybe_lookup_ctx (stmt
);
12963 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
12964 lower_omp_taskreg (gsi_p
, ctx
);
12966 lower_omp_teams (gsi_p
, ctx
);
12970 call_stmt
= as_a
<gcall
*> (stmt
);
12971 fndecl
= gimple_call_fndecl (call_stmt
);
12973 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
12974 switch (DECL_FUNCTION_CODE (fndecl
))
12976 case BUILT_IN_GOMP_BARRIER
:
12980 case BUILT_IN_GOMP_CANCEL
:
12981 case BUILT_IN_GOMP_CANCELLATION_POINT
:
12984 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
12985 cctx
= cctx
->outer
;
12986 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
12987 if (!cctx
->cancellable
)
12989 if (DECL_FUNCTION_CODE (fndecl
)
12990 == BUILT_IN_GOMP_CANCELLATION_POINT
)
12992 stmt
= gimple_build_nop ();
12993 gsi_replace (gsi_p
, stmt
, false);
12997 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
12999 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
13000 gimple_call_set_fndecl (call_stmt
, fndecl
);
13001 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
13004 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
13005 gimple_call_set_lhs (call_stmt
, lhs
);
13006 tree fallthru_label
;
13007 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
13009 g
= gimple_build_label (fallthru_label
);
13010 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
13011 g
= gimple_build_cond (NE_EXPR
, lhs
,
13012 fold_convert (TREE_TYPE (lhs
),
13013 boolean_false_node
),
13014 cctx
->cancel_label
, fallthru_label
);
13015 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
13022 case GIMPLE_ASSIGN
:
13023 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
13025 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
13026 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
13027 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
13028 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
13029 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
13030 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
13031 && (gimple_omp_target_kind (up
->stmt
)
13032 == GF_OMP_TARGET_KIND_DATA
)))
13034 else if (!up
->lastprivate_conditional_map
)
13036 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
13037 if (TREE_CODE (lhs
) == MEM_REF
13038 && DECL_P (TREE_OPERAND (lhs
, 0))
13039 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
13040 0))) == REFERENCE_TYPE
)
13041 lhs
= TREE_OPERAND (lhs
, 0);
13043 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
13046 if (up
->combined_into_simd_safelen1
)
13049 if (gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
)
13052 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
13053 clauses
= gimple_omp_for_clauses (up
->stmt
);
13055 clauses
= gimple_omp_sections_clauses (up
->stmt
);
13056 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
);
13057 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
13058 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
13059 OMP_CLAUSE__CONDTEMP_
);
13060 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
13061 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
13062 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
13069 if ((ctx
|| task_shared_vars
)
13070 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
13073 /* Just remove clobbers, this should happen only if we have
13074 "privatized" local addressable variables in SIMD regions,
13075 the clobber isn't needed in that case and gimplifying address
13076 of the ARRAY_REF into a pointer and creating MEM_REF based
13077 clobber would create worse code than we get with the clobber
13079 if (gimple_clobber_p (stmt
))
13081 gsi_replace (gsi_p
, gimple_build_nop (), true);
13084 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
13091 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
13093 location_t saved_location
= input_location
;
13094 gimple_stmt_iterator gsi
;
13095 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
13096 lower_omp_1 (&gsi
, ctx
);
13097 /* During gimplification, we haven't folded statments inside offloading
13098 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
13099 if (target_nesting_level
|| taskreg_nesting_level
)
13100 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
13102 input_location
= saved_location
;
13105 /* Main entry point. */
13107 static unsigned int
13108 execute_lower_omp (void)
13114 /* This pass always runs, to provide PROP_gimple_lomp.
13115 But often, there is nothing to do. */
13116 if (flag_openacc
== 0 && flag_openmp
== 0
13117 && flag_openmp_simd
== 0)
13120 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
13121 delete_omp_context
);
13123 body
= gimple_body (current_function_decl
);
13125 scan_omp (&body
, NULL
);
13126 gcc_assert (taskreg_nesting_level
== 0);
13127 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
13128 finish_taskreg_scan (ctx
);
13129 taskreg_contexts
.release ();
13131 if (all_contexts
->root
)
13133 if (task_shared_vars
)
13134 push_gimplify_context ();
13135 lower_omp (&body
, NULL
);
13136 if (task_shared_vars
)
13137 pop_gimplify_context (NULL
);
13142 splay_tree_delete (all_contexts
);
13143 all_contexts
= NULL
;
13145 BITMAP_FREE (task_shared_vars
);
13146 BITMAP_FREE (global_nonaddressable_vars
);
13148 /* If current function is a method, remove artificial dummy VAR_DECL created
13149 for non-static data member privatization, they aren't needed for
13150 debuginfo nor anything else, have been already replaced everywhere in the
13151 IL and cause problems with LTO. */
13152 if (DECL_ARGUMENTS (current_function_decl
)
13153 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
13154 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
13156 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
13162 const pass_data pass_data_lower_omp
=
13164 GIMPLE_PASS
, /* type */
13165 "omplower", /* name */
13166 OPTGROUP_OMP
, /* optinfo_flags */
13167 TV_NONE
, /* tv_id */
13168 PROP_gimple_any
, /* properties_required */
13169 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
13170 0, /* properties_destroyed */
13171 0, /* todo_flags_start */
13172 0, /* todo_flags_finish */
13175 class pass_lower_omp
: public gimple_opt_pass
13178 pass_lower_omp (gcc::context
*ctxt
)
13179 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
13182 /* opt_pass methods: */
13183 virtual unsigned int execute (function
*) { return execute_lower_omp (); }
13185 }; // class pass_lower_omp
13187 } // anon namespace
13190 make_pass_lower_omp (gcc::context
*ctxt
)
13192 return new pass_lower_omp (ctxt
);
13195 /* The following is a utility to diagnose structured block violations.
13196 It is not part of the "omplower" pass, as that's invoked too late. It
13197 should be invoked by the respective front ends after gimplification. */
13199 static splay_tree all_labels
;
13201 /* Check for mismatched contexts and generate an error if needed. Return
13202 true if an error is detected. */
13205 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
13206 gimple
*branch_ctx
, gimple
*label_ctx
)
13208 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
13209 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
13211 if (label_ctx
== branch_ctx
)
13214 const char* kind
= NULL
;
13218 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
13219 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
13221 gcc_checking_assert (kind
== NULL
);
13227 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
13231 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
13232 so we could traverse it and issue a correct "exit" or "enter" error
13233 message upon a structured block violation.
13235 We built the context by building a list with tree_cons'ing, but there is
13236 no easy counterpart in gimple tuples. It seems like far too much work
13237 for issuing exit/enter error messages. If someone really misses the
13238 distinct error message... patches welcome. */
13241 /* Try to avoid confusing the user by producing and error message
13242 with correct "exit" or "enter" verbiage. We prefer "exit"
13243 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
13244 if (branch_ctx
== NULL
)
13250 if (TREE_VALUE (label_ctx
) == branch_ctx
)
13255 label_ctx
= TREE_CHAIN (label_ctx
);
13260 error ("invalid exit from %s structured block", kind
);
13262 error ("invalid entry to %s structured block", kind
);
13265 /* If it's obvious we have an invalid entry, be specific about the error. */
13266 if (branch_ctx
== NULL
)
13267 error ("invalid entry to %s structured block", kind
);
13270 /* Otherwise, be vague and lazy, but efficient. */
13271 error ("invalid branch to/from %s structured block", kind
);
13274 gsi_replace (gsi_p
, gimple_build_nop (), false);
13278 /* Pass 1: Create a minimal tree of structured blocks, and record
13279 where each label is found. */
13282 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
13283 struct walk_stmt_info
*wi
)
13285 gimple
*context
= (gimple
*) wi
->info
;
13286 gimple
*inner_context
;
13287 gimple
*stmt
= gsi_stmt (*gsi_p
);
13289 *handled_ops_p
= true;
13291 switch (gimple_code (stmt
))
13295 case GIMPLE_OMP_PARALLEL
:
13296 case GIMPLE_OMP_TASK
:
13297 case GIMPLE_OMP_SECTIONS
:
13298 case GIMPLE_OMP_SINGLE
:
13299 case GIMPLE_OMP_SECTION
:
13300 case GIMPLE_OMP_MASTER
:
13301 case GIMPLE_OMP_ORDERED
:
13302 case GIMPLE_OMP_SCAN
:
13303 case GIMPLE_OMP_CRITICAL
:
13304 case GIMPLE_OMP_TARGET
:
13305 case GIMPLE_OMP_TEAMS
:
13306 case GIMPLE_OMP_TASKGROUP
:
13307 /* The minimal context here is just the current OMP construct. */
13308 inner_context
= stmt
;
13309 wi
->info
= inner_context
;
13310 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
13311 wi
->info
= context
;
13314 case GIMPLE_OMP_FOR
:
13315 inner_context
= stmt
;
13316 wi
->info
= inner_context
;
13317 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13319 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
13320 diagnose_sb_1
, NULL
, wi
);
13321 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
13322 wi
->info
= context
;
13326 splay_tree_insert (all_labels
,
13327 (splay_tree_key
) gimple_label_label (
13328 as_a
<glabel
*> (stmt
)),
13329 (splay_tree_value
) context
);
13339 /* Pass 2: Check each branch and see if its context differs from that of
13340 the destination label's context. */
13343 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
13344 struct walk_stmt_info
*wi
)
13346 gimple
*context
= (gimple
*) wi
->info
;
13348 gimple
*stmt
= gsi_stmt (*gsi_p
);
13350 *handled_ops_p
= true;
13352 switch (gimple_code (stmt
))
13356 case GIMPLE_OMP_PARALLEL
:
13357 case GIMPLE_OMP_TASK
:
13358 case GIMPLE_OMP_SECTIONS
:
13359 case GIMPLE_OMP_SINGLE
:
13360 case GIMPLE_OMP_SECTION
:
13361 case GIMPLE_OMP_MASTER
:
13362 case GIMPLE_OMP_ORDERED
:
13363 case GIMPLE_OMP_SCAN
:
13364 case GIMPLE_OMP_CRITICAL
:
13365 case GIMPLE_OMP_TARGET
:
13366 case GIMPLE_OMP_TEAMS
:
13367 case GIMPLE_OMP_TASKGROUP
:
13369 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
13370 wi
->info
= context
;
13373 case GIMPLE_OMP_FOR
:
13375 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13377 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
13378 diagnose_sb_2
, NULL
, wi
);
13379 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
13380 wi
->info
= context
;
13385 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
13386 tree lab
= gimple_cond_true_label (cond_stmt
);
13389 n
= splay_tree_lookup (all_labels
,
13390 (splay_tree_key
) lab
);
13391 diagnose_sb_0 (gsi_p
, context
,
13392 n
? (gimple
*) n
->value
: NULL
);
13394 lab
= gimple_cond_false_label (cond_stmt
);
13397 n
= splay_tree_lookup (all_labels
,
13398 (splay_tree_key
) lab
);
13399 diagnose_sb_0 (gsi_p
, context
,
13400 n
? (gimple
*) n
->value
: NULL
);
13407 tree lab
= gimple_goto_dest (stmt
);
13408 if (TREE_CODE (lab
) != LABEL_DECL
)
13411 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
13412 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
13416 case GIMPLE_SWITCH
:
13418 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
13420 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
13422 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
13423 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
13424 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
13430 case GIMPLE_RETURN
:
13431 diagnose_sb_0 (gsi_p
, context
, NULL
);
13441 static unsigned int
13442 diagnose_omp_structured_block_errors (void)
13444 struct walk_stmt_info wi
;
13445 gimple_seq body
= gimple_body (current_function_decl
);
13447 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
13449 memset (&wi
, 0, sizeof (wi
));
13450 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
13452 memset (&wi
, 0, sizeof (wi
));
13453 wi
.want_locations
= true;
13454 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
13456 gimple_set_body (current_function_decl
, body
);
13458 splay_tree_delete (all_labels
);
13466 const pass_data pass_data_diagnose_omp_blocks
=
13468 GIMPLE_PASS
, /* type */
13469 "*diagnose_omp_blocks", /* name */
13470 OPTGROUP_OMP
, /* optinfo_flags */
13471 TV_NONE
, /* tv_id */
13472 PROP_gimple_any
, /* properties_required */
13473 0, /* properties_provided */
13474 0, /* properties_destroyed */
13475 0, /* todo_flags_start */
13476 0, /* todo_flags_finish */
13479 class pass_diagnose_omp_blocks
: public gimple_opt_pass
13482 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
13483 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
13486 /* opt_pass methods: */
13487 virtual bool gate (function
*)
13489 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
13491 virtual unsigned int execute (function
*)
13493 return diagnose_omp_structured_block_errors ();
13496 }; // class pass_diagnose_omp_blocks
13498 } // anon namespace
13501 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
13503 return new pass_diagnose_omp_blocks (ctxt
);
13507 #include "gt-omp-low.h"